diff --git a/.asf.yaml b/.asf.yaml index cf409dc6a353..1a8f27e5e0e5 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -47,6 +47,7 @@ github: - Docs - Frontend Build - "Mergeable: milestone-label-check" + - "Title Validator" required_pull_request_reviews: dismiss_stale_reviews: true - required_approving_review_count: 1 + required_approving_review_count: 2 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e4d5c7dc75fe..b7ebace1b244 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -16,30 +16,40 @@ # /.github/ @SbloodyS -/deploy/ @caishunfeng -/dolphinscheduler-alert/ @caishunfeng -/dolphinscheduler-e2e/ @SbloodyS -/dolphinscheduler-api-test/ @SbloodyS -/dolphinscheduler-registry/ @caishunfeng @ruanwenjun +/deploy/ @SbloodyS +/deploy/kubernetes/ @Gallardot +/dolphinscheduler-alert/ @SbloodyS /dolphinscheduler-api/ @caishunfeng @SbloodyS -/dolphinscheduler-dao/ @caishunfeng @SbloodyS -/dolphinscheduler-dao/src/main/resources/sql/ @EricGao888 -/dolphinscheduler-common/ @caishunfeng -/dolphinscheduler-standalone-server/ @caishunfeng -/dolphinscheduler-datasource-plugin/ @caishunfeng -/dolphinscheduler-dist/ @caishunfeng -/dolphinscheduler-meter/ @caishunfeng @ruanwenjun @EricGao888 -/dolphinscheduler-scheduler-plugin/ @caishunfeng +/dolphinscheduler-api-test/ @SbloodyS +/dolphinscheduler-authentication/ @ruanwenjun +/dolphinscheduler-bom/ @ruanwenjun +/dolphinscheduler-common/ @SbloodyS +/dolphinscheduler-dao/ @SbloodyS @ruanwenjun +/dolphinscheduler-dao-plugin/ @SbloodyS @ruanwenjun +/dolphinscheduler-data-quality/ @SbloodyS +/dolphinscheduler-datasource-plugin/ @SbloodyS +/dolphinscheduler-dist/ @SbloodyS +/dolphinscheduler-e2e/ @SbloodyS +/dolphinscheduler-extract/ @ruanwenjun /dolphinscheduler-master/ @caishunfeng @SbloodyS @ruanwenjun -/dolphinscheduler-worker/ @caishunfeng @SbloodyS @ruanwenjun +/dolphinscheduler-meter/ @ruanwenjun @EricGao888 @Gallardot +/dolphinscheduler-microbench/ @SbloodyS @ruanwenjun +/dolphinscheduler-registry/ @ruanwenjun 
@Gallardot +/dolphinscheduler-scheduler-plugin/ @ruanwenjun /dolphinscheduler-service/ @caishunfeng -/dolphinscheduler-extract/ @caishunfeng @ruanwenjun /dolphinscheduler-spi/ @caishunfeng -/dolphinscheduler-task-plugin/ @caishunfeng @SbloodyS @zhuangchong +/dolphinscheduler-standalone-server/ @SbloodyS +/dolphinscheduler-storage-plugin/ @ruanwenjun @SbloodyS +/dolphinscheduler-task-plugin/ @caishunfeng @SbloodyS @zhuangchong @Gallardot /dolphinscheduler-tools/ @caishunfeng @SbloodyS @EricGao888 -/script/ @caishunfeng @SbloodyS @EricGao888 /dolphinscheduler-ui/ @songjianet @Amy0104 -/docs/ @EricGao888 +/dolphinscheduler-worker/ @caishunfeng @SbloodyS @ruanwenjun +/dolphinscheduler-yarn-aop/ @ruanwenjun + +/dolphinscheduler-dao/src/main/resources/sql/ @EricGao888 + +/script/ @SbloodyS @EricGao888 +/docs/ @EricGao888 @SbloodyS /licenses/ @EricGao888 /images/ @EricGao888 -/style/ @caishunfeng +/style/ @SbloodyS @EricGao888 diff --git a/.github/ISSUE_TEMPLATE/dsip-request.yml b/.github/ISSUE_TEMPLATE/dsip-request.yml new file mode 100644 index 000000000000..f54421b06624 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/dsip-request.yml @@ -0,0 +1,77 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +name: DSIP +description: Suggest an idea for this project +title: "[DSIP-][Module Name] DSIP title" +labels: [ "DSIP", "Waiting for reply" ] +body: + - type: markdown + attributes: + value: | + For better global communication, Please write in English. + + If you feel the description in English is not clear, then you can append description in Chinese, thanks! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please make sure to search in the [DSIP](https://github.com/apache/dolphinscheduler/issues/14102) first + to see whether the same DSIP was created already. + options: + - label: > + I had searched in the [DSIP](https://github.com/apache/dolphinscheduler/issues/14102) and found no + similar DSIP. + required: true + + - type: textarea + attributes: + label: Motivation + description: Why you want to do this change? + + - type: textarea + attributes: + label: Design Detail + description: Your design. + placeholder: > + It's better to provide a detailed design, such as the design of the interface, the design of the database, etc. + + - type: textarea + attributes: + label: Compatibility, Deprecation, and Migration Plan + description: > + If this feature is related to compatibility, deprecation, or migration, please describe it here. + + - type: textarea + attributes: + label: Test Plan + description: > + How to test this improvement. + + - type: checkboxes + attributes: + label: Code of Conduct + description: | + The Code of Conduct helps create a safe space for everyone. We require that everyone agrees to it. + options: + - label: | + I agree to follow this project's [Code of Conduct](https://www.apache.org/foundation/policies/conduct) + required: true + + - type: markdown + attributes: + value: "Thanks for completing our form!" 
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 652e41a3bd25..a3fcfc5686e2 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -31,4 +31,7 @@ This change added tests and can be verified as follows: (or) +## Pull Request Notice +[Pull Request Notice](https://github.com/apache/dolphinscheduler/blob/dev/docs/docs/en/contribute/join/pull-request.md) + If your pull request contain incompatible change, you should also add it to `docs/docs/en/guide/upgrede/incompatible.md` diff --git a/.github/actions/auto-assign-action b/.github/actions/auto-assign-action new file mode 160000 index 000000000000..288f36f8cea1 --- /dev/null +++ b/.github/actions/auto-assign-action @@ -0,0 +1 @@ +Subproject commit 288f36f8cea1dded676b9c4477a64ff91e50c104 diff --git a/.github/actions/auto-assign/auto-assign.yml b/.github/actions/auto-assign/auto-assign.yml new file mode 100644 index 000000000000..986ed6c4f2d9 --- /dev/null +++ b/.github/actions/auto-assign/auto-assign.yml @@ -0,0 +1,22 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Set addAssignees to 'author' to set the PR creator as the assignee. 
+addAssignees: author + +# Enable it to run on drafts +runOnDraft: true diff --git a/.github/actions/labeler/labeler.yml b/.github/actions/labeler/labeler.yml index b4776198e2ac..6bd9b6daf1e5 100644 --- a/.github/actions/labeler/labeler.yml +++ b/.github/actions/labeler/labeler.yml @@ -18,37 +18,54 @@ backend: - 'dolphinscheduler-alert/**/*' - 'dolphinscheduler-api/**/*' + - 'dolphinscheduler-authentication/**/*' + - 'dolphinscheduler-bom/**/*' - 'dolphinscheduler-common/**/*' - 'dolphinscheduler-dao/**/*' + - 'dolphinscheduler-dao-plugin/**/*' - 'dolphinscheduler-data-quality/**/*' - 'dolphinscheduler-datasource-plugin/**/*' - 'dolphinscheduler-dist/**/*' + - 'dolphinscheduler-extract/**/*' - 'dolphinscheduler-master/**/*' + - 'dolphinscheduler-meter/**/*' + - 'dolphinscheduler-microbench/**/*' - 'dolphinscheduler-registry/**/*' - - 'dolphinscheduler-extract/**/*' - 'dolphinscheduler-scheduler-plugin/**/*' - 'dolphinscheduler-service/**/*' - 'dolphinscheduler-spi/**/*' - 'dolphinscheduler-standalone-server/**/*' + - 'dolphinscheduler-storage-plugin/**/*' - 'dolphinscheduler-task-plugin/**/*' - 'dolphinscheduler-tools/**/*' - 'dolphinscheduler-worker/**/*' + - 'dolphinscheduler-yarn-aop/**/*' - 'script/**/*' + - '.github/workflows/backend.yml' document: - 'docs/**/*' + - '**/*.md' CI&CD: - any: ['.github/**/*'] docker: - - any: ['.deploy/**/*'] + - any: ['deploy/docker/**/*'] UI: - any: ['dolphinscheduler-ui/**/*'] + - any: ['.github/workflows/frontend.yml'] e2e: - any: ['dolphinscheduler-e2e/**/*'] + - any: ['dolphinscheduler-api-test/**/*'] + - any: ['.github/workflows/e2e.yml'] + - any: ['.github/workflows/api-test.yml'] + - any: ['.github/workflows/e2e-k8s.yml'] test: - - any: ['dolphinscheduler-api-test/**/*'] + - any: ['**/test/**/*'] + +kubernetes: + - any: ['deploy/kubernetes/**/*'] diff --git a/.github/actions/workflow-telemetry-action b/.github/actions/workflow-telemetry-action new file mode 160000 index 000000000000..f974e0c5942f --- /dev/null +++ 
b/.github/actions/workflow-telemetry-action @@ -0,0 +1 @@ +Subproject commit f974e0c5942f8f37973c4cc395704165fbe629ba diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml new file mode 100644 index 000000000000..7c5418e87d19 --- /dev/null +++ b/.github/boring-cyborg.yml @@ -0,0 +1,72 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# Details: https://github.com/apps/boring-cyborg + +###### IssueLink Adder ################################################################################################# +# Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. +insertIssueLinkInPrDescription: + # specify the placeholder for the issue link that should be present in the description + descriptionIssuePlaceholderRegexp: "^fix: (.*)$" + matchers: + # you can have several matches - for different types of issues + # only the first matching entry is replaced + jiraIssueMatch: + # specify the regexp of issue id that you can find in the title of the PR + # the match groups can be used to build the issue id (${1}, ${2}, etc.). 
+      titleIssueIdRegexp: \[((Feature|Fix|Improvement|Test|E2E|Doc)-[0-9]+)\] + # the issue link to be added. ${1}, ${2} ... are replaced with the match groups from the + # title match (remember to use quotes) + descriptionIssueLink: "[${1}](https://github.com/apache/dolphinscheduler/${1}/)" + +###### Title Validator ################################################################################################# +# Verifies if commit/PR titles match the regexp specified +verifyTitles: + # Regular expression that should be matched by titles of commits or PR + titleRegexp: ^\[DSIP-[0-9]+\].*$|^\[Feature-[0-9]+\].*$|^\[Fix-[0-9]+\].*$|^\[Doc-[0-9]+\].*$|^\[Improvement-[0-9]+\].*$|^\[Test-[0-9]+\].*$|^\[E2E-[0-9]+\].*$|^\[CI].*$|^\[Chore].*$ + # If set to true, it will always check the PR title (as opposed to the individual commits). + alwaysUsePrTitle: true + # If set to true, it will only check the commit in case there is a single commit. + # In case of multiple commits it will check PR title. + # This reflects the standard behaviour of Github that for `Squash & Merge` GitHub + # uses the PR title rather than commit messages for the squashed commit ¯\_(ツ)_/¯ + # For single-commit PRs it takes the squashed commit message from the commit as expected. + # + # If set to false it will check all commit messages. This is useful when you do not squash commits at merge. + validateEitherPrOrSingleCommitTitle: true + # The title the GitHub status should appear from. + statusTitle: "Title Validator" + # A custom message to be displayed when the title passes validation. + successMessage: "Title Validation successful!" + # A custom message to be displayed when the title fails validation. + # Allows insertion of ${type} (commit/PR), ${title} (the title validated) and ${regex} (the titleRegexp above).
+ failureMessage: "Wrong ${type} title: ${title}" + +# Various Flags to control behaviour of the "Labeler" +labelerFlags: + # If this flag is changed to 'false', labels would only be added when the PR is first created + # and not when existing PR is updated. + # The default is 'true' which means the labels would be added when PR is updated even if they + # were removed by the user + labelOnPRUpdates: true + +# Comment to be posted to welcome users when they open their first PR +firstPRWelcomeComment: > + Thanks for opening this pull request! Please check out our contributing guidelines. (https://github.com/apache/dolphinscheduler/blob/dev/docs/docs/en/contribute/join/pull-request.md) + +# Comment to be posted to congratulate user on their first merged PR +firstPRMergeComment: > + Awesome work, congrats on your first merged pull request! diff --git a/.github/mergeable.yml b/.github/mergeable.yml deleted file mode 100644 index a1df2e7410b0..000000000000 --- a/.github/mergeable.yml +++ /dev/null @@ -1,62 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
---- -version: 2 -mergeable: - # we can not use `pull_request.*` which including event `pull_request.labeled`, according to https://github.com/mergeability/mergeable/issues/643, - # otherwise mergeable will keep add or remove label endless, we just need this CI act like the default behavior as - # GitHub action workflow `pull_requests` https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request like, - # which only trigger runs when a pull_request event's activity type is opened, synchronize, or reopened - - when: pull_request.opened, pull_request.reopened, pull_request.synchronize - name: sync-sql-ddl - validate: - # Sql files must change synchronize - - do: dependent - files: - - 'dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql' - - 'dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql' - - 'dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql' - message: 'Sql files not change synchronize' - # Add labels 'sql not sync' and comment to reviewers if Sql files not change synchronize - fail: - - do: comment - payload: - body: > - :warning: This PR do not change database DDL synchronize. - leave_old_comment: false - - do: labels - add: 'sql not sync' - # Remove labels 'sql not sync' if pass - pass: - - do: labels - delete: 'sql not sync' - - - when: pull_request.* - name: milestone-label-check - validate: - - do: milestone - no_empty: - enabled: false # Cannot be empty when true. - message: 'Milestone is required and cannot be empty.' - - do: label - and: - - must_include: - regex: 'feature|bug|improvement|document|chore|revert' - message: 'Label must include one of the following: `feature`, `bug`, `improvement`, `document`, `chore`, `revert`' - - must_include: - regex: 'ready-to-merge' - message: 'Please check if there are PRs that already have a `ready-to-merge` label and can be merged, if exists please merge them first.' 
diff --git a/.github/workflows/api-test.yml b/.github/workflows/api-test.yml index fd39bd097791..c635b1bc8b43 100644 --- a/.github/workflows/api-test.yml +++ b/.github/workflows/api-test.yml @@ -20,6 +20,8 @@ on: push: branches: - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' name: API-Test @@ -35,7 +37,7 @@ jobs: outputs: not-ignore: ${{ steps.filter.outputs.not-ignore }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -49,7 +51,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 20 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Sanity Check @@ -73,7 +75,7 @@ jobs: run: | docker save apache/dolphinscheduler-standalone-server:ci -o /tmp/standalone-image.tar \ && du -sh /tmp/standalone-image.tar - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 name: Upload Docker Images with: name: standalone-image @@ -104,7 +106,7 @@ jobs: env: RECORDING_PATH: /tmp/recording-${{ matrix.case.name }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Cache local Maven repository @@ -113,7 +115,7 @@ jobs: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-api-test restore-keys: ${{ runner.os }}-maven- - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4 name: Download Docker Images with: name: standalone-image @@ -125,9 +127,9 @@ jobs: run: | ./mvnw -B -f dolphinscheduler-api-test/pom.xml -am \ -DfailIfNoTests=false \ - -Dspotless.skip=false \ + -Dspotless.skip=true \ -Dtest=${{ matrix.case.class }} test - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 if: always() name: Upload Recording with: diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index ea09a17fd2ae..8e83cd979274 100644 --- a/.github/workflows/backend.yml +++ 
b/.github/workflows/backend.yml @@ -44,7 +44,7 @@ jobs: not-ignore: ${{ steps.filter.outputs.not-ignore }} db-schema: ${{ steps.filter.outputs.db-schema }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -63,7 +63,7 @@ jobs: java: [ '8', '11' ] timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Set up JDK ${{ matrix.java }} @@ -91,7 +91,7 @@ jobs: -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 - name: Check dependency license run: tools/dependencies/check-LICENSE.sh - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 if: ${{ matrix.java == '8' }} name: Upload Binary Package with: @@ -106,15 +106,19 @@ jobs: strategy: matrix: case: - - name: cluster-test-mysql - script: .github/workflows/cluster-test/mysql/start-job.sh - - name: cluster-test-postgresql - script: .github/workflows/cluster-test/postgresql/start-job.sh + - name: cluster-test-mysql-with-zookeeper-registry + script: .github/workflows/cluster-test/mysql_with_zookeeper_registry/start-job.sh + - name: cluster-test-mysql-with-mysql-registry + script: .github/workflows/cluster-test/mysql_with_mysql_registry/start-job.sh + - name: cluster-test-postgresql-zookeeper-registry + script: .github/workflows/cluster-test/postgresql_with_zookeeper_registry/start-job.sh + - name: cluster-test-postgresql-with-postgresql-registry + script: .github/workflows/cluster-test/postgresql_with_postgresql_registry/start-job.sh steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4 name: Download Binary Package with: # Only run cluster test on jdk8 @@ -149,7 +153,7 @@ jobs: fail-fast: false matrix: db: ["mysql", "postgresql"] - version: ["2.0.9", "3.0.6", "3.1.9", "3.2.0"] + version: ["3.1.9", "3.2.0"] steps: - name: Set up 
JDK 8 uses: actions/setup-java@v2 @@ -161,7 +165,7 @@ jobs: mkdir -p dolphinscheduler/dev dolphinscheduler/${{ matrix.version }} curl -sSf https://atlasgo.sh | sh - name: Download Tarball - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: binary-package-8 path: dolphinscheduler/dev diff --git a/.github/workflows/cluster-test/mysql/Dockerfile b/.github/workflows/cluster-test/mysql_with_mysql_registry/Dockerfile similarity index 85% rename from .github/workflows/cluster-test/mysql/Dockerfile rename to .github/workflows/cluster-test/mysql_with_mysql_registry/Dockerfile index c7d6abe8890b..12c7db3c187b 100644 --- a/.github/workflows/cluster-test/mysql/Dockerfile +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/Dockerfile @@ -28,10 +28,10 @@ RUN mv /root/apache-dolphinscheduler-*-SNAPSHOT-bin /root/apache-dolphinschedule ENV DOLPHINSCHEDULER_HOME /root/apache-dolphinscheduler-test-SNAPSHOT-bin #Setting install.sh -COPY .github/workflows/cluster-test/mysql/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh +COPY .github/workflows/cluster-test/mysql_with_mysql_registry/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh #Setting dolphinscheduler_env.sh -COPY .github/workflows/cluster-test/mysql/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh +COPY .github/workflows/cluster-test/mysql_with_mysql_registry/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh #Download mysql jar ENV MYSQL_URL "https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar" @@ -43,6 +43,6 @@ cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME/tools/libs/$MYSQL_DRIVER #Deploy -COPY .github/workflows/cluster-test/mysql/deploy.sh /root/deploy.sh +COPY .github/workflows/cluster-test/mysql_with_mysql_registry/deploy.sh 
/root/deploy.sh CMD [ "/bin/bash", "/root/deploy.sh" ] diff --git a/.github/workflows/cluster-test/mysql_with_mysql_registry/deploy.sh b/.github/workflows/cluster-test/mysql_with_mysql_registry/deploy.sh new file mode 100644 index 000000000000..72b2a630faba --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/deploy.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +set -euox pipefail + + +USER=root + +#Create database +mysql -hmysql -P3306 -uroot -p123456 -e "CREATE DATABASE IF NOT EXISTS dolphinscheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;" + +#Sudo +sed -i '$a'$USER' ALL=(ALL) NOPASSWD: ALL' /etc/sudoers +sed -i 's/Defaults requiretty/#Defaults requiretty/g' /etc/sudoers + +#SSH +ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa +cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys +chmod 600 ~/.ssh/authorized_keys +service ssh start + +#Init schema +/bin/bash $DOLPHINSCHEDULER_HOME/tools/bin/upgrade-schema.sh +/bin/bash $DOLPHINSCHEDULER_HOME/tools/bin/initialize-jdbc-registry.sh + +#Start Cluster +/bin/bash $DOLPHINSCHEDULER_HOME/bin/start-all.sh + +#Keep running +tail -f /dev/null diff --git a/.github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-base.yaml b/.github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-base.yaml new file mode 100644 index 000000000000..d59e3c868ce1 --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-base.yaml @@ -0,0 +1,34 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# + +version: "3" + +services: + mysql: + container_name: mysql + image: mysql:5.7.36 + command: --default-authentication-plugin=mysql_native_password + restart: always + environment: + MYSQL_ROOT_PASSWORD: 123456 + ports: + - "3306:3306" + healthcheck: + test: mysqladmin ping -h 127.0.0.1 -u root --password=$$MYSQL_ROOT_PASSWORD + interval: 5s + timeout: 60s + retries: 120 diff --git a/.github/workflows/cluster-test/mysql/docker-compose-cluster.yaml b/.github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-cluster.yaml similarity index 100% rename from .github/workflows/cluster-test/mysql/docker-compose-cluster.yaml rename to .github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-cluster.yaml diff --git a/.github/workflows/cluster-test/mysql_with_mysql_registry/dolphinscheduler_env.sh b/.github/workflows/cluster-test/mysql_with_mysql_registry/dolphinscheduler_env.sh new file mode 100755 index 000000000000..8536eb0905a9 --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/dolphinscheduler_env.sh @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# JAVA_HOME, will use it to start DolphinScheduler server +export JAVA_HOME=${JAVA_HOME:-/opt/java/openjdk} + +# Database related configuration, set database type, username and password +export DATABASE=${DATABASE:-mysql} +export SPRING_PROFILES_ACTIVE=${DATABASE} +export SPRING_DATASOURCE_URL="jdbc:mysql://mysql:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8&useSSL=false" +export SPRING_DATASOURCE_USERNAME=root +export SPRING_DATASOURCE_PASSWORD=123456 + +# DolphinScheduler server related configuration +export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} +export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} + +# Registry center configuration, determines the type and link of the registry center +export REGISTRY_TYPE=${REGISTRY_TYPE:-jdbc} +export REGISTRY_HIKARI_CONFIG_JDBC_URL="jdbc:mysql://mysql:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8&useSSL=false" +export REGISTRY_HIKARI_CONFIG_USERNAME=root +export REGISTRY_HIKARI_CONFIG_PASSWORD=123456 + +# Tasks related configurations, need to change the configuration if you use the related tasks. 
+export HADOOP_HOME=${HADOOP_HOME:-/opt/soft/hadoop} +export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/opt/soft/hadoop/etc/hadoop} +export SPARK_HOME=${SPARK_HOME:-/opt/soft/spark} +export PYTHON_LAUNCHER=${PYTHON_LAUNCHER:-/opt/soft/python/bin/python3} +export HIVE_HOME=${HIVE_HOME:-/opt/soft/hive} +export FLINK_HOME=${FLINK_HOME:-/opt/soft/flink} +export DATAX_LAUNCHER=${DATAX_LAUNCHER:-/opt/soft/datax/bin/datax.py} + +export PATH=$HADOOP_HOME/bin:$SPARK_HOME/bin:$PYTHON_LAUNCHER:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$DATAX_LAUNCHER:$PATH + +export MASTER_RESERVED_MEMORY=0.01 +export WORKER_RESERVED_MEMORY=0.01 + +# applicationId auto collection related configuration, the following configurations are unnecessary if setting appId.collect=log +#export HADOOP_CLASSPATH=`hadoop classpath`:${DOLPHINSCHEDULER_HOME}/tools/libs/* +#export SPARK_DIST_CLASSPATH=$HADOOP_CLASSPATH:$SPARK_DIST_CLASS_PATH +#export HADOOP_CLIENT_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$HADOOP_CLIENT_OPTS +#export SPARK_SUBMIT_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$SPARK_SUBMIT_OPTS +#export FLINK_ENV_JAVA_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$FLINK_ENV_JAVA_OPTS diff --git a/script/env/install_env.sh b/.github/workflows/cluster-test/mysql_with_mysql_registry/install_env.sh similarity index 79% rename from script/env/install_env.sh rename to .github/workflows/cluster-test/mysql_with_mysql_registry/install_env.sh index 8de1c78637f2..cd660febf88b 100644 --- a/script/env/install_env.sh +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/install_env.sh @@ -22,7 +22,7 @@ # including master, worker, api, alert. 
If you want to deploy in pseudo-distributed # mode, just write a pseudo-distributed hostname # Example for hostnames: ips="ds1,ds2,ds3,ds4,ds5", Example for IPs: ips="192.168.8.1,192.168.8.2,192.168.8.3,192.168.8.4,192.168.8.5" -ips=${ips:-"ds1,ds2,ds3,ds4,ds5"} +ips=${ips:-"localhost"} # Port of SSH protocol, default value is 22. For now we only support same port in all `ips` machine # modify it if you use different ssh port @@ -31,33 +31,28 @@ sshPort=${sshPort:-"22"} # A comma separated list of machine hostname or IP would be installed Master server, it # must be a subset of configuration `ips`. # Example for hostnames: masters="ds1,ds2", Example for IPs: masters="192.168.8.1,192.168.8.2" -masters=${masters:-"ds1,ds2"} +masters=${masters:-"localhost"} # A comma separated list of machine : or :.All hostname or IP must be a # subset of configuration `ips`, And workerGroup have default value as `default`, but we recommend you declare behind the hosts # Example for hostnames: workers="ds1:default,ds2:default,ds3:default", Example for IPs: workers="192.168.8.1:default,192.168.8.2:default,192.168.8.3:default" -workers=${workers:-"ds1:default,ds2:default,ds3:default,ds4:default,ds5:default"} +workers=${workers:-"localhost:default"} # A comma separated list of machine hostname or IP would be installed Alert server, it # must be a subset of configuration `ips`. # Example for hostname: alertServer="ds3", Example for IP: alertServer="192.168.8.3" -alertServer=${alertServer:-"ds3"} +alertServer=${alertServer:-"localhost"} # A comma separated list of machine hostname or IP would be installed API server, it # must be a subset of configuration `ips`. # Example for hostname: apiServers="ds1", Example for IP: apiServers="192.168.8.1" -apiServers=${apiServers:-"ds1"} +apiServers=${apiServers:-"localhost"} # The directory to install DolphinScheduler for all machine we config above. It will automatically be created by `install.sh` script if not exists. 
-# Do not set this configuration same as the current path (pwd). Do not add quotes to it if you using related path. -installPath=${installPath:-"/tmp/dolphinscheduler"} +# Do not set this configuration same as the current path (pwd) +installPath=${installPath:-"/root/apache-dolphinscheduler-*-SNAPSHOT-bin"} # The user to deploy DolphinScheduler for all machine we config above. For now user must create by yourself before running `install.sh` # script. The user needs to have sudo privileges and permissions to operate hdfs. If hdfs is enabled than the root directory needs # to be created by this user deployUser=${deployUser:-"dolphinscheduler"} - -# The root of zookeeper, for now DolphinScheduler default registry server is zookeeper. -# It will delete ${zkRoot} in the zookeeper when you run install.sh, so please keep it same as registry.zookeeper.namespace in yml files. -# Similarly, if you want to modify the value, please modify registry.zookeeper.namespace in yml files as well. -zkRoot=${zkRoot:-"/dolphinscheduler"} diff --git a/.github/workflows/cluster-test/mysql/running_test.sh b/.github/workflows/cluster-test/mysql_with_mysql_registry/running_test.sh similarity index 100% rename from .github/workflows/cluster-test/mysql/running_test.sh rename to .github/workflows/cluster-test/mysql_with_mysql_registry/running_test.sh diff --git a/.github/workflows/cluster-test/mysql/start-job.sh b/.github/workflows/cluster-test/mysql_with_mysql_registry/start-job.sh similarity index 74% rename from .github/workflows/cluster-test/mysql/start-job.sh rename to .github/workflows/cluster-test/mysql_with_mysql_registry/start-job.sh index ee67c5179b7f..0ce48c64ae9b 100644 --- a/.github/workflows/cluster-test/mysql/start-job.sh +++ b/.github/workflows/cluster-test/mysql_with_mysql_registry/start-job.sh @@ -18,16 +18,16 @@ set -euox pipefail #Start base service containers -docker-compose -f .github/workflows/cluster-test/mysql/docker-compose-base.yaml up -d +docker-compose -f 
.github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-base.yaml up -d #Build ds mysql cluster image -docker build -t jdk8:ds_mysql_cluster -f .github/workflows/cluster-test/mysql/Dockerfile . +docker build -t jdk8:ds_mysql_cluster -f .github/workflows/cluster-test/mysql_with_mysql_registry/Dockerfile . #Start ds mysql cluster container -docker-compose -f .github/workflows/cluster-test/mysql/docker-compose-cluster.yaml up -d +docker-compose -f .github/workflows/cluster-test/mysql_with_mysql_registry/docker-compose-cluster.yaml up -d #Running tests -/bin/bash .github/workflows/cluster-test/mysql/running_test.sh +/bin/bash .github/workflows/cluster-test/mysql_with_mysql_registry/running_test.sh #Cleanup docker rm -f $(docker ps -aq) diff --git a/.github/workflows/cluster-test/mysql_with_zookeeper_registry/Dockerfile b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/Dockerfile new file mode 100644 index 000000000000..574c05944239 --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/Dockerfile @@ -0,0 +1,48 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +FROM eclipse-temurin:8-jre + +RUN apt update ; \ + apt install -y wget default-mysql-client sudo openssh-server netcat-traditional ; + +COPY ./apache-dolphinscheduler-*-SNAPSHOT-bin.tar.gz /root +RUN tar -zxvf /root/apache-dolphinscheduler-*-SNAPSHOT-bin.tar.gz -C ~ + +RUN mv /root/apache-dolphinscheduler-*-SNAPSHOT-bin /root/apache-dolphinscheduler-test-SNAPSHOT-bin + +ENV DOLPHINSCHEDULER_HOME /root/apache-dolphinscheduler-test-SNAPSHOT-bin + +#Setting install.sh +COPY .github/workflows/cluster-test/mysql_with_zookeeper_registry/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh + +#Setting dolphinscheduler_env.sh +COPY .github/workflows/cluster-test/mysql_with_zookeeper_registry/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh + +#Download mysql jar +ENV MYSQL_URL "https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar" +ENV MYSQL_DRIVER "mysql-connector-java-8.0.16.jar" +RUN wget -O $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $MYSQL_URL ; \ +cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME/api-server/libs/$MYSQL_DRIVER ; \ +cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME/master-server/libs/$MYSQL_DRIVER ; \ +cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME/worker-server/libs/$MYSQL_DRIVER ; \ +cp $DOLPHINSCHEDULER_HOME/alert-server/libs/$MYSQL_DRIVER $DOLPHINSCHEDULER_HOME/tools/libs/$MYSQL_DRIVER + +#Deploy +COPY .github/workflows/cluster-test/mysql_with_zookeeper_registry/deploy.sh /root/deploy.sh + +CMD [ "/bin/bash", "/root/deploy.sh" ] diff --git a/.github/workflows/cluster-test/mysql/deploy.sh b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/deploy.sh similarity index 100% rename from .github/workflows/cluster-test/mysql/deploy.sh rename to .github/workflows/cluster-test/mysql_with_zookeeper_registry/deploy.sh diff --git 
a/.github/workflows/cluster-test/mysql/docker-compose-base.yaml b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-base.yaml similarity index 100% rename from .github/workflows/cluster-test/mysql/docker-compose-base.yaml rename to .github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-base.yaml diff --git a/.github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-cluster.yaml b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-cluster.yaml new file mode 100644 index 000000000000..7343c8eee71a --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-cluster.yaml @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3" + +services: + ds: + container_name: ds + image: jdk8:ds_mysql_cluster + restart: always + ports: + - "12345:12345" + - "5679:5679" + - "1235:1235" + - "50053:50053" diff --git a/.github/workflows/cluster-test/mysql/dolphinscheduler_env.sh b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/dolphinscheduler_env.sh similarity index 97% rename from .github/workflows/cluster-test/mysql/dolphinscheduler_env.sh rename to .github/workflows/cluster-test/mysql_with_zookeeper_registry/dolphinscheduler_env.sh index 671c70a5bba5..f64e59b768c5 100755 --- a/.github/workflows/cluster-test/mysql/dolphinscheduler_env.sh +++ b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/dolphinscheduler_env.sh @@ -28,7 +28,6 @@ export SPRING_DATASOURCE_PASSWORD=123456 # DolphinScheduler server related configuration export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} -export MASTER_FETCH_COMMAND_NUM=${MASTER_FETCH_COMMAND_NUM:-10} # Registry center configuration, determines the type and link of the registry center export REGISTRY_TYPE=${REGISTRY_TYPE:-zookeeper} diff --git a/.github/workflows/cluster-test/mysql/install_env.sh b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/install_env.sh similarity index 100% rename from .github/workflows/cluster-test/mysql/install_env.sh rename to .github/workflows/cluster-test/mysql_with_zookeeper_registry/install_env.sh diff --git a/.github/workflows/cluster-test/mysql_with_zookeeper_registry/running_test.sh b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/running_test.sh new file mode 100644 index 000000000000..7582c3ccc5ea --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/running_test.sh @@ -0,0 +1,108 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -x + + +API_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:12345/dolphinscheduler/actuator/health" +MASTER_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:5679/actuator/health" +WORKER_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:1235/actuator/health" +ALERT_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:50053/actuator/health" + +#Cluster start health check +TIMEOUT=180 +START_HEALTHCHECK_EXITCODE=0 + +for ((i=1; i<=TIMEOUT; i++)) +do + MASTER_HTTP_STATUS=$(eval "$MASTER_HEALTHCHECK_COMMAND") + WORKER_HTTP_STATUS=$(eval "$WORKER_HEALTHCHECK_COMMAND") + API_HTTP_STATUS=$(eval "$API_HEALTHCHECK_COMMAND") + ALERT_HTTP_STATUS=$(eval "$ALERT_HEALTHCHECK_COMMAND") + if [[ $MASTER_HTTP_STATUS -eq 200 && $WORKER_HTTP_STATUS -eq 200 && $API_HTTP_STATUS -eq 200 && $ALERT_HTTP_STATUS -eq 200 ]];then + START_HEALTHCHECK_EXITCODE=0 + else + START_HEALTHCHECK_EXITCODE=2 + fi + + if [[ $START_HEALTHCHECK_EXITCODE -eq 0 ]];then + echo "cluster start health check success" + break + fi + + if [[ $i -eq $TIMEOUT ]];then + if [[ $MASTER_HTTP_STATUS -ne 200 ]];then + docker exec -u root ds bash -c "cat 
/root/apache-dolphinscheduler-*-SNAPSHOT-bin/master-server/logs/dolphinscheduler-master.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/master-server/logs/*.out" + echo "master start health check failed" + fi + if [[ $WORKER_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/worker-server/logs/dolphinscheduler-worker.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/worker-server/logs/*.out" + echo "worker start health check failed" + fi + if [[ $API_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/api-server/logs/dolphinscheduler-api.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/api-server/logs/*.out" + echo "api start health check failed" + fi + if [[ $ALERT_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/alert-server/logs/dolphinscheduler-alert.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/alert-server/logs/*.out" + echo "alert start health check failed" + fi + exit $START_HEALTHCHECK_EXITCODE + fi + sleep 1 +done + +#Stop Cluster +docker exec -u root ds bash -c "/root/apache-dolphinscheduler-*-SNAPSHOT-bin/bin/stop-all.sh" + +#Cluster stop health check +sleep 5 +MASTER_HTTP_STATUS=$(eval "$MASTER_HEALTHCHECK_COMMAND") +if [[ $MASTER_HTTP_STATUS -ne 200 ]];then + echo "master stop health check success" +else + echo "master stop health check failed" + exit 3 +fi + +WORKER_HTTP_STATUS=$(eval "$WORKER_HEALTHCHECK_COMMAND") +if [[ $WORKER_HTTP_STATUS -ne 200 ]];then + echo "worker stop health check success" +else + echo "worker stop health check failed" + exit 3 +fi + +API_HTTP_STATUS=$(eval "$API_HEALTHCHECK_COMMAND") +if [[ $API_HTTP_STATUS -ne 200 ]];then + echo "api stop health check success" +else + echo "api stop 
health check failed" + exit 3 +fi + +ALERT_HTTP_STATUS=$(eval "$ALERT_HEALTHCHECK_COMMAND") +if [[ $ALERT_HTTP_STATUS -ne 200 ]];then + echo "alert stop health check success" +else + echo "alert stop health check failed" + exit 3 +fi diff --git a/.github/workflows/cluster-test/mysql_with_zookeeper_registry/start-job.sh b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/start-job.sh new file mode 100644 index 000000000000..db8d23147ed3 --- /dev/null +++ b/.github/workflows/cluster-test/mysql_with_zookeeper_registry/start-job.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -euox pipefail + +#Start base service containers +docker-compose -f .github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-base.yaml up -d + +#Build ds mysql cluster image +docker build -t jdk8:ds_mysql_cluster -f .github/workflows/cluster-test/mysql_with_zookeeper_registry/Dockerfile . 
+ +#Start ds mysql cluster container +docker-compose -f .github/workflows/cluster-test/mysql_with_zookeeper_registry/docker-compose-cluster.yaml up -d + +#Running tests +/bin/bash .github/workflows/cluster-test/mysql_with_zookeeper_registry/running_test.sh + +#Cleanup +docker rm -f $(docker ps -aq) diff --git a/.github/workflows/cluster-test/postgresql_with_postgresql_registry/Dockerfile b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/Dockerfile new file mode 100644 index 000000000000..bb2d9a5383d0 --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/Dockerfile @@ -0,0 +1,39 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +FROM eclipse-temurin:8-jre + +RUN apt update ; \ + apt install -y wget sudo openssh-server netcat-traditional ; + +COPY ./apache-dolphinscheduler-*-SNAPSHOT-bin.tar.gz /root +RUN tar -zxvf /root/apache-dolphinscheduler-*-SNAPSHOT-bin.tar.gz -C ~ + +RUN mv /root/apache-dolphinscheduler-*-SNAPSHOT-bin /root/apache-dolphinscheduler-test-SNAPSHOT-bin + +ENV DOLPHINSCHEDULER_HOME /root/apache-dolphinscheduler-test-SNAPSHOT-bin + +#Setting install.sh +COPY .github/workflows/cluster-test/postgresql_with_postgresql_registry/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh + +#Setting dolphinscheduler_env.sh +COPY .github/workflows/cluster-test/postgresql_with_postgresql_registry/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh + +#Deploy +COPY .github/workflows/cluster-test/postgresql_with_postgresql_registry/deploy.sh /root/deploy.sh + +CMD [ "/bin/bash", "/root/deploy.sh" ] diff --git a/.github/workflows/cluster-test/postgresql_with_postgresql_registry/deploy.sh b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/deploy.sh new file mode 100644 index 000000000000..37bf3433c014 --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/deploy.sh @@ -0,0 +1,41 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +set -euox pipefail + + +USER=root + +#Sudo +sed -i '$a'$USER' ALL=(ALL) NOPASSWD: ALL' /etc/sudoers +sed -i 's/Defaults requiretty/#Defaults requiretty/g' /etc/sudoers + +#SSH +ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa +cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys +chmod 600 ~/.ssh/authorized_keys +service ssh start + +#Init schema +/bin/bash $DOLPHINSCHEDULER_HOME/tools/bin/upgrade-schema.sh +/bin/bash $DOLPHINSCHEDULER_HOME/tools/bin/initialize-jdbc-registry.sh + +#Start Cluster +/bin/bash $DOLPHINSCHEDULER_HOME/bin/start-all.sh + +#Keep running +tail -f /dev/null diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/resources/org/apache/dolphinscheduler/plugin/task/pigeon/config.properties b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-base.yaml similarity index 63% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/resources/org/apache/dolphinscheduler/plugin/task/pigeon/config.properties rename to .github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-base.yaml index c54e53ad41d1..1793d94f39dc 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/resources/org/apache/dolphinscheduler/plugin/task/pigeon/config.properties +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-base.yaml @@ -15,12 +15,21 @@ # limitations under the License.
# -job.trigger.url=http://%s/tjs/coredefine/coredefine.ajax -job.trigger.post.body=action=datax_action&emethod=trigger_fullbuild_task +version: "3" -job.cancel.post.body=action=core_action&event_submit_do_cancel_task=y&taskid=%s +services: + postgres: + container_name: postgres + image: postgres:14.1 + restart: always + environment: + POSTGRES_PASSWORD: postgres + POSTGRES_DB: dolphinscheduler + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 60s + retries: 120 -job.status.url=http://%s/tjs/config/config.ajax?action=collection_action&emethod=get_task_status -job.status.post.body={\n taskid: %s\n, log: false } - -job.logs.fetch.url=ws://%s/tjs/download/logfeedback?logtype=full&collection=%s&taskid=%s \ No newline at end of file diff --git a/.github/workflows/cluster-test/postgresql/docker-compose-cluster.yaml b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-cluster.yaml similarity index 100% rename from .github/workflows/cluster-test/postgresql/docker-compose-cluster.yaml rename to .github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-cluster.yaml diff --git a/.github/workflows/cluster-test/postgresql_with_postgresql_registry/dolphinscheduler_env.sh b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/dolphinscheduler_env.sh new file mode 100644 index 000000000000..29f8570319b1 --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/dolphinscheduler_env.sh @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# JAVA_HOME, will use it to start DolphinScheduler server +export JAVA_HOME=${JAVA_HOME:-/opt/java/openjdk} + +# Database related configuration, set database type, username and password +export DATABASE=${DATABASE:-postgresql} +export SPRING_PROFILES_ACTIVE=${DATABASE} +export SPRING_DATASOURCE_URL="jdbc:postgresql://postgres:5432/dolphinscheduler" +export SPRING_DATASOURCE_USERNAME=postgres +export SPRING_DATASOURCE_PASSWORD=postgres + +# DolphinScheduler server related configuration +export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} +export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} + +# Registry center configuration, determines the type and link of the registry center +export REGISTRY_TYPE=jdbc +export REGISTRY_HIKARI_CONFIG_JDBC_URL="jdbc:postgresql://postgres:5432/dolphinscheduler" +export REGISTRY_HIKARI_CONFIG_USERNAME=postgres +export REGISTRY_HIKARI_CONFIG_PASSWORD=postgres + +# Tasks related configurations, need to change the configuration if you use the related tasks. 
+export HADOOP_HOME=${HADOOP_HOME:-/opt/soft/hadoop} +export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/opt/soft/hadoop/etc/hadoop} +export SPARK_HOME=${SPARK_HOME:-/opt/soft/spark} +export PYTHON_LAUNCHER=${PYTHON_LAUNCHER:-/opt/soft/python/bin/python3} +export HIVE_HOME=${HIVE_HOME:-/opt/soft/hive} +export FLINK_HOME=${FLINK_HOME:-/opt/soft/flink} +export DATAX_LAUNCHER=${DATAX_LAUNCHER:-/opt/soft/datax/bin/datax.py} + +export PATH=$HADOOP_HOME/bin:$SPARK_HOME/bin:$PYTHON_LAUNCHER:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$DATAX_LAUNCHER:$PATH + +export MASTER_RESERVED_MEMORY=0.01 +export WORKER_RESERVED_MEMORY=0.01 + +# applicationId auto collection related configuration, the following configurations are unnecessary if setting appId.collect=log +#export HADOOP_CLASSPATH=`hadoop classpath`:${DOLPHINSCHEDULER_HOME}/tools/libs/* +#export SPARK_DIST_CLASSPATH=$HADOOP_CLASSPATH:$SPARK_DIST_CLASS_PATH +#export HADOOP_CLIENT_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$HADOOP_CLIENT_OPTS +#export SPARK_SUBMIT_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$SPARK_SUBMIT_OPTS +#export FLINK_ENV_JAVA_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspectjweaver-1.9.7.jar":$FLINK_ENV_JAVA_OPTS diff --git a/.github/workflows/cluster-test/postgresql_with_postgresql_registry/install_env.sh b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/install_env.sh new file mode 100644 index 000000000000..cd660febf88b --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/install_env.sh @@ -0,0 +1,58 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# --------------------------------------------------------- +# INSTALL MACHINE +# --------------------------------------------------------- +# A comma separated list of machine hostname or IP would be installed DolphinScheduler, +# including master, worker, api, alert. If you want to deploy in pseudo-distributed +# mode, just write a pseudo-distributed hostname +# Example for hostnames: ips="ds1,ds2,ds3,ds4,ds5", Example for IPs: ips="192.168.8.1,192.168.8.2,192.168.8.3,192.168.8.4,192.168.8.5" +ips=${ips:-"localhost"} + +# Port of SSH protocol, default value is 22. For now we only support same port in all `ips` machine +# modify it if you use different ssh port +sshPort=${sshPort:-"22"} + +# A comma separated list of machine hostname or IP would be installed Master server, it +# must be a subset of configuration `ips`. 
+# Example for hostnames: masters="ds1,ds2", Example for IPs: masters="192.168.8.1,192.168.8.2" +masters=${masters:-"localhost"} + +# A comma separated list of machine : or :.All hostname or IP must be a +# subset of configuration `ips`, And workerGroup have default value as `default`, but we recommend you declare behind the hosts +# Example for hostnames: workers="ds1:default,ds2:default,ds3:default", Example for IPs: workers="192.168.8.1:default,192.168.8.2:default,192.168.8.3:default" +workers=${workers:-"localhost:default"} + +# A comma separated list of machine hostname or IP would be installed Alert server, it +# must be a subset of configuration `ips`. +# Example for hostname: alertServer="ds3", Example for IP: alertServer="192.168.8.3" +alertServer=${alertServer:-"localhost"} + +# A comma separated list of machine hostname or IP would be installed API server, it +# must be a subset of configuration `ips`. +# Example for hostname: apiServers="ds1", Example for IP: apiServers="192.168.8.1" +apiServers=${apiServers:-"localhost"} + +# The directory to install DolphinScheduler for all machine we config above. It will automatically be created by `install.sh` script if not exists. +# Do not set this configuration same as the current path (pwd) +installPath=${installPath:-"/root/apache-dolphinscheduler-*-SNAPSHOT-bin"} + +# The user to deploy DolphinScheduler for all machine we config above. For now user must create by yourself before running `install.sh` +# script. The user needs to have sudo privileges and permissions to operate hdfs. 
If hdfs is enabled than the root directory needs +# to be created by this user +deployUser=${deployUser:-"dolphinscheduler"} diff --git a/.github/workflows/cluster-test/postgresql/running_test.sh b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/running_test.sh similarity index 100% rename from .github/workflows/cluster-test/postgresql/running_test.sh rename to .github/workflows/cluster-test/postgresql_with_postgresql_registry/running_test.sh diff --git a/.github/workflows/cluster-test/postgresql_with_postgresql_registry/start-job.sh b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/start-job.sh new file mode 100644 index 000000000000..e2b6b630e8c8 --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_postgresql_registry/start-job.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -euox pipefail + +#Start base service containers +docker-compose -f .github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-base.yaml up -d + +#Build ds postgresql cluster image +docker build -t jdk8:ds_postgresql_cluster -f .github/workflows/cluster-test/postgresql_with_postgresql_registry/Dockerfile . 
+ +#Start ds postgresql cluster container +docker-compose -f .github/workflows/cluster-test/postgresql_with_postgresql_registry/docker-compose-cluster.yaml up -d + +#Running tests +/bin/bash .github/workflows/cluster-test/postgresql_with_postgresql_registry/running_test.sh + +#Cleanup +docker rm -f $(docker ps -aq) diff --git a/.github/workflows/cluster-test/postgresql/Dockerfile b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/Dockerfile similarity index 77% rename from .github/workflows/cluster-test/postgresql/Dockerfile rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/Dockerfile index 38234ee7b343..077b5c97b89f 100644 --- a/.github/workflows/cluster-test/postgresql/Dockerfile +++ b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/Dockerfile @@ -28,12 +28,12 @@ RUN mv /root/apache-dolphinscheduler-*-SNAPSHOT-bin /root/apache-dolphinschedule ENV DOLPHINSCHEDULER_HOME /root/apache-dolphinscheduler-test-SNAPSHOT-bin #Setting install.sh -COPY .github/workflows/cluster-test/postgresql/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh +COPY .github/workflows/cluster-test/postgresql_with_zookeeper_registry/install_env.sh $DOLPHINSCHEDULER_HOME/bin/env/install_env.sh #Setting dolphinscheduler_env.sh -COPY .github/workflows/cluster-test/postgresql/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh +COPY .github/workflows/cluster-test/postgresql_with_zookeeper_registry/dolphinscheduler_env.sh $DOLPHINSCHEDULER_HOME/bin/env/dolphinscheduler_env.sh #Deploy -COPY .github/workflows/cluster-test/postgresql/deploy.sh /root/deploy.sh +COPY .github/workflows/cluster-test/postgresql_with_zookeeper_registry/deploy.sh /root/deploy.sh CMD [ "/bin/bash", "/root/deploy.sh" ] diff --git a/.github/workflows/cluster-test/postgresql/deploy.sh b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/deploy.sh similarity index 100% rename from 
.github/workflows/cluster-test/postgresql/deploy.sh rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/deploy.sh diff --git a/.github/workflows/cluster-test/postgresql/docker-compose-base.yaml b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-base.yaml similarity index 100% rename from .github/workflows/cluster-test/postgresql/docker-compose-base.yaml rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-base.yaml diff --git a/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-cluster.yaml b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-cluster.yaml new file mode 100644 index 000000000000..9ab79ea44dee --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-cluster.yaml @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3" + +services: + ds: + container_name: ds + image: jdk8:ds_postgresql_cluster + restart: always + ports: + - "12345:12345" + - "5679:5679" + - "1235:1235" + - "50053:50053" diff --git a/.github/workflows/cluster-test/postgresql/dolphinscheduler_env.sh b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/dolphinscheduler_env.sh similarity index 97% rename from .github/workflows/cluster-test/postgresql/dolphinscheduler_env.sh rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/dolphinscheduler_env.sh index 1dbd63254eee..685171605850 100644 --- a/.github/workflows/cluster-test/postgresql/dolphinscheduler_env.sh +++ b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/dolphinscheduler_env.sh @@ -28,7 +28,6 @@ export SPRING_DATASOURCE_PASSWORD=postgres # DolphinScheduler server related configuration export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} -export MASTER_FETCH_COMMAND_NUM=${MASTER_FETCH_COMMAND_NUM:-10} # Registry center configuration, determines the type and link of the registry center export REGISTRY_TYPE=${REGISTRY_TYPE:-zookeeper} diff --git a/.github/workflows/cluster-test/postgresql/install_env.sh b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/install_env.sh similarity index 100% rename from .github/workflows/cluster-test/postgresql/install_env.sh rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/install_env.sh diff --git a/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/running_test.sh b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/running_test.sh new file mode 100644 index 000000000000..0bc861c389d1 --- /dev/null +++ b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/running_test.sh @@ -0,0 +1,109 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license 
agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -x + + +API_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:12345/dolphinscheduler/actuator/health" +MASTER_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:5679/actuator/health" +WORKER_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:1235/actuator/health" +ALERT_HEALTHCHECK_COMMAND="curl -I -m 10 -o /dev/null -s -w %{http_code} http://0.0.0.0:50053/actuator/health" + +#Cluster start health check +TIMEOUT=180 +START_HEALTHCHECK_EXITCODE=0 + +for ((i=1; i<=TIMEOUT; i++)) +do + MASTER_HTTP_STATUS=$(eval "$MASTER_HEALTHCHECK_COMMAND") + WORKER_HTTP_STATUS=$(eval "$WORKER_HEALTHCHECK_COMMAND") + API_HTTP_STATUS=$(eval "$API_HEALTHCHECK_COMMAND") + ALERT_HTTP_STATUS=$(eval "$ALERT_HEALTHCHECK_COMMAND") + if [[ $MASTER_HTTP_STATUS -eq 200 && $WORKER_HTTP_STATUS -eq 200 && $API_HTTP_STATUS -eq 200 && $ALERT_HTTP_STATUS -eq 200 ]];then + START_HEALTHCHECK_EXITCODE=0 + else + START_HEALTHCHECK_EXITCODE=2 + fi + + if [[ $START_HEALTHCHECK_EXITCODE -eq 0 ]];then + echo "cluster start health check success" + break + fi + + if [[ $i -eq $TIMEOUT ]];then + if [[ $MASTER_HTTP_STATUS -ne 200 ]];then + docker exec -u root ds bash -c "cat 
/root/apache-dolphinscheduler-*-SNAPSHOT-bin/master-server/logs/dolphinscheduler-master.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/master-server/logs/*.out" + echo "master start health check failed" + fi + if [[ $WORKER_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/worker-server/logs/dolphinscheduler-worker.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/worker-server/logs/*.out" + echo "worker start health check failed" + fi + if [[ $API_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/api-server/logs/dolphinscheduler-api.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/api-server/logs/*.out" + echo "api start health check failed" + fi + if [[ $ALERT_HTTP_STATUS -ne 200 ]]; then + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/alert-server/logs/dolphinscheduler-alert.log" + docker exec -u root ds bash -c "cat /root/apache-dolphinscheduler-*-SNAPSHOT-bin/alert-server/logs/*.out" + echo "alert start health check failed" + fi + exit $START_HEALTHCHECK_EXITCODE + fi + + sleep 1 +done + +#Stop Cluster +docker exec -u root ds bash -c "/root/apache-dolphinscheduler-*-SNAPSHOT-bin/bin/stop-all.sh" + +#Cluster stop health check +sleep 5 +MASTER_HTTP_STATUS=$(eval "$MASTER_HEALTHCHECK_COMMAND") +if [[ $MASTER_HTTP_STATUS -ne 200 ]];then + echo "master stop health check success" +else + echo "master stop health check failed" + exit 3 +fi + +WORKER_HTTP_STATUS=$(eval "$WORKER_HEALTHCHECK_COMMAND") +if [[ $WORKER_HTTP_STATUS -ne 200 ]];then + echo "worker stop health check success" +else + echo "worker stop health check failed" + exit 3 +fi + +API_HTTP_STATUS=$(eval "$API_HEALTHCHECK_COMMAND") +if [[ $API_HTTP_STATUS -ne 200 ]];then + echo "api stop health check success" +else + echo "api stop 
health check failed" + exit 3 +fi + +ALERT_HTTP_STATUS=$(eval "$ALERT_HEALTHCHECK_COMMAND") +if [[ $ALERT_HTTP_STATUS -ne 200 ]];then + echo "alert stop health check success" +else + echo "alert stop health check failed" + exit 3 +fi diff --git a/.github/workflows/cluster-test/postgresql/start-job.sh b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/start-job.sh similarity index 73% rename from .github/workflows/cluster-test/postgresql/start-job.sh rename to .github/workflows/cluster-test/postgresql_with_zookeeper_registry/start-job.sh index ba0878e3ecf2..fe755c97f1a8 100644 --- a/.github/workflows/cluster-test/postgresql/start-job.sh +++ b/.github/workflows/cluster-test/postgresql_with_zookeeper_registry/start-job.sh @@ -18,16 +18,16 @@ set -euox pipefail #Start base service containers -docker-compose -f .github/workflows/cluster-test/postgresql/docker-compose-base.yaml up -d +docker-compose -f .github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-base.yaml up -d #Build ds postgresql cluster image -docker build -t jdk8:ds_postgresql_cluster -f .github/workflows/cluster-test/postgresql/Dockerfile . +docker build -t jdk8:ds_postgresql_cluster -f .github/workflows/cluster-test/postgresql_with_zookeeper_registry/Dockerfile . 
#Start ds postgresql cluster container -docker-compose -f .github/workflows/cluster-test/postgresql/docker-compose-cluster.yaml up -d +docker-compose -f .github/workflows/cluster-test/postgresql_with_zookeeper_registry/docker-compose-cluster.yaml up -d #Running tests -/bin/bash .github/workflows/cluster-test/postgresql/running_test.sh +/bin/bash .github/workflows/cluster-test/postgresql_with_zookeeper_registry/running_test.sh #Cleanup docker rm -f $(docker ps -aq) diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index 8810a33c96f4..79859e3df613 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -23,6 +23,8 @@ on: pull_request: branches: - 'dev' + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' concurrency: group: codeql-${{ github.event.pull_request.number || github.ref }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 591bb0a65bb1..81eb7f20738a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,7 +30,7 @@ jobs: timeout-minutes: 10 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Style Check run: ./mvnw spotless:check img-check: @@ -40,7 +40,7 @@ jobs: run: working-directory: docs steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python 3.9 uses: actions/setup-python@v2 with: @@ -54,7 +54,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - run: sudo npm install -g markdown-link-check@3.11.2 - run: sudo apt install plocate -y # NOTE: Change command from `find . -name "*.md"` to `find . 
-not -path "*/node_modules/*" -not -path "*/.tox/*" -name "*.md"` @@ -70,7 +70,7 @@ jobs: outputs: helm-doc: ${{ steps.filter.outputs.helm-doc }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -84,7 +84,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 20 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Generating helm-doc diff --git a/.github/workflows/e2e-k8s.yml b/.github/workflows/e2e-k8s.yml index fb402b3c7973..9074d97a8c30 100644 --- a/.github/workflows/e2e-k8s.yml +++ b/.github/workflows/e2e-k8s.yml @@ -20,6 +20,8 @@ on: push: branches: - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' name: E2E-K8S @@ -35,7 +37,7 @@ jobs: outputs: not-ignore: ${{ steps.filter.outputs.not-ignore }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -49,7 +51,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 20 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Build Image diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 60a2f4486541..6246416c36a8 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -20,6 +20,8 @@ on: push: branches: - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' name: E2E @@ -27,6 +29,8 @@ concurrency: group: e2e-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true +permissions: + pull-requests: write jobs: paths-filter: @@ -35,7 +39,7 @@ jobs: outputs: not-ignore: ${{ steps.filter.outputs.not-ignore }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -49,7 +53,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 20 steps: - - 
uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Sanity Check @@ -74,7 +78,7 @@ jobs: run: | docker save apache/dolphinscheduler-standalone-server:ci -o /tmp/standalone-image.tar \ && du -sh /tmp/standalone-image.tar - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 name: Upload Docker Images with: name: standalone-image @@ -114,56 +118,16 @@ jobs: # class: org.apache.dolphinscheduler.e2e.cases.WorkflowSwitchE2ETest - name: FileManage class: org.apache.dolphinscheduler.e2e.cases.FileManageE2ETest - - name: UdfManage - class: org.apache.dolphinscheduler.e2e.cases.UdfManageE2ETest - - name: FunctionManage - class: org.apache.dolphinscheduler.e2e.cases.FunctionManageE2ETest - name: MysqlDataSource class: org.apache.dolphinscheduler.e2e.cases.MysqlDataSourceE2ETest - name: ClickhouseDataSource class: org.apache.dolphinscheduler.e2e.cases.ClickhouseDataSourceE2ETest - name: PostgresDataSource class: org.apache.dolphinscheduler.e2e.cases.PostgresDataSourceE2ETest - env: - RECORDING_PATH: /tmp/recording-${{ matrix.case.name }} - steps: - - uses: actions/checkout@v2 - with: - submodules: true - - name: Cache local Maven repository - uses: actions/cache@v3 - with: - path: ~/.m2/repository - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-e2e - restore-keys: ${{ runner.os }}-maven- - - uses: actions/download-artifact@v2 - name: Download Docker Images - with: - name: standalone-image - path: /tmp - - name: Load Docker Images - run: | - docker load -i /tmp/standalone-image.tar - - name: Run Test - run: | - ./mvnw -B -f dolphinscheduler-e2e/pom.xml -am \ - -DfailIfNoTests=false \ - -Dtest=${{ matrix.case.class }} test - - uses: actions/upload-artifact@v2 - if: always() - name: Upload Recording - with: - name: recording-${{ matrix.case.name }} - path: ${{ env.RECORDING_PATH }} - retention-days: 1 - e2e-optional: - name: ${{ matrix.case.name }} - needs: build - runs-on: ubuntu-latest - timeout-minutes: 30 - 
strategy: - matrix: - case: + - name: ShellTaskE2ETest + class: org.apache.dolphinscheduler.e2e.cases.tasks.ShellTaskE2ETest + - name: PythonTaskE2ETest + class: org.apache.dolphinscheduler.e2e.cases.tasks.PythonTaskE2ETest - name: SqlServerDataSource class: org.apache.dolphinscheduler.e2e.cases.SqlServerDataSourceE2ETest - name: HiveDataSource @@ -171,16 +135,25 @@ jobs: env: RECORDING_PATH: /tmp/recording-${{ matrix.case.name }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + java-version: 11 + distribution: 'adopt' + - name: Collect Workflow Telemetry + uses: ./.github/actions/workflow-telemetry-action + with: + comment_on_pr: false - name: Cache local Maven repository uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}-e2e restore-keys: ${{ runner.os }}-maven- - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4 name: Download Docker Images with: name: standalone-image @@ -193,7 +166,7 @@ jobs: ./mvnw -B -f dolphinscheduler-e2e/pom.xml -am \ -DfailIfNoTests=false \ -Dtest=${{ matrix.case.class }} test - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 if: always() name: Upload Recording with: diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index 502d669b66f6..1cb5f5a84b4a 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -20,7 +20,8 @@ name: Frontend on: push: branches: - - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' paths: - '.github/workflows/frontend.yml' - 'dolphinscheduler-ui/**' @@ -41,7 +42,7 @@ jobs: outputs: not-ignore: ${{ steps.filter.outputs.not-ignore }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -58,7 +59,7 @@ jobs: matrix: os: [ 
ubuntu-latest, macos-latest ] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - if: matrix.os == 'ubuntu-latest' @@ -82,7 +83,7 @@ jobs: needs: [ build, paths-filter ] if: always() steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Status run: | if [[ ${{ needs.paths-filter.outputs.not-ignore }} == 'false' && ${{ github.event_name }} == 'pull_request' ]]; then diff --git a/.github/workflows/issue-robot.yml b/.github/workflows/issue-robot.yml index ab00b346815d..06a363d11f7b 100644 --- a/.github/workflows/issue-robot.yml +++ b/.github/workflows/issue-robot.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - name: "Checkout ${{ github.ref }}" - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false submodules: true diff --git a/.github/workflows/mergeable.yml b/.github/workflows/mergeable.yml new file mode 100644 index 000000000000..835b34bf5761 --- /dev/null +++ b/.github/workflows/mergeable.yml @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +on: + pull_request: + types: + - opened + - reopened + - synchronize + - labeled + - unlabeled + - milestoned + - demilestoned + + +name: "Mergeable" + + +jobs: + result: + name: "Mergeable: milestone-label-check" + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - name: Check milestone and labels + uses: actions/github-script@v7 + with: + script: | + const { data } = await github.request("GET /repos/{owner}/{repo}/pulls/{pr}", { + owner: context.repo.owner, + repo: context.repo.repo, + pr: context.payload.pull_request.number + }); + if (data.milestone) { + core.info(`This pull request has a milestone: ${data.milestone.title}`); + } else { + core.setFailed(`A maintainer needs to set the milestone for this pull request.`); + } + let labeledFlag = false; + for (const label of data.labels) { + if (['feature', 'bug', 'improvement', 'document', 'chore', 'DSIP', 'CI&CD', 'revert'].includes(label.name)) { + core.info(`This pull request has a valid label: ${label.name}`); + labeledFlag = true; + break; + } + } + if (!labeledFlag) { + core.setFailed(`A maintainer needs to set a valid label for this pull request.`); + } diff --git a/.github/workflows/owasp-dependency-check.yaml b/.github/workflows/owasp-dependency-check.yaml index 54e51aafed2d..b4ee52c57d8d 100644 --- a/.github/workflows/owasp-dependency-check.yaml +++ b/.github/workflows/owasp-dependency-check.yaml @@ -20,7 +20,8 @@ name: OWASP Dependency Check on: push: branches: - - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' pull_request: paths: - '**/pom.xml' @@ -30,8 +31,9 @@ env: jobs: build: runs-on: ubuntu-latest + timeout-minutes: 120 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Set up JDK 8 @@ -42,9 +44,9 @@ jobs: - name: Run OWASP Dependency Check run: ./mvnw -B clean install verify dependency-check:check -DskipDepCheck=false -Dmaven.test.skip=true -Dspotless.skip=true - name: Upload report - uses: 
actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ cancelled() || failure() }} continue-on-error: true with: name: dependency report - path: target/dependency-check-report.html + path: target/dependency-check-report.html diff --git a/.github/workflows/publish-docker.yaml b/.github/workflows/publish-docker.yaml index 06aa8c695040..859e22f25c37 100644 --- a/.github/workflows/publish-docker.yaml +++ b/.github/workflows/publish-docker.yaml @@ -33,7 +33,7 @@ jobs: packages: write timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Cache local Maven repository uses: actions/cache@v3 with: diff --git a/.github/workflows/publish-helm-chart.yaml b/.github/workflows/publish-helm-chart.yaml index e38337369269..247a55f4cc27 100644 --- a/.github/workflows/publish-helm-chart.yaml +++ b/.github/workflows/publish-helm-chart.yaml @@ -33,7 +33,7 @@ jobs: packages: write timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set environment variables run: | if [[ ${{ github.event_name }} == "release" ]]; then diff --git a/.github/workflows/pull-request-robot.yml b/.github/workflows/pull-request-target-robot.yml similarity index 80% rename from .github/workflows/pull-request-robot.yml rename to .github/workflows/pull-request-target-robot.yml index 731849e46de5..938e55498abc 100644 --- a/.github/workflows/pull-request-robot.yml +++ b/.github/workflows/pull-request-target-robot.yml @@ -15,20 +15,21 @@ # limitations under the License. 
# -name: "pull-request-robot" +name: "pull-request-target-robot" on: pull_request_target: jobs: - labelRobot: + pullRequestTargetRobot: permissions: contents: read pull-requests: write runs-on: ubuntu-latest + timeout-minutes: 10 steps: - name: "Checkout ${{ github.ref }}" - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: persist-credentials: false submodules: true @@ -39,3 +40,8 @@ jobs: repo-token: "${{ secrets.GITHUB_TOKEN }}" configuration-path: .github/actions/labeler/labeler.yml sync-labels: true + + - name: "Auto assign in pull request" + uses: ./.github/actions/auto-assign-action + with: + configuration-path: .github/actions/auto-assign/auto-assign.yml diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index 6c9f41d7a34d..ad42ee57df5f 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -24,7 +24,8 @@ on: - '**/*.md' - 'dolphinscheduler-ui' branches: - - dev + - '[0-9]+.[0-9]+.[0-9]+-prepare' + - '[0-9]+.[0-9]+.[0-9]+-release' env: LOG_DIR: /tmp/dolphinscheduler @@ -40,7 +41,7 @@ jobs: outputs: not-ignore: ${{ steps.filter.outputs.not-ignore }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 id: filter with: @@ -57,7 +58,7 @@ jobs: java: ['8', '11'] timeout-minutes: 45 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - name: Sanity Check @@ -76,7 +77,7 @@ jobs: restore-keys: ${{ runner.os }}-maven- - name: Run Unit tests - run: ./mvnw clean verify -B -Dmaven.test.skip=false -Dspotless.skip=true -DskipUT=false -DskipIT=false + run: ./mvnw clean verify -B -Dmaven.test.skip=false -Dspotless.skip=true -DskipUT=false - name: Upload coverage report to codecov run: CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash) @@ -99,23 +100,11 @@ jobs: -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682 
-Dsonar.exclusions=,dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/* -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 - -DskipUT=true -DskipIT=true + -DskipUT=true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - - name: Collect logs - continue-on-error: true - run: | - mkdir -p ${LOG_DIR} - docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt - - - name: Upload logs - uses: actions/upload-artifact@v2 - continue-on-error: true - with: - name: unit-test-logs - path: ${LOG_DIR} result: name: Unit Test runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 342f13bd6aa4..813ad9c4bc2f 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ .target .idea/ !.idea/icon.png +!.idea/vcs.xml .run/ target/ dist/ diff --git a/.gitmodules b/.gitmodules index 3b91677a6aa8..0467f266493d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -21,3 +21,10 @@ [submodule ".github/actions/translate-on-issue"] path = .github/actions/translate-on-issue url = https://github.com/xingchun-chen/translation-helper +[submodule ".github/actions/auto-assign-action"] + path = .github/actions/auto-assign-action + url = https://github.com/kentaro-m/auto-assign-action.git + branch = 288f36f +[submodule ".github/actions/workflow-telemetry-action"] + path = .github/actions/workflow-telemetry-action + url = https://github.com/catchpoint/workflow-telemetry-action diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 000000000000..f30edcd8a1a3 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,16 @@ + + + + + + + + + diff --git a/.licenserc.yaml b/.licenserc.yaml index 821be80ea65d..91e8fbf86690 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -51,5 +51,6 @@ header: - '**/banner.txt' - '.terraform.lock.hcl' - deploy/kubernetes/dolphinscheduler/README.md.gotmpl + - .idea/vcs.xml comment: on-failure 
diff --git a/deploy/kubernetes/dolphinscheduler/README.md b/deploy/kubernetes/dolphinscheduler/README.md index 5659605b957a..4a6b880293d9 100644 --- a/deploy/kubernetes/dolphinscheduler/README.md +++ b/deploy/kubernetes/dolphinscheduler/README.md @@ -14,6 +14,8 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst |-----|------|---------|-------------| | alert.affinity | object | `{}` | Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints. More info: [node-affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity) | | alert.annotations | object | `{}` | You can use annotations to attach arbitrary non-identifying metadata to objects. Clients such as tools and libraries can retrieve this metadata. | +| alert.customizedConfig | object | `{}` | configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml | +| alert.enableCustomizedConfig | bool | `false` | enable configure custom config | | alert.enabled | bool | `true` | Enable or disable the Alert-Server component | | alert.env.JAVA_OPTS | string | `"-Xms512m -Xmx512m -Xmn256m"` | The jvm options for alert server | | alert.livenessProbe | object | `{"enabled":true,"failureThreshold":"3","initialDelaySeconds":"30","periodSeconds":"30","successThreshold":"1","timeoutSeconds":"5"}` | Periodic probe of container liveness. Container will be restarted if the probe fails. More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) | @@ -52,6 +54,8 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | alert.tolerations | list | `[]` | Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. 
| | api.affinity | object | `{}` | Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints. More info: [node-affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity) | | api.annotations | object | `{}` | You can use annotations to attach arbitrary non-identifying metadata to objects. Clients such as tools and libraries can retrieve this metadata. | +| api.customizedConfig | object | `{}` | configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-api/src/main/resources/application.yaml | +| api.enableCustomizedConfig | bool | `false` | enable configure custom config | | api.enabled | bool | `true` | Enable or disable the API-Server component | | api.env.JAVA_OPTS | string | `"-Xms512m -Xmx512m -Xmn256m"` | The jvm options for api server | | api.livenessProbe | object | `{"enabled":true,"failureThreshold":"3","initialDelaySeconds":"30","periodSeconds":"30","successThreshold":"1","timeoutSeconds":"5"}` | Periodic probe of container liveness. Container will be restarted if the probe fails. More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) | @@ -120,6 +124,12 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | conf.auto | bool | `false` | auto restart, if true, all components will be restarted automatically after the common configuration is updated. if false, you need to restart the components manually. default is false | | conf.common."alert.rpc.port" | int | `50052` | rpc port | | conf.common."appId.collect" | string | `"log"` | way to collect applicationId: log, aop | +| conf.common."aws.credentials.provider.type" | string | `"AWSStaticCredentialsProvider"` | | +| conf.common."aws.s3.access.key.id" | string | `"minioadmin"` | The AWS access key. if resource.storage.type=S3, and credentials.provider.type is AWSStaticCredentialsProvider. 
This configuration is required | +| conf.common."aws.s3.access.key.secret" | string | `"minioadmin"` | The AWS secret access key. if resource.storage.type=S3, and credentials.provider.type is AWSStaticCredentialsProvider. This configuration is required | +| conf.common."aws.s3.bucket.name" | string | `"dolphinscheduler"` | The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. | +| conf.common."aws.s3.endpoint" | string | `"http://minio:9000"` | You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn | +| conf.common."aws.s3.region" | string | `"ca-central-1"` | The AWS Region to use. if resource.storage.type=S3, This configuration is required | | conf.common."conda.path" | string | `"/opt/anaconda3/etc/profile.d/conda.sh"` | set path of conda.sh | | conf.common."data-quality.jar.dir" | string | `nil` | data quality option | | conf.common."data.basedir.path" | string | `"/tmp/dolphinscheduler"` | user data local directory path, please make sure the directory exists and have read write permissions | @@ -138,11 +148,6 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | conf.common."resource.alibaba.cloud.oss.bucket.name" | string | `"dolphinscheduler"` | oss bucket name, required if you set resource.storage.type=OSS | | conf.common."resource.alibaba.cloud.oss.endpoint" | string | `"https://oss-cn-hangzhou.aliyuncs.com"` | oss bucket endpoint, required if you set resource.storage.type=OSS | | conf.common."resource.alibaba.cloud.region" | string | `"cn-hangzhou"` | alibaba cloud region, required if you set resource.storage.type=OSS | -| conf.common."resource.aws.access.key.id" | string | `"minioadmin"` | The AWS access key. 
if resource.storage.type=S3 or use EMR-Task, This configuration is required | -| conf.common."resource.aws.region" | string | `"ca-central-1"` | The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required | -| conf.common."resource.aws.s3.bucket.name" | string | `"dolphinscheduler"` | The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. | -| conf.common."resource.aws.s3.endpoint" | string | `"http://minio:9000"` | You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn | -| conf.common."resource.aws.secret.access.key" | string | `"minioadmin"` | The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required | | conf.common."resource.azure.client.id" | string | `"minioadmin"` | azure storage account name, required if you set resource.storage.type=ABS | | conf.common."resource.azure.client.secret" | string | `"minioadmin"` | azure storage account key, required if you set resource.storage.type=ABS | | conf.common."resource.azure.subId" | string | `"minioadmin"` | azure storage subId, required if you set resource.storage.type=ABS | @@ -158,6 +163,7 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | conf.common."yarn.application.status.address" | string | `"http://ds1:%s/ws/v1/cluster/apps/%s"` | if resourcemanager HA is enabled or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ds1 to actual resourcemanager hostname | | conf.common."yarn.job.history.status.address" | string | `"http://ds1:19888/ws/v1/history/mapreduce/jobs/%s"` | job history status url when application number threshold is reached(default 10000, maybe it 
was set to 1000) | | conf.common."yarn.resourcemanager.ha.rm.ids" | string | `"192.168.xx.xx,192.168.xx.xx"` | if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single, keep this value empty | +| datasource.profile | string | `"postgresql"` | The profile of datasource | | externalDatabase.database | string | `"dolphinscheduler"` | The database of external database | | externalDatabase.driverClassName | string | `"org.postgresql.Driver"` | The driverClassName of external database | | externalDatabase.enabled | bool | `false` | If exists external database, and set postgresql.enable value to false. external database will be used, otherwise Dolphinscheduler's internal database will be used. | @@ -174,7 +180,7 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | image.master | string | `"dolphinscheduler-master"` | master image | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy. Options: Always, Never, IfNotPresent | | image.pullSecret | string | `""` | Specify a imagePullSecrets | -| image.registry | string | `"apache/dolphinscheduler"` | Docker image repository for the DolphinScheduler | +| image.registry | string | `"apache"` | Docker image repository for the DolphinScheduler | | image.tag | string | `"latest"` | Docker image version for the DolphinScheduler | | image.tools | string | `"dolphinscheduler-tools"` | tools image | | image.worker | string | `"dolphinscheduler-worker"` | worker image | @@ -189,6 +195,8 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | initImage.pullPolicy | string | `"IfNotPresent"` | Image pull policy. Options: Always, Never, IfNotPresent | | master.affinity | object | `{}` | Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints. 
More info: [node-affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity) | | master.annotations | object | `{}` | You can use annotations to attach arbitrary non-identifying metadata to objects. Clients such as tools and libraries can retrieve this metadata. | +| master.customizedConfig | object | `{}` | configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-master/src/main/resources/application.yaml | +| master.enableCustomizedConfig | bool | `false` | enable configure custom config | | master.enabled | bool | `true` | Enable or disable the Master component | | master.env.JAVA_OPTS | string | `"-Xms1g -Xmx1g -Xmn512m"` | The jvm options for master server | | master.env.MASTER_DISPATCH_TASK_NUM | string | `"3"` | Master dispatch task number per batch | @@ -200,9 +208,9 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | master.env.MASTER_KILL_APPLICATION_WHEN_HANDLE_FAILOVER | string | `"true"` | Master kill application when handle failover | | master.env.MASTER_MAX_HEARTBEAT_INTERVAL | string | `"10s"` | Master max heartbeat interval | | master.env.MASTER_SERVER_LOAD_PROTECTION_ENABLED | bool | `false` | If set true, will open master overload protection | -| master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max cpu usage, when the master's cpu usage is smaller then this value, master server can execute workflow. | | master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_DISK_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. | -| master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_JVM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. 
| +| master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_JVM_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max jvm cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. | +| master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. | | master.env.MASTER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. | | master.env.MASTER_STATE_WHEEL_INTERVAL | string | `"5s"` | master state wheel interval, the unit is second | | master.env.MASTER_TASK_COMMIT_INTERVAL | string | `"1s"` | master commit task interval, the unit is second | @@ -295,18 +303,19 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | timezone | string | `"Asia/Shanghai"` | World time and date for cities in all time zones | | worker.affinity | object | `{}` | Affinity is a group of affinity scheduling rules. If specified, the pod's scheduling constraints. More info: [node-affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity) | | worker.annotations | object | `{}` | You can use annotations to attach arbitrary non-identifying metadata to objects. Clients such as tools and libraries can retrieve this metadata. 
| +| worker.customizedConfig | object | `{}` | configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-worker/src/main/resources/application.yaml | +| worker.enableCustomizedConfig | bool | `false` | enable configure custom config | | worker.enabled | bool | `true` | Enable or disable the Worker component | | worker.env.DEFAULT_TENANT_ENABLED | bool | `false` | If set true, will use worker bootstrap user as the tenant to execute task when the tenant is `default`; | | worker.env.WORKER_EXEC_THREADS | string | `"100"` | Worker execute thread number to limit task instances | | worker.env.WORKER_HOST_WEIGHT | string | `"100"` | Worker host weight to dispatch tasks | | worker.env.WORKER_MAX_HEARTBEAT_INTERVAL | string | `"10s"` | Worker heartbeat interval | | worker.env.WORKER_SERVER_LOAD_PROTECTION_ENABLED | bool | `false` | If set true, will open worker overload protection | -| worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max cpu usage, when the worker's cpu usage is smaller then this value, worker server can be dispatched tasks. | | worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_DISK_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max disk usage , when the worker's disk usage is smaller then this value, worker server can be dispatched tasks. | -| worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_JVM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max jvm memory usage , when the worker's jvm memory usage is smaller then this value, worker server can be dispatched tasks. | +| worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_JVM_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max jvm cpu usage, when the worker's jvm cpu usage is smaller then this value, worker server can be dispatched tasks. 
| +| worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_CPU_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max system cpu usage, when the worker's system cpu usage is smaller then this value, worker server can be dispatched tasks. | | worker.env.WORKER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS | float | `0.7` | Worker max memory usage , when the worker's memory usage is smaller then this value, worker server can be dispatched tasks. | | worker.env.WORKER_TENANT_CONFIG_AUTO_CREATE_TENANT_ENABLED | bool | `true` | tenant corresponds to the user of the system, which is used by the worker to submit the job. If system does not have this user, it will be automatically created after the parameter worker.tenant.auto.create is true. | -| worker.env.WORKER_TENANT_CONFIG_DISTRIBUTED_TENANT | bool | `false` | Scenes to be used for distributed users. For example, users created by FreeIpa are stored in LDAP. This parameter only applies to Linux, When this parameter is true, worker.tenant.auto.create has no effect and will not automatically create tenants. | | worker.keda.advanced | object | `{}` | Specify HPA related options | | worker.keda.cooldownPeriod | int | `30` | How many seconds KEDA will wait before scaling to zero. 
Note that HPA has a separate cooldown period for scale-downs | | worker.keda.enabled | bool | `false` | Enable or disable the Keda component | @@ -314,7 +323,6 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst | worker.keda.minReplicaCount | int | `0` | Minimum number of workers created by keda | | worker.keda.namespaceLabels | object | `{}` | Keda namespace labels | | worker.keda.pollingInterval | int | `5` | How often KEDA polls the DolphinScheduler DB to report new scale requests to the HPA | -| worker.livenessProbe | object | `{"enabled":true,"failureThreshold":"3","initialDelaySeconds":"30","periodSeconds":"30","successThreshold":"1","timeoutSeconds":"5"}` | Periodic probe of container liveness. Container will be restarted if the probe fails. More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) | | worker.livenessProbe.enabled | bool | `true` | Turn on and off liveness probe | | worker.livenessProbe.failureThreshold | string | `"3"` | Minimum consecutive failures for the probe | | worker.livenessProbe.initialDelaySeconds | string | `"30"` | Delay before liveness probe is initiated | diff --git a/deploy/kubernetes/dolphinscheduler/templates/_helpers.tpl b/deploy/kubernetes/dolphinscheduler/templates/_helpers.tpl index 0b2a542cc1d0..368e0b290f0b 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/_helpers.tpl +++ b/deploy/kubernetes/dolphinscheduler/templates/_helpers.tpl @@ -51,7 +51,6 @@ Create a default common labels. {{- define "dolphinscheduler.common.labels" -}} app.kubernetes.io/instance: {{ .Release.Name }} app.kubernetes.io/managed-by: {{ .Release.Service }} -app.kubernetes.io/version: {{ .Chart.AppVersion }} {{- end -}} {{/* @@ -146,6 +145,10 @@ Create a database environment variables. 
{{- else }} value: {{ .Values.externalDatabase.type | quote }} {{- end }} +{{- if or .Values.mysql.enabled (eq .Values.externalDatabase.type "mysql") }} +- name: SPRING_PROFILES_ACTIVE + value: mysql +{{- end }} - name: SPRING_DATASOURCE_URL {{- if .Values.postgresql.enabled }} value: jdbc:postgresql://{{ template "dolphinscheduler.postgresql.fullname" . }}:5432/{{ .Values.postgresql.postgresqlDatabase }}?{{ .Values.postgresql.params }} diff --git a/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml new file mode 100644 index 000000000000..15bffe06f217 --- /dev/null +++ b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.alert.enableCustomizedConfig }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-alert + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + {{- include "dolphinscheduler.alert.labels" . 
| nindent 4 }} +data: +{{- range $path, $config := .Values.alert.customizedConfig }} + {{ $path }}: | +{{ $config | indent 4 -}} +{{- end -}} +{{- end -}} diff --git a/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-api.yaml b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-api.yaml new file mode 100644 index 000000000000..211d3dfda9cc --- /dev/null +++ b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-api.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.api.enableCustomizedConfig }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-api + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + {{- include "dolphinscheduler.api.labels" . 
| nindent 4 }} +data: +{{- range $path, $config := .Values.api.customizedConfig }} + {{ $path }}: | +{{ $config | indent 4 -}} +{{- end -}} +{{- end -}} diff --git a/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml new file mode 100644 index 000000000000..9bcb7dd4112f --- /dev/null +++ b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.master.enableCustomizedConfig }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-master + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + {{- include "dolphinscheduler.master.labels" . 
| nindent 4 }} +data: +{{- range $path, $config := .Values.master.customizedConfig }} + {{ $path }}: | +{{ $config | indent 4 -}} +{{- end -}} +{{- end -}} diff --git a/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml new file mode 100644 index 000000000000..c1d81a18024e --- /dev/null +++ b/deploy/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.worker.enableCustomizedConfig }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + {{- include "dolphinscheduler.worker.labels" . 
| nindent 4 }} +data: +{{- range $path, $config := .Values.worker.customizedConfig }} + {{ $path }}: | +{{ $config | indent 4 -}} +{{- end -}} +{{- end -}} diff --git a/deploy/kubernetes/dolphinscheduler/templates/configmap.yaml b/deploy/kubernetes/dolphinscheduler/templates/configmap.yaml index 66ea53854bfa..8c1d515ca78f 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/configmap.yaml +++ b/deploy/kubernetes/dolphinscheduler/templates/configmap.yaml @@ -32,12 +32,12 @@ data: {{- end }} {{- end }} {{- end }} - common_properties: |- + common.properties: |- {{- if index .Values.conf "common" }} {{- range $key, $value := index .Values.conf "common" }} {{- if and $.Values.minio.enabled }} {{- if eq $key "resource.storage.type" }}{{ $value = "S3" }}{{- end }} - {{- if eq $key "resource.aws.s3.endpoint" }}{{ $value = print "http://" (include "dolphinscheduler.minio.fullname" $) ":9000" }}{{- end }} + {{- if eq $key "aws.s3.endpoint" }}{{ $value = print "http://" (include "dolphinscheduler.minio.fullname" $) ":9000" }}{{- end }} {{- end }} {{ $key }}={{ $value }} {{- end }} diff --git a/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml b/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml index 41e01ef386a9..7a90d0802586 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml +++ b/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml @@ -114,7 +114,12 @@ spec: name: {{ include "dolphinscheduler.fullname" . }}-alert - name: config-volume mountPath: /opt/dolphinscheduler/conf/common.properties - subPath: common_properties + subPath: common.properties + {{- if .Values.alert.enableCustomizedConfig }} + - name: alert-config-volume + mountPath: /opt/dolphinscheduler/conf/application.yaml + subPath: application.yaml + {{- end }} volumes: - name: {{ include "dolphinscheduler.fullname" . 
}}-alert {{- if .Values.alert.persistentVolumeClaim.enabled }} @@ -126,4 +131,9 @@ spec: - name: config-volume configMap: name: {{ include "dolphinscheduler.fullname" . }}-configs + {{- if .Values.alert.enableCustomizedConfig }} + - name: alert-config-volume + configMap: + name: {{ include "dolphinscheduler.fullname" . }}-alert + {{- end }} {{- end }} diff --git a/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml b/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml index c2770cfd98c5..b4fa07256c05 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml +++ b/deploy/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml @@ -115,12 +115,17 @@ spec: name: {{ include "dolphinscheduler.fullname" . }}-api - name: config-volume mountPath: /opt/dolphinscheduler/conf/common.properties - subPath: common_properties + subPath: common.properties {{- if .Values.api.taskTypeFilter.enabled }} - name: config-volume mountPath: /opt/dolphinscheduler/conf/task-type-config.yaml subPath: task-type-config.yaml {{- end }} + {{- if .Values.api.enableCustomizedConfig }} + - name: api-config-volume + mountPath: /opt/dolphinscheduler/conf/application.yaml + subPath: application.yaml + {{- end }} {{- include "dolphinscheduler.sharedStorage.volumeMount" . | nindent 12 }} {{- include "dolphinscheduler.fsFileResource.volumeMount" . | nindent 12 }} {{- include "dolphinscheduler.ldap.ssl.volumeMount" . | nindent 12 }} @@ -136,6 +141,11 @@ spec: - name: config-volume configMap: name: {{ include "dolphinscheduler.fullname" . }}-configs + {{- if .Values.api.enableCustomizedConfig }} + - name: api-config-volume + configMap: + name: {{ include "dolphinscheduler.fullname" . }}-api + {{- end }} {{- include "dolphinscheduler.sharedStorage.volume" . | nindent 8 }} {{- include "dolphinscheduler.fsFileResource.volume" . | nindent 8 }} {{- include "dolphinscheduler.ldap.ssl.volume" . 
| nindent 8 }} diff --git a/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml b/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml index 888c35607eb1..c4174b9ca017 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml +++ b/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml @@ -109,10 +109,15 @@ spec: volumeMounts: - mountPath: "/opt/dolphinscheduler/logs" name: {{ include "dolphinscheduler.fullname" . }}-master + {{- if .Values.master.enableCustomizedConfig }} + - name: master-config-volume + mountPath: /opt/dolphinscheduler/conf/application.yaml + subPath: application.yaml + {{- end }} {{- include "dolphinscheduler.sharedStorage.volumeMount" . | nindent 12 }} - name: config-volume mountPath: /opt/dolphinscheduler/conf/common.properties - subPath: common_properties + subPath: common.properties {{- include "dolphinscheduler.etcd.ssl.volumeMount" . | nindent 12 }} volumes: - name: {{ include "dolphinscheduler.fullname" . }}-master @@ -122,6 +127,11 @@ spec: {{- else }} emptyDir: {} {{- end }} + {{- if .Values.master.enableCustomizedConfig }} + - name: master-config-volume + configMap: + name: {{ include "dolphinscheduler.fullname" . }}-master + {{- end }} {{- include "dolphinscheduler.sharedStorage.volume" . | nindent 8 }} - name: config-volume configMap: diff --git a/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml b/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml index 7a75849faea5..4c66ca5ffe65 100644 --- a/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml +++ b/deploy/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml @@ -111,9 +111,14 @@ spec: name: {{ include "dolphinscheduler.fullname" . 
}}-worker-data - mountPath: "/opt/dolphinscheduler/logs" name: {{ include "dolphinscheduler.fullname" . }}-worker-logs + {{- if .Values.worker.enableCustomizedConfig }} + - name: worker-config-volume + mountPath: /opt/dolphinscheduler/conf/application.yaml + subPath: application.yaml + {{- end }} - name: config-volume mountPath: /opt/dolphinscheduler/conf/common.properties - subPath: common_properties + subPath: common.properties {{- include "dolphinscheduler.sharedStorage.volumeMount" . | nindent 12 }} {{- include "dolphinscheduler.fsFileResource.volumeMount" . | nindent 12 }} {{- include "dolphinscheduler.etcd.ssl.volumeMount" . | nindent 12 }} @@ -139,6 +144,11 @@ spec: - name: {{ include "dolphinscheduler.fullname" . }}-worker-logs emptyDir: {} {{- end }} + {{- if .Values.worker.enableCustomizedConfig }} + - name: worker-config-volume + configMap: + name: {{ include "dolphinscheduler.fullname" . }}-worker + {{- end }} - name: config-volume configMap: name: {{ include "dolphinscheduler.fullname" . }}-configs diff --git a/deploy/kubernetes/dolphinscheduler/values.yaml b/deploy/kubernetes/dolphinscheduler/values.yaml index a8d9a34875ca..5d924d6bba04 100644 --- a/deploy/kubernetes/dolphinscheduler/values.yaml +++ b/deploy/kubernetes/dolphinscheduler/values.yaml @@ -31,7 +31,7 @@ initImage: image: # -- Docker image repository for the DolphinScheduler - registry: apache/dolphinscheduler + registry: apache # -- Docker image version for the DolphinScheduler tag: latest # -- Image pull policy. 
Options: Always, Never, IfNotPresent @@ -49,6 +49,10 @@ image: # -- tools image tools: dolphinscheduler-tools +datasource: + # -- The profile of datasource + profile: postgresql + postgresql: # -- If not exists external PostgreSQL, by default, the DolphinScheduler will use a internal PostgreSQL enabled: true @@ -246,20 +250,25 @@ conf: # -- resource store on HDFS/S3 path, resource file will store to this base path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended resource.storage.upload.base.path: /dolphinscheduler - # -- The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required - resource.aws.access.key.id: minioadmin + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + aws.credentials.provider.type: AWSStaticCredentialsProvider + + # -- The AWS access key. if resource.storage.type=S3, and credentials.provider.type is AWSStaticCredentialsProvider. This configuration is required + aws.s3.access.key.id: minioadmin - # -- The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required - resource.aws.secret.access.key: minioadmin + # -- The AWS secret access key. if resource.storage.type=S3, and credentials.provider.type is AWSStaticCredentialsProvider. This configuration is required + aws.s3.access.key.secret: minioadmin - # -- The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required - resource.aws.region: ca-central-1 + # -- The AWS Region to use. if resource.storage.type=S3, This configuration is required + aws.s3.region: ca-central-1 # -- The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. 
All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. - resource.aws.s3.bucket.name: dolphinscheduler + aws.s3.bucket.name: dolphinscheduler # -- You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn - resource.aws.s3.endpoint: http://minio:9000 + aws.s3.endpoint: http://minio:9000 # -- alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id: @@ -440,7 +449,19 @@ master: # requests: # memory: "2Gi" # cpu: "500m" - + # -- enable configure custom config + enableCustomizedConfig: false + # -- configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-master/src/main/resources/application.yaml + customizedConfig: { } + # customizedConfig: + # application.yaml: | + # profiles: + # active: postgresql + # banner: + # charset: UTF-8 + # jackson: + # time-zone: UTC + # date-format: "yyyy-MM-dd HH:mm:ss" # -- Periodic probe of container liveness. Container will be restarted if the probe fails. # More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) livenessProbe: @@ -508,10 +529,10 @@ master: MASTER_STATE_WHEEL_INTERVAL: "5s" # -- If set true, will open master overload protection MASTER_SERVER_LOAD_PROTECTION_ENABLED: false - # -- Master max cpu usage, when the master's cpu usage is smaller then this value, master server can execute workflow. - MASTER_SERVER_LOAD_PROTECTION_MAX_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 - # -- Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. 
- MASTER_SERVER_LOAD_PROTECTION_MAX_JVM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS: 0.7 + # -- Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. + MASTER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 + # -- Master max jvm cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. + MASTER_SERVER_LOAD_PROTECTION_MAX_JVM_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 # -- Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. MASTER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS: 0.7 # -- Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. @@ -569,6 +590,17 @@ worker: # -- Periodic probe of container liveness. Container will be restarted if the probe fails. # More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) + # -- enable configure custom config + enableCustomizedConfig: false + # -- configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-worker/src/main/resources/application.yaml + customizedConfig: { } +# customizedConfig: +# application.yaml: | +# banner: +# charset: UTF-8 +# jackson: +# time-zone: UTC +# date-format: "yyyy-MM-dd HH:mm:ss" livenessProbe: # -- Turn on and off liveness probe enabled: true @@ -629,10 +661,10 @@ worker: env: # -- If set true, will open worker overload protection WORKER_SERVER_LOAD_PROTECTION_ENABLED: false - # -- Worker max cpu usage, when the worker's cpu usage is smaller then this value, worker server can be dispatched tasks. - WORKER_SERVER_LOAD_PROTECTION_MAX_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 - # -- Worker max jvm memory usage , when the worker's jvm memory usage is smaller then this value, worker server can be dispatched tasks. 
- WORKER_SERVER_LOAD_PROTECTION_MAX_JVM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS: 0.7 + # -- Worker max system cpu usage, when the worker's system cpu usage is smaller then this value, worker server can be dispatched tasks. + WORKER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 + # -- Worker max jvm cpu usage, when the worker's jvm cpu usage is smaller then this value, worker server can be dispatched tasks. + WORKER_SERVER_LOAD_PROTECTION_MAX_JVM_CPU_USAGE_PERCENTAGE_THRESHOLDS: 0.7 # -- Worker max memory usage , when the worker's memory usage is smaller then this value, worker server can be dispatched tasks. WORKER_SERVER_LOAD_PROTECTION_MAX_SYSTEM_MEMORY_USAGE_PERCENTAGE_THRESHOLDS: 0.7 # -- Worker max disk usage , when the worker's disk usage is smaller then this value, worker server can be dispatched tasks. @@ -645,8 +677,6 @@ worker: WORKER_HOST_WEIGHT: "100" # -- tenant corresponds to the user of the system, which is used by the worker to submit the job. If system does not have this user, it will be automatically created after the parameter worker.tenant.auto.create is true. WORKER_TENANT_CONFIG_AUTO_CREATE_TENANT_ENABLED: true - # -- Scenes to be used for distributed users. For example, users created by FreeIpa are stored in LDAP. This parameter only applies to Linux, When this parameter is true, worker.tenant.auto.create has no effect and will not automatically create tenants. 
- WORKER_TENANT_CONFIG_DISTRIBUTED_TENANT: false # -- If set true, will use worker bootstrap user as the tenant to execute task when the tenant is `default`; DEFAULT_TENANT_ENABLED: false @@ -733,7 +763,19 @@ alert: # requests: # memory: "1Gi" # cpu: "500m" - + # -- enable configure custom config + enableCustomizedConfig: false + # -- configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml + customizedConfig: { } + # customizedConfig: + # application.yaml: | + # profiles: + # active: postgresql + # banner: + # charset: UTF-8 + # jackson: + # time-zone: UTC + # date-format: "yyyy-MM-dd HH:mm:ss" # -- Periodic probe of container liveness. Container will be restarted if the probe fails. # More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) livenessProbe: @@ -833,7 +875,19 @@ api: # requests: # memory: "1Gi" # cpu: "500m" - + # -- enable configure custom config + enableCustomizedConfig: false + # -- configure aligned with https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-api/src/main/resources/application.yaml + customizedConfig: { } + # customizedConfig: + # application.yaml: | + # profiles: + # active: postgresql + # banner: + # charset: UTF-8 + # jackson: + # time-zone: UTC + # date-format: "yyyy-MM-dd HH:mm:ss" # -- Periodic probe of container liveness. Container will be restarted if the probe fails. 
# More info: [container-probes](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes) livenessProbe: diff --git a/docs/configs/docsdev.js b/docs/configs/docsdev.js index 323e8b21d902..08155a006b23 100644 --- a/docs/configs/docsdev.js +++ b/docs/configs/docsdev.js @@ -145,10 +145,6 @@ export default { title: 'Sqoop', link: '/en-us/docs/dev/user_doc/guide/task/sqoop.html', }, - { - title: 'Pigeon', - link: '/en-us/docs/dev/user_doc/guide/task/pigeon.html', - }, { title: 'Conditions', link: '/en-us/docs/dev/user_doc/guide/task/conditions.html', @@ -432,10 +428,6 @@ export default { title: 'File Manage', link: '/en-us/docs/dev/user_doc/guide/resource/file-manage.html' }, - { - title: 'UDF Manage', - link: '/en-us/docs/dev/user_doc/guide/resource/udf-manage.html' - }, { title: 'Task Group Manage', link: '/en-us/docs/dev/user_doc/guide/resource/task-group.html' @@ -881,10 +873,6 @@ export default { title: 'Sqoop', link: '/zh-cn/docs/dev/user_doc/guide/task/sqoop.html', }, - { - title: 'Pigeon', - link: '/zh-cn/docs/dev/user_doc/guide/task/pigeon.html', - }, { title: 'Conditions', link: '/zh-cn/docs/dev/user_doc/guide/task/conditions.html', @@ -1153,10 +1141,6 @@ export default { title: '文件管理', link: '/zh-cn/docs/dev/user_doc/guide/resource/file-manage.html' }, - { - title: 'UDF 管理', - link: '/zh-cn/docs/dev/user_doc/guide/resource/udf-manage.html' - }, { title: '任务组管理', link: '/zh-cn/docs/dev/user_doc/guide/resource/task-group.html' diff --git a/docs/docs/en/architecture/configuration.md b/docs/docs/en/architecture/configuration.md index b9a26b865c77..16fc7899d9bf 100644 --- a/docs/docs/en/architecture/configuration.md +++ b/docs/docs/en/architecture/configuration.md @@ -110,7 +110,8 @@ The directory structure of DolphinScheduler is as follows: dolphinscheduler-daemon.sh is responsible for DolphinScheduler startup and shutdown. Essentially, start-all.sh or stop-all.sh startup and shutdown the cluster via dolphinscheduler-daemon.sh. 
-Currently, DolphinScheduler just makes a basic config, remember to config further JVM options based on your practical situation of resources. +Currently, DolphinScheduler just makes a basic config, remember to config further JVM options based on your practical +situation of resources. Default simplified parameters are: @@ -128,44 +129,47 @@ export DOLPHINSCHEDULER_OPTS=" " ``` -> "-XX:DisableExplicitGC" is not recommended due to may lead to memory link (DolphinScheduler dependent on Netty to communicate). -> If add "-Djava.net.preferIPv6Addresses=true" will use ipv6 address, if add "-Djava.net.preferIPv4Addresses=true" will use ipv4 address, if doesn't set the two parameter will use ipv4 or ipv6. +> "-XX:DisableExplicitGC" is not recommended as it may lead to a memory leak (DolphinScheduler depends on Netty to +> communicate). +> If you add "-Djava.net.preferIPv6Addresses=true" it will use an ipv6 address, if you add "-Djava.net.preferIPv4Addresses=true" it will +> use an ipv4 address; if neither parameter is set, it will use ipv4 or ipv6. 
### Database connection related configuration DolphinScheduler uses Spring Hikari to manage database connections, configuration file location: -|Service| Configuration file | -|--|--| -|Master Server | `master-server/conf/application.yaml`| -|Api Server| `api-server/conf/application.yaml`| -|Worker Server| `worker-server/conf/application.yaml`| -|Alert Server| `alert-server/conf/application.yaml`| +| Service | Configuration file | +|---------------|---------------------------------------| +| Master Server | `master-server/conf/application.yaml` | +| Api Server | `api-server/conf/application.yaml` | +| Worker Server | `worker-server/conf/application.yaml` | +| Alert Server | `alert-server/conf/application.yaml` | The default configuration is as follows: -|Parameters | Default value| Description| -|--|--|--| -|spring.datasource.driver-class-name| org.postgresql.Driver |datasource driver| -|spring.datasource.url| jdbc:postgresql://127.0.0.1:5432/dolphinscheduler |datasource connection url| -|spring.datasource.username|root|datasource username| -|spring.datasource.password|root|datasource password| -|spring.datasource.hikari.connection-test-query|select 1|validate connection by running the SQL| -|spring.datasource.hikari.minimum-idle| 5| minimum connection pool size number| -|spring.datasource.hikari.auto-commit|true|whether auto commit| -|spring.datasource.hikari.pool-name|DolphinScheduler|name of the connection pool| -|spring.datasource.hikari.maximum-pool-size|50| maximum connection pool size number| -|spring.datasource.hikari.connection-timeout|30000|connection timeout| -|spring.datasource.hikari.idle-timeout|600000|Maximum idle connection survival time| -|spring.datasource.hikari.leak-detection-threshold|0|Connection leak detection threshold| -|spring.datasource.hikari.initialization-fail-timeout|1|Connection pool initialization failed timeout| +| Parameters | Default value | Description | 
+|------------------------------------------------------|---------------------------------------------------|-----------------------------------------------| +| spring.datasource.driver-class-name | org.postgresql.Driver | datasource driver | +| spring.datasource.url | jdbc:postgresql://127.0.0.1:5432/dolphinscheduler | datasource connection url | +| spring.datasource.username | root | datasource username | +| spring.datasource.password | root | datasource password | +| spring.datasource.hikari.connection-test-query | select 1 | validate connection by running the SQL | +| spring.datasource.hikari.minimum-idle | 5 | minimum connection pool size number | +| spring.datasource.hikari.auto-commit | true | whether auto commit | +| spring.datasource.hikari.pool-name | DolphinScheduler | name of the connection pool | +| spring.datasource.hikari.maximum-pool-size | 50 | maximum connection pool size number | +| spring.datasource.hikari.connection-timeout | 30000 | connection timeout | +| spring.datasource.hikari.idle-timeout | 600000 | Maximum idle connection survival time | +| spring.datasource.hikari.leak-detection-threshold | 0 | Connection leak detection threshold | +| spring.datasource.hikari.initialization-fail-timeout | 1 | Connection pool initialization failed timeout | Note that DolphinScheduler also supports database configuration through `bin/env/dolphinscheduler_env.sh`. -### Zookeeper related configuration +### Registry Related configuration -DolphinScheduler uses Zookeeper for cluster management, fault tolerance, event monitoring and other functions. Configuration file location: -|Service| Configuration file | +DolphinScheduler uses Zookeeper for cluster management, fault tolerance, event monitoring and other functions. 
+Configuration file location: +|Service| Configuration file | |--|--| |Master Server | `master-server/conf/application.yaml`| |Api Server| `api-server/conf/application.yaml`| @@ -173,30 +177,35 @@ DolphinScheduler uses Zookeeper for cluster management, fault tolerance, event m The default configuration is as follows: -|Parameters | Default value| Description| -|--|--|--| -|registry.zookeeper.namespace|dolphinscheduler|namespace of zookeeper| -|registry.zookeeper.connect-string|localhost:2181| the connection string of zookeeper| -|registry.zookeeper.retry-policy.base-sleep-time|60ms|time to wait between subsequent retries| -|registry.zookeeper.retry-policy.max-sleep|300ms|maximum time to wait between subsequent retries| -|registry.zookeeper.retry-policy.max-retries|5|maximum retry times| -|registry.zookeeper.session-timeout|30s|session timeout| -|registry.zookeeper.connection-timeout|30s|connection timeout| -|registry.zookeeper.block-until-connected|600ms|waiting time to block until the connection succeeds| -|registry.zookeeper.digest|{username}:{password}|digest of zookeeper to access znode, works only when acl is enabled, for more details please check [https://zookeeper.apache.org/doc/r3.4.14/zookeeperAdmin.html](Apache Zookeeper doc) | +| Parameters | Default value | Description | +|-------------------------------------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| registry.zookeeper.namespace | dolphinscheduler | namespace of zookeeper | +| registry.zookeeper.connect-string | localhost:2181 | the connection string of zookeeper | +| registry.zookeeper.retry-policy.base-sleep-time | 60ms | time to wait between subsequent retries | +| registry.zookeeper.retry-policy.max-sleep | 300ms | maximum time to wait between subsequent retries | +| registry.zookeeper.retry-policy.max-retries | 5 | 
maximum retry times | +| registry.zookeeper.session-timeout | 30s | session timeout | +| registry.zookeeper.connection-timeout | 30s | connection timeout | +| registry.zookeeper.block-until-connected | 600ms | waiting time to block until the connection succeeds | +| registry.zookeeper.digest | {username}:{password} | digest of zookeeper to access znode, works only when acl is enabled, for more details please check [https://zookeeper.apache.org/doc/r3.4.14/zookeeperAdmin.html](Apache Zookeeper doc) | Note that DolphinScheduler also supports zookeeper related configuration through `bin/env/dolphinscheduler_env.sh`. +For ETCD Registry, please see more details +on [link](https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/README.md). +For JDBC Registry, please see more details +on [link](https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md). + ### common.properties [hadoop、s3、yarn config properties] Currently, common.properties mainly configures Hadoop,s3a related configurations. 
Configuration file location: -|Service| Configuration file | -|--|--| -|Master Server | `master-server/conf/common.properties`| -|Api Server| `api-server/conf/common.properties`| -|Worker Server| `worker-server/conf/common.properties`| -|Alert Server| `alert-server/conf/common.properties`| +| Service | Configuration file | +|---------------|-----------------------------------------------------------------------| +| Master Server | `master-server/conf/common.properties` | +| Api Server | `api-server/conf/common.properties`, `api-server/conf/aws.yaml` | +| Worker Server | `worker-server/conf/common.properties`, `worker-server/conf/aws.yaml` | +| Alert Server | `alert-server/conf/common.properties` | The default configuration is as follows: @@ -205,10 +214,6 @@ The default configuration is as follows: | data.basedir.path | /tmp/dolphinscheduler | local directory used to store temp files | | resource.storage.type | NONE | type of resource files: HDFS, S3, OSS, GCS, ABS, NONE | | resource.upload.path | /dolphinscheduler | storage path of resource files | -| aws.access.key.id | minioadmin | access key id of S3 | -| aws.secret.access.key | minioadmin | secret access key of S3 | -| aws.region | us-east-1 | region of S3 | -| aws.s3.endpoint | http://minio:9000 | endpoint of S3 | | hdfs.root.user | hdfs | configure users with corresponding permissions if storage type is HDFS | | fs.defaultFS | hdfs://mycluster:8020 | If resource.storage.type=S3, then the request url would be similar to 's3a://dolphinscheduler'. 
Otherwise if resource.storage.type=HDFS and hadoop supports HA, copy core-site.xml and hdfs-site.xml into 'conf' directory | | hadoop.security.authentication.startup.state | false | whether hadoop grant kerberos permission | @@ -237,43 +242,43 @@ The default configuration is as follows: Location: `api-server/conf/application.yaml` -|Parameters | Default value| Description| -|--|--|--| -|server.port|12345|api service communication port| -|server.servlet.session.timeout|120m|session timeout| -|server.servlet.context-path|/dolphinscheduler/ |request path| -|spring.servlet.multipart.max-file-size|1024MB|maximum file size| -|spring.servlet.multipart.max-request-size|1024MB|maximum request size| -|server.jetty.max-http-post-size|5000000|jetty maximum post size| -|spring.banner.charset|UTF-8|message encoding| -|spring.jackson.time-zone|UTC|time zone| -|spring.jackson.date-format|"yyyy-MM-dd HH:mm:ss"|time format| -|spring.messages.basename|i18n/messages|i18n config| -|security.authentication.type|PASSWORD|authentication type| -|security.authentication.ldap.user.admin|read-only-admin|admin user account when you log-in with LDAP| -|security.authentication.ldap.urls|ldap://ldap.forumsys.com:389/|LDAP urls| -|security.authentication.ldap.base.dn|dc=example,dc=com|LDAP base dn| -|security.authentication.ldap.username|cn=read-only-admin,dc=example,dc=com|LDAP username| -|security.authentication.ldap.password|password|LDAP password| -|security.authentication.ldap.user.identity-attribute|uid|LDAP user identity attribute| -|security.authentication.ldap.user.email-attribute|mail|LDAP user email attribute| -|security.authentication.ldap.user.not-exist-action|CREATE|action when ldap user is not exist,default value: CREATE. 
Optional values include(CREATE,DENY)| -|security.authentication.ldap.ssl.enable|false|LDAP ssl switch| -|security.authentication.ldap.ssl.trust-store|ldapkeystore.jks|LDAP jks file absolute path| -|security.authentication.ldap.ssl.trust-store-password|password|LDAP jks password| -|security.authentication.casdoor.user.admin||admin user account when you log-in with Casdoor| -|casdoor.endpoint||Casdoor server url| -|casdoor.client-id||id in Casdoor| -|casdoor.client-secret||secret in Casdoor| -|casdoor.certificate||certificate in Casdoor| -|casdoor.organization-name||organization name in Casdoor| -|casdoor.application-name||application name in Casdoor| -|casdoor.redirect-url||doplhinscheduler login url| -|api.traffic.control.global.switch|false|traffic control global switch| -|api.traffic.control.max-global-qps-rate|300|global max request number per second| -|api.traffic.control.tenant-switch|false|traffic control tenant switch| -|api.traffic.control.default-tenant-qps-rate|10|default tenant max request number per second| -|api.traffic.control.customize-tenant-qps-rate||customize tenant max request number per second| +| Parameters | Default value | Description | +|-------------------------------------------------------|--------------------------------------|------------------------------------------------------------------------------------------------| +| server.port | 12345 | api service communication port | +| server.servlet.session.timeout | 120m | session timeout | +| server.servlet.context-path | /dolphinscheduler/ | request path | +| spring.servlet.multipart.max-file-size | 1024MB | maximum file size | +| spring.servlet.multipart.max-request-size | 1024MB | maximum request size | +| server.jetty.max-http-post-size | 5000000 | jetty maximum post size | +| spring.banner.charset | UTF-8 | message encoding | +| spring.jackson.time-zone | UTC | time zone | +| spring.jackson.date-format | "yyyy-MM-dd HH:mm:ss" | time format | +| spring.messages.basename | 
i18n/messages | i18n config | +| security.authentication.type | PASSWORD | authentication type | +| security.authentication.ldap.user.admin | read-only-admin | admin user account when you log-in with LDAP | +| security.authentication.ldap.urls | ldap://ldap.forumsys.com:389/ | LDAP urls | +| security.authentication.ldap.base.dn | dc=example,dc=com | LDAP base dn | +| security.authentication.ldap.username | cn=read-only-admin,dc=example,dc=com | LDAP username | +| security.authentication.ldap.password | password | LDAP password | +| security.authentication.ldap.user.identity-attribute | uid | LDAP user identity attribute | +| security.authentication.ldap.user.email-attribute | mail | LDAP user email attribute | +| security.authentication.ldap.user.not-exist-action | CREATE | action when ldap user is not exist,default value: CREATE. Optional values include(CREATE,DENY) | +| security.authentication.ldap.ssl.enable | false | LDAP ssl switch | +| security.authentication.ldap.ssl.trust-store | ldapkeystore.jks | LDAP jks file absolute path | +| security.authentication.ldap.ssl.trust-store-password | password | LDAP jks password | +| security.authentication.casdoor.user.admin | | admin user account when you log-in with Casdoor | +| casdoor.endpoint | | Casdoor server url | +| casdoor.client-id | | id in Casdoor | +| casdoor.client-secret | | secret in Casdoor | +| casdoor.certificate | | certificate in Casdoor | +| casdoor.organization-name | | organization name in Casdoor | +| casdoor.application-name | | application name in Casdoor | +| casdoor.redirect-url | | doplhinscheduler login url | +| api.traffic.control.global.switch | false | traffic control global switch | +| api.traffic.control.max-global-qps-rate | 300 | global max request number per second | +| api.traffic.control.tenant-switch | false | traffic control tenant switch | +| api.traffic.control.default-tenant-qps-rate | 10 | default tenant max request number per second | +| 
api.traffic.control.customize-tenant-qps-rate | | customize tenant max request number per second | ### Master Server related configuration @@ -282,7 +287,6 @@ Location: `master-server/conf/application.yaml` | Parameters | Default value | Description | |-----------------------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | master.listen-port | 5678 | master listen port | -| master.fetch-command-num | 10 | the number of commands fetched by master | | master.pre-exec-threads | 10 | master prepare execute thread number to limit handle commands in parallel | | master.exec-threads | 100 | master execute thread number to limit process instances in parallel | | master.dispatch-task-number | 3 | master dispatch task number per batch | @@ -292,37 +296,39 @@ Location: `master-server/conf/application.yaml` | master.task-commit-interval | 1000 | master commit task interval, the unit is millisecond | | master.state-wheel-interval | 5 | time to check status | | master.server-load-protection.enabled | true | If set true, will open master overload protection | -| master.server-load-protection.max-cpu-usage-percentage-thresholds | 0.7 | Master max cpu usage, when the master's cpu usage is smaller then this value, master server can execute workflow. | -| master.server-load-protection.max-jvm-memory-usage-percentage-thresholds | 0.7 | Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. 
| -| master.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. | +| master.server-load-protection.max-system-cpu-usage-percentage-thresholds | 0.7 | Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. | +| master.server-load-protection.max-jvm-cpu-usage-percentage-thresholds | 0.7 | Master max JVM cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. | +| master.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | Master max system memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. | | master.server-load-protection.max-disk-usage-percentage-thresholds | 0.7 | Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. | | master.failover-interval | 10 | failover interval, the unit is minute | | master.kill-application-when-task-failover | true | whether to kill yarn/k8s application when failover taskInstance | | master.registry-disconnect-strategy.strategy | stop | Used when the master disconnect from registry, default value: stop. 
Optional values include stop, waiting | | master.registry-disconnect-strategy.max-waiting-time | 100s | Used when the master disconnect from registry, and the disconnect strategy is waiting, this config means the master will waiting to reconnect to registry in given times, and after the waiting times, if the master still cannot connect to registry, will stop itself, if the value is 0s, the Master will wait infinitely | | master.worker-group-refresh-interval | 10s | The interval to refresh worker group from db to memory | +| master.command-fetch-strategy.type | ID_SLOT_BASED | The command fetch strategy, only support `ID_SLOT_BASED` | +| master.command-fetch-strategy.config.id-step | 1 | The id auto incremental step of t_ds_command in db | +| master.command-fetch-strategy.config.fetch-size | 10 | The number of commands fetched by master | ### Worker Server related configuration Location: `worker-server/conf/application.yaml` -| Parameters | Default value | Description | -|--------------------------------------------------------------------------------|---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| worker.listen-port | 1234 | worker-service listen port | -| worker.exec-threads | 100 | worker-service execute thread number, used to limit the number of task instances in parallel | -| worker.max-heartbeat-interval | 10s | worker-service max heartbeat interval | -| worker.host-weight | 100 | worker host weight to dispatch tasks | -| worker.server-load-protection.enabled | true | If set true will open worker overload protection | -| worker.max-cpu-usage-percentage-thresholds.max-cpu-usage-percentage-thresholds | 0.7 | Master max cpu usage, when the master's cpu usage is smaller then this value, 
master server can execute workflow. | -| worker.server-load-protection.max-jvm-memory-usage-percentage-thresholds | 0.7 | Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. | -| worker.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. | -| worker.server-load-protection.max-disk-usage-percentage-thresholds | 0.7 | Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. | -| worker.registry-disconnect-strategy.strategy | stop | Used when the worker disconnect from registry, default value: stop. Optional values include stop, waiting | -| worker.registry-disconnect-strategy.max-waiting-time | 100s | Used when the worker disconnect from registry, and the disconnect strategy is waiting, this config means the worker will waiting to reconnect to registry in given times, and after the waiting times, if the worker still cannot connect to registry, will stop itself, if the value is 0s, will wait infinitely | -| worker.task-execute-threads-full-policy | REJECT | If REJECT, when the task waiting in the worker reaches exec-threads, it will reject the received task and the Master will redispatch it; If CONTINUE, it will put the task into the worker's execution queue and wait for a free thread to start execution | -| worker.tenant-config.auto-create-tenant-enabled | true | tenant corresponds to the user of the system, which is used by the worker to submit the job. If system does not have this user, it will be automatically created after the parameter worker.tenant.auto.create is true. 
| -| worker.tenant-config.distributed-tenant-enabled | false | When this parameter is true, auto-create-tenant-enabled has no effect and will not automatically create tenants | -| worker.tenant-config.default-tenant-enabled | false | If set true, will use worker bootstrap user as the tenant to execute task when the tenant is `default`. | +| Parameters | Default value | Description | +|-----------------------------------------------------------------------------|---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| worker.listen-port | 1234 | worker-service listen port | +| worker.exec-threads | 100 | worker-service execute thread number, used to limit the number of task instances in parallel | +| worker.max-heartbeat-interval | 10s | worker-service max heartbeat interval | +| worker.host-weight | 100 | worker host weight to dispatch tasks | +| worker.server-load-protection.enabled | true | If set true will open worker overload protection | +| worker.server-load-protection.max-system-cpu-usage-percentage-thresholds | 0.7 | Worker max system cpu usage, when the worker's system cpu usage is smaller then this value, master server can execute workflow. | +| worker.server-load-protection.max-jvm-cpu-usage-percentage-thresholds | 0.7 | Worker max JVM cpu usage, when the worker's jvm cpu usage is smaller then this value, master server can execute workflow. | +| worker.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | Worker max system memory usage , when the worker's system memory usage is smaller then this value, master server can execute workflow. 
| +| worker.server-load-protection.max-disk-usage-percentage-thresholds | 0.7 | Worker max disk usage , when the worker's disk usage is smaller then this value, master server can execute workflow. | +| worker.registry-disconnect-strategy.strategy | stop | Used when the worker disconnect from registry, default value: stop. Optional values include stop, waiting | +| worker.registry-disconnect-strategy.max-waiting-time | 100s | Used when the worker disconnect from registry, and the disconnect strategy is waiting, this config means the worker will waiting to reconnect to registry in given times, and after the waiting times, if the worker still cannot connect to registry, will stop itself, if the value is 0s, will wait infinitely | +| worker.task-execute-threads-full-policy | REJECT | If REJECT, when the task waiting in the worker reaches exec-threads, it will reject the received task and the Master will redispatch it; If CONTINUE, it will put the task into the worker's execution queue and wait for a free thread to start execution | +| worker.tenant-config.auto-create-tenant-enabled | true | tenant corresponds to the user of the system, which is used by the worker to submit the job. If system does not have this user, it will be automatically created after the parameter worker.tenant.auto.create is true. | +| worker.tenant-config.default-tenant-enabled | false | If set true, will use worker bootstrap user as the tenant to execute task when the tenant is `default`. | ### Alert Server related configuration @@ -337,10 +343,10 @@ Location: `alert-server/conf/application.yaml` This part describes quartz configs and configure them based on your practical situation and resources. 
-|Service| Configuration file | -|--|--| -|Master Server | `master-server/conf/application.yaml`| -|Api Server| `api-server/conf/application.yaml`| +| Service | Configuration file | +|---------------|---------------------------------------| +| Master Server | `master-server/conf/application.yaml` | +| Api Server | `api-server/conf/application.yaml` | The default configuration is as follows: @@ -358,7 +364,8 @@ The default configuration is as follows: | spring.quartz.properties.org.quartz.jobStore.driverDelegateClass | org.quartz.impl.jdbcjobstore.PostgreSQLDelegate | | spring.quartz.properties.org.quartz.jobStore.clusterCheckinInterval | 5000 | -The above configuration items is the same in *Master Server* and *Api Server*, but their *Quartz Scheduler* threadpool configuration is different. +The above configuration items is the same in *Master Server* and *Api Server*, but their *Quartz Scheduler* threadpool +configuration is different. The default quartz threadpool configuration in *Master Server* is as follows: @@ -369,7 +376,8 @@ The default quartz threadpool configuration in *Master Server* is as follows: | spring.quartz.properties.org.quartz.threadPool.threadPriority | 5 | | spring.quartz.properties.org.quartz.threadPool.class | org.quartz.simpl.SimpleThreadPool | -Since *Api Server* will not start *Quartz Scheduler* instance, as a client only, therefore it's threadpool is configured as `QuartzZeroSizeThreadPool` which has zero thread; +Since *Api Server* will not start *Quartz Scheduler* instance, as a client only, therefore it's threadpool is configured +as `QuartzZeroSizeThreadPool` which has zero thread; The default configuration is as follows: | Parameters | Default value | @@ -378,7 +386,8 @@ The default configuration is as follows: ### dolphinscheduler_env.sh [load environment variables configs] -When using shell to commit tasks, DolphinScheduler will export environment variables from `bin/env/dolphinscheduler_env.sh`. 
The +When using shell to commit tasks, DolphinScheduler will export environment variables +from `bin/env/dolphinscheduler_env.sh`. The mainly configuration including `JAVA_HOME` and other environment paths. ```bash @@ -406,9 +415,10 @@ export FLINK_ENV_JAVA_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspec ### Log related configuration -|Service| Configuration file | -|--|--| -|Master Server | `master-server/conf/logback-spring.xml`| -|Api Server| `api-server/conf/logback-spring.xml`| -|Worker Server| `worker-server/conf/logback-spring.xml`| -|Alert Server| `alert-server/conf/logback-spring.xml`| +| Service | Configuration file | +|---------------|-----------------------------------------| +| Master Server | `master-server/conf/logback-spring.xml` | +| Api Server | `api-server/conf/logback-spring.xml` | +| Worker Server | `worker-server/conf/logback-spring.xml` | +| Alert Server | `alert-server/conf/logback-spring.xml` | + diff --git a/docs/docs/en/architecture/metadata.md b/docs/docs/en/architecture/metadata.md index b4633707f532..d2d11a832363 100644 --- a/docs/docs/en/architecture/metadata.md +++ b/docs/docs/en/architecture/metadata.md @@ -22,8 +22,6 @@ see sql files in `dolphinscheduler/dolphinscheduler-dao/src/main/resources/sql` - User can have multiple projects, user project authorization completes the relationship binding using `project_id` and `user_id` in `t_ds_relation_project_user` table. - The `user_id` in the `t_ds_projcet` table represents the user who create the project, and the `user_id` in the `t_ds_relation_project_user` table represents users who have permission to the project. -- The `user_id` in the `t_ds_resources` table represents the user who create the resource, and the `user_id` in `t_ds_relation_resources_user` represents the user who has permissions to the resource. 
-- The `user_id` in the `t_ds_udfs` table represents the user who create the UDF, and the `user_id` in the `t_ds_relation_udfs_user` table represents a user who has permission to the UDF. ### Project - Tenant - ProcessDefinition - Schedule diff --git a/docs/docs/en/architecture/task-structure.md b/docs/docs/en/architecture/task-structure.md index dc0b9d520a3f..041d753403f2 100644 --- a/docs/docs/en/architecture/task-structure.md +++ b/docs/docs/en/architecture/task-structure.md @@ -146,7 +146,6 @@ No.|parameter name||type|description |note 5| |type |String |database type 6| |datasource |Int |datasource id 7| |sql |String |query SQL statement -8| |udfs | String| udf functions|specify UDF function ids, separate by comma 9| |sqlType | String| SQL node type |0 for query and 1 for none-query SQL 10| |title |String | mail title 11| |receivers |String |receivers @@ -180,7 +179,6 @@ No.|parameter name||type|description |note "type":"MYSQL", "datasource":1, "sql":"select id , namge , age from emp where id = ${id}", - "udfs":"", "sqlType":"0", "title":"xxxx@xxx.com", "receivers":"xxxx@xxx.com", diff --git a/docs/docs/en/contribute/development-environment-setup.md b/docs/docs/en/contribute/development-environment-setup.md index 4755f558570f..53e92ba7b78d 100644 --- a/docs/docs/en/contribute/development-environment-setup.md +++ b/docs/docs/en/contribute/development-environment-setup.md @@ -153,7 +153,7 @@ The browser access address [http://localhost:5173](http://localhost:5173) can lo #### zookeeper -Download [ZooKeeper](https://www.apache.org/dyn/closer.lua/zookeeper/zookeeper-3.6.3), and extract it. +Download [ZooKeeper](https://zookeeper.apache.org/releases.html), and extract it. 
- Create directory `zkData` and `zkLog` - Go to the zookeeper installation directory, copy configure file `zoo_sample.cfg` to `conf/zoo.cfg`, and change value of dataDir in conf/zoo.cfg to dataDir=./tmp/zookeeper diff --git a/docs/docs/en/contribute/frontend-development.md b/docs/docs/en/contribute/frontend-development.md index 0cc86811f898..47276caeaf6a 100644 --- a/docs/docs/en/contribute/frontend-development.md +++ b/docs/docs/en/contribute/frontend-development.md @@ -33,7 +33,7 @@ Use the command line mode `cd` enter the `dolphinscheduler-ui` project director > If `npm install` is very slow, you can set the taobao mirror ``` -npm config set registry http://registry.npm.taobao.org/ +npm config set registry http://registry.npmmirror.com/ ``` - Modify `API_BASE` in the file `dolphinscheduler-ui/.env` to interact with the backend: @@ -163,9 +163,6 @@ Resource Management => `http://localhost:8888/#/resource/file` ``` | File Management -| udf Management - - Resource Management - - Function management ``` Data Source Management => `http://localhost:8888/#/datasource/list` diff --git a/docs/docs/en/contribute/join/pull-request.md b/docs/docs/en/contribute/join/pull-request.md index ad954a7c6e55..10b51ba58530 100644 --- a/docs/docs/en/contribute/join/pull-request.md +++ b/docs/docs/en/contribute/join/pull-request.md @@ -31,7 +31,7 @@ The corresponding relationship between `Pull Request Type` and `Issue Type` is a Bug Fix - [Fix-3333][server] Fix xxx + [Fix-3333][ui] Fix xxx Improvement @@ -44,9 +44,24 @@ The corresponding relationship between `Pull Request Type` and `Issue Type` is a [Test-3333][api] Add the e2e test of xxx - Sub-Task - (Parent type corresponding to Sub-Task) - [Feature-3333][server] Implement xxx + Doc + Doc + [Doc-3333] Improve xxx + + + E2E + E2E + [E2E-3333] Implement xxx + + + CI + CI + [CI] Improve xxx + + + Chore + Chore + [Chore] Improve xxx diff --git a/docs/docs/en/faq.md b/docs/docs/en/faq.md index b0954b3468fe..7ac4afb76e99 100644 --- 
a/docs/docs/en/faq.md +++ b/docs/docs/en/faq.md @@ -459,11 +459,11 @@ A: 1, cd dolphinscheduler-ui and delete node_modules directory sudo rm -rf node_modules ``` -​ 2, install node-sass through npm.taobao.org +​ 2, install node-sass through npmmirror.com ``` sudo npm uninstall node-sass -sudo npm i node-sass --sass_binary_site=https://npm.taobao.org/mirrors/node-sass/ +sudo npm i node-sass --sass_binary_site=https://npmmirror.com/mirrors/node-sass/ ``` 3, if the 2nd step failure, please, [referer url](https://github.com/apache/dolphinscheduler/blob/dev/docs/docs/en/contribute/frontend-development.md) diff --git a/docs/docs/en/guide/installation/cluster.md b/docs/docs/en/guide/installation/cluster.md index 14ae58a47978..ce5b60baa295 100644 --- a/docs/docs/en/guide/installation/cluster.md +++ b/docs/docs/en/guide/installation/cluster.md @@ -14,21 +14,7 @@ Configure all the configurations refer to [pseudo-cluster deployment](pseudo-clu ### Modify Configuration -This step differs quite a lot from [pseudo-cluster deployment](pseudo-cluster.md), because the deployment script transfers the required resources for installation to each deployment machine by using `scp`. So we only need to modify the configuration of the machine that runs `install.sh` script and configurations will dispatch to cluster by `scp`. 
The configuration file is under the path `bin/env/install_env.sh`, here we only need to modify section **INSTALL MACHINE**, **DolphinScheduler ENV, Database, Registry Server** and keep other sections the same as [pseudo-cluster deployment](pseudo-cluster .md), the following describes the parameters that must be modified: - -```shell -# --------------------------------------------------------- -# INSTALL MACHINE -# --------------------------------------------------------- -# Using IP or machine hostname for the server going to deploy master, worker, API server, the IP of the server -# If you using a hostname, make sure machines could connect each other by hostname -# As below, the hostname of the machine deploying DolphinScheduler is ds1, ds2, ds3, ds4, ds5, where ds1, ds2 install the master server, ds3, ds4, and ds5 installs worker server, the alert server is installed in ds4, and the API server is installed in ds5 -ips="ds1,ds2,ds3,ds4,ds5" -masters="ds1,ds2" -workers="ds3:default,ds4:default,ds5:default" -alertServer="ds4" -apiServers="ds5" -``` +This step differs quite a lot from [pseudo-cluster deployment](pseudo-cluster.md), please use `scp` or other methods to distribute the configuration files to each machine, then modify the configuration files. 
## Start and Login DolphinScheduler diff --git a/docs/docs/en/guide/installation/kubernetes.md b/docs/docs/en/guide/installation/kubernetes.md index 8e58fdeccd25..a6587a585511 100644 --- a/docs/docs/en/guide/installation/kubernetes.md +++ b/docs/docs/en/guide/installation/kubernetes.md @@ -14,16 +14,15 @@ If you are a new hand and want to experience DolphinScheduler functions, we reco ## Install DolphinScheduler -Please download the source code package `apache-dolphinscheduler--src.tar.gz`, download address: [download address](https://dolphinscheduler.apache.org/en-us/download) - -To publish the release name `dolphinscheduler` version, please execute the following commands: - -``` -$ tar -zxvf apache-dolphinscheduler--src.tar.gz -$ cd apache-dolphinscheduler--src/deploy/kubernetes/dolphinscheduler -$ helm repo add bitnami https://charts.bitnami.com/bitnami -$ helm dependency update . -$ helm install dolphinscheduler . --set image.tag= +```bash +# Choose the corresponding version yourself +export VERSION=3.2.1 +helm pull oci://registry-1.docker.io/apache/dolphinscheduler-helm --version ${VERSION} +tar -xvf dolphinscheduler-helm-${VERSION}.tgz +cd dolphinscheduler-helm +helm repo add bitnami https://charts.bitnami.com/bitnami +helm dependency update . +helm install dolphinscheduler . ``` To publish the release name `dolphinscheduler` version to `test` namespace: diff --git a/docs/docs/en/guide/installation/pseudo-cluster.md b/docs/docs/en/guide/installation/pseudo-cluster.md index e63436f203bb..f0dc100d8472 100644 --- a/docs/docs/en/guide/installation/pseudo-cluster.md +++ b/docs/docs/en/guide/installation/pseudo-cluster.md @@ -2,7 +2,7 @@ The purpose of the pseudo-cluster deployment is to deploy the DolphinScheduler service on a single machine. In this mode, DolphinScheduler's master, worker, API server, are all on the same machine. 
-If you are a new hand and want to experience DolphinScheduler functions, we recommend you install follow [Standalone deployment](standalone.md). If you want to experience more complete functions and schedule massive tasks, we recommend you install follow[pseudo-cluster deployment. If you want to deploy DolphinScheduler in production, we recommend you follow [cluster deployment](cluster.md) or [Kubernetes deployment](kubernetes.md). +If you are a new hand and want to experience DolphinScheduler functions, we recommend you follow [Standalone deployment](standalone.md). If you want to experience more complete functions and schedule massive tasks, we recommend you follow [pseudo-cluster deployment](pseudo-cluster.md). If you want to deploy DolphinScheduler in production, we recommend you follow [cluster deployment](cluster.md) or [Kubernetes deployment](kubernetes.md). ## Preparation @@ -71,31 +71,7 @@ Go to the ZooKeeper installation directory, copy configure file `zoo_sample.cfg` ## Modify Configuration After completing the preparation of the basic environment, you need to modify the configuration file according to the -environment you used. Change the environment configurations via `export =`. The configuration files are located in directory `bin/env` as `install_env.sh` and `dolphinscheduler_env.sh`. - -### Modify `install_env.sh` - -File `install_env.sh` describes which machines will be installed DolphinScheduler and what server will be installed on -each machine. You could find this file in the path `bin/env/install_env.sh` and the detail of the configuration as below.
- -```shell -# --------------------------------------------------------- -# INSTALL MACHINE -# --------------------------------------------------------- -# Due to the master, worker, and API server being deployed on a single node, the IP of the server is the machine IP or localhost -ips="localhost" -sshPort="22" -masters="localhost" -workers="localhost:default" -alertServer="localhost" -apiServers="localhost" - -# DolphinScheduler installation path, it will auto-create if not exists -installPath=~/dolphinscheduler - -# Deploy user, use the user you create in section **Configure machine SSH password-free login** -deployUser="dolphinscheduler" -``` +environment you used. Change the environment configurations via `export =`. The configuration files are located in directory `bin/env` as `dolphinscheduler_env.sh`. ### Modify `dolphinscheduler_env.sh` @@ -123,7 +99,6 @@ export SPRING_DATASOURCE_PASSWORD={password} # DolphinScheduler server related configuration export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} -export MASTER_FETCH_COMMAND_NUM=${MASTER_FETCH_COMMAND_NUM:-10} # Registry center configuration, determines the type and link of the registry center export REGISTRY_TYPE=${REGISTRY_TYPE:-zookeeper} @@ -147,11 +122,7 @@ Follow the instructions in [datasource-setting](../howto/datasource-setting.md) ## Start DolphinScheduler -Use **deployment user** you created above, running the following command to complete the deployment, and the server log will be stored in the logs folder. - -```shell -bash ./bin/install.sh -``` +Use **deployment user** you created above, running the command to complete the deployment, and the server log will be stored in the logs folder. > **_Note:_** For the first time deployment, there maybe occur five times of `sh: bin/dolphinscheduler-daemon.sh: No such file or directory` in the terminal, > this is non-important information that you can ignore. 
diff --git a/docs/docs/en/guide/installation/standalone.md b/docs/docs/en/guide/installation/standalone.md index bd2d73f1ce94..ac78e7817ff4 100644 --- a/docs/docs/en/guide/installation/standalone.md +++ b/docs/docs/en/guide/installation/standalone.md @@ -36,7 +36,7 @@ Access address `http://localhost:12345/dolphinscheduler/ui` and login DolphinSch ### Start or Stop Server -The script `./bin/dolphinscheduler-daemon.sh`can be used not only quickly start standalone, but also to stop the service operation. The following are all the commands: +The script `./bin/dolphinscheduler-daemon.sh` can be used not only to quickly start standalone, but also to stop the service operation. The following are all the commands: ```shell # Start Standalone Server diff --git a/docs/docs/en/guide/metrics/metrics.md b/docs/docs/en/guide/metrics/metrics.md index 347034046509..9ba7cc73e786 100644 --- a/docs/docs/en/guide/metrics/metrics.md +++ b/docs/docs/en/guide/metrics/metrics.md @@ -91,6 +91,11 @@ For example, you can get the master metrics by `curl http://localhost:5679/actua - stop: the number of stopped workflow instances - failover: the number of workflow instance fail-overs +### RPC Related Metrics + +- ds.rpc.client.sync.request.exception.count: (counter) the number of exceptions that occurred in sync rpc requests +- ds.rpc.client.sync.request.duration.time: (histogram) the time cost of sync rpc requests + ### Master Server Metrics - ds.master.overload.count: (counter) the number of times the master overloaded diff --git a/docs/docs/en/guide/monitor.md b/docs/docs/en/guide/monitor.md index eb8600d8b7da..95a6c9c75b95 100644 --- a/docs/docs/en/guide/monitor.md +++ b/docs/docs/en/guide/monitor.md @@ -16,6 +16,12 @@ ![worker](../../../img/new_ui/dev/monitor/worker.png) +### Alert Server + +- Mainly related to alert server information. + +![alert-server](../../../img/new_ui/dev/monitor/alert-server.png) + ### Database - Mainly the health status of the DB.
@@ -26,18 +32,17 @@ ### Statistics -![statistics](../../../img/new_ui/dev/monitor/statistics.png) +![Command Statistics List](../../../img/new_ui/dev/monitor/command-list.png) + +Shows the command list in the system. Data is from the `t_ds_command` table. + +![Failure Command Statistics List](../../../img/new_ui/dev/monitor/failure-command-list.png) -| **Parameter** | **Description** | -|----------------------------------------|----------------------------------------------------| -| Number of commands wait to be executed | Statistics of the `t_ds_command` table data. | -| The number of failed commands | Statistics of the `t_ds_error_command` table data. | -| Number of tasks wait to run | Count the data of `task_queue` in the ZooKeeper. | -| Number of tasks wait to be killed | Count the data of `task_kill` in the ZooKeeper. | +Shows the failure command list in the system. Data is from the `t_ds_error_command` table. ### Audit Log The audit log provides information about who accesses the system and the operations made to the system and record related time, which strengthen the security of the system and maintenance. -![audit-log](../../../img/new_ui/dev/monitor/audit-log.jpg) +![audit-log](../../../img/new_ui/dev/monitor/audit-log.png) diff --git a/docs/docs/en/guide/parameter/global.md b/docs/docs/en/guide/parameter/global.md index 86cfd699293f..46e9fb151d7b 100644 --- a/docs/docs/en/guide/parameter/global.md +++ b/docs/docs/en/guide/parameter/global.md @@ -22,7 +22,7 @@ Create a shell task and enter `echo ${dt}` in the script content. In this case, ### Save the workflow and set global parameters -Set global parameter: On the workflow definition page, click the plus sign to the right of "Set Global", after filling in the variable name and value, save it. +Set global parameter: On the workflow definition page, click the plus sign to the right of "Set Global", after filling in the variable name and value, select the appropriate parameter value type, save it. 
![global-parameter02](../../../../img/new_ui/dev/parameter/global_parameter02.png) diff --git a/docs/docs/en/guide/parameter/project-parameter.md b/docs/docs/en/guide/parameter/project-parameter.md index 235cd23512ff..f9509114d727 100644 --- a/docs/docs/en/guide/parameter/project-parameter.md +++ b/docs/docs/en/guide/parameter/project-parameter.md @@ -8,7 +8,7 @@ Project-level parameters are valid for all task nodes under the entire project. ### Define project-level parameters -On the project page, click Project Parameters and Create Parameters, and fill in the parameter name and parameter value. As shown below: +On the project page, click Project Parameters and Create Parameters, fill in the parameter name and parameter value, and select the appropriate parameter value type. As shown below: ![project-parameter01](../../../../img/new_ui/dev/parameter/project_parameter01.png) diff --git a/docs/docs/en/guide/parameter/startup-parameter.md b/docs/docs/en/guide/parameter/startup-parameter.md index bf3acc2034b3..11cf5a9f631d 100644 --- a/docs/docs/en/guide/parameter/startup-parameter.md +++ b/docs/docs/en/guide/parameter/startup-parameter.md @@ -6,7 +6,7 @@ Parameters are valid for all task nodes of the entire workflow. It can be config ## Usage -Usage of startup parameters is: at the task launch page, click the '+' below the 'Startup Parameter' and fill in the key and value to save. The workflow will add them into global parameters. +Usage of startup parameters is: at the task launch page, click the '+' below the 'Startup Parameter', fill in the key and value, select the appropriate parameter value type, then save. The workflow will add them into global parameters.
## Example diff --git a/docs/docs/en/guide/remote-logging.md b/docs/docs/en/guide/remote-logging.md index a29dc065829c..7753fe4116a2 100644 --- a/docs/docs/en/guide/remote-logging.md +++ b/docs/docs/en/guide/remote-logging.md @@ -10,7 +10,7 @@ If you deploy DolphinScheduler in `Standalone` mode, you only need to configure ```properties # Whether to enable remote logging remote.logging.enable=false -# if remote.logging.enable = true, set the target of remote logging +# if remote.logging.enable = true, set the target of remote logging, currently support OSS, S3, GCS, ABS remote.logging.target=OSS # if remote.logging.enable = true, set the log base directory remote.logging.base.dir=logs @@ -66,12 +66,12 @@ remote.logging.google.cloud.storage.bucket.name= Configure `common.properties` as follows: ```properties -# abs container name, required if you set resource.storage.type=ABS -resource.azure.blob.storage.container.name= # abs account name, required if you set resource.storage.type=ABS -resource.azure.blob.storage.account.name= -# abs connection string, required if you set resource.storage.type=ABS -resource.azure.blob.storage.connection.string= +remote.logging.abs.account.name= +# abs account key, required if you set resource.storage.type=ABS +remote.logging.abs.account.key= +# abs container name, required if you set resource.storage.type=ABS +remote.logging.abs.container.name= ``` ### Notice diff --git a/docs/docs/en/guide/resource/configuration.md b/docs/docs/en/guide/resource/configuration.md index 67c68a22c224..4ff2ee9d8dce 100644 --- a/docs/docs/en/guide/resource/configuration.md +++ b/docs/docs/en/guide/resource/configuration.md @@ -1,9 +1,9 @@ # Resource Center Configuration -- You could use `Resource Center` to upload text files, UDFs and other task-related files. +- You could use `Resource Center` to upload text files and other task-related files. 
- You could configure `Resource Center` to use distributed file system like [Hadoop](https://hadoop.apache.org/docs/r2.7.0/) (2.6+), [MinIO](https://github.com/minio/minio) cluster or remote storage products like [AWS S3](https://aws.amazon.com/s3/), [Alibaba Cloud OSS](https://www.aliyun.com/product/oss), [Huawei Cloud OBS](https://support.huaweicloud.com/obs/index.html) etc. -- You could configure `Resource Center` to use local file system. If you deploy `DolphinScheduler` in `Standalone` mode, you could configure it to use local file system for `Resouce Center` without the need of an external `HDFS` system or `S3`. -- Furthermore, if you deploy `DolphinScheduler` in `Cluster` mode, you could use [S3FS-FUSE](https://github.com/s3fs-fuse/s3fs-fuse) to mount `S3` or [JINDO-FUSE](https://help.aliyun.com/document_detail/187410.html) to mount `OSS` to your machines and use the local file system for `Resouce Center`. In this way, you could operate remote files as if on your local machines. +- You could configure `Resource Center` to use local file system. If you deploy `DolphinScheduler` in `Standalone` mode, you could configure it to use local file system for `Resource Center` without the need of an external `HDFS` system or `S3`. +- Furthermore, if you deploy `DolphinScheduler` in `Cluster` mode, you could use [S3FS-FUSE](https://github.com/s3fs-fuse/s3fs-fuse) to mount `S3` or [JINDO-FUSE](https://help.aliyun.com/document_detail/187410.html) to mount `OSS` to your machines and use the local file system for `Resource Center`. In this way, you could operate remote files as if on your local machines. ## Use Local File System @@ -28,74 +28,37 @@ The configuration you may need to change: ## connect AWS S3 -if you want to upload resources to `Resource Center` connected to `S3`, you need to configure `api-server/conf/common.properties` and `worker-server/conf/common.properties`. 
You can refer to the following: +if you want to upload resources to `Resource Center` connected to `S3`, you need to configure `api-server/conf/common.properties`, `api-server/conf/aws.yaml` and `worker-server/conf/common.properties`, `worker-server/conf/aws.yaml`. You can refer to the following: config the following fields ```properties -...... resource.storage.type=S3 +``` -...... - -resource.aws.access.key.id=aws_access_key_id -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=aws_secret_access_key -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=us-west-2 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s4. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint= +```yaml +aws: + s3: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: + access.key.secret: + region: + bucket.name: + endpoint: -...... ``` -## Use HDFS or Remote Object Storage +## connect OSS S3 -After version 3.0.0-alpha, if you want to upload resources to `Resource Center` connected to `HDFS`, you need to configure `api-server/conf/common.properties` and `worker-server/conf/common.properties`. 
+if you want to upload resources to `Resource Center` connected to `OSS`, you need to configure `api-server/conf/common.properties` and `worker-server/conf/common.properties`. You can refer to the following: -```properties -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# user data local directory path, please make sure the directory exists and have read write permissions -data.basedir.path=/tmp/dolphinscheduler - -# resource view suffixs -#resource.view.suffixs=txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js - -# resource storage type: LOCAL, HDFS, S3, OSS, GCS, ABS, OBS -resource.storage.type=LOCAL -# resource store on HDFS/S3/OSS path, resource file will store to this base path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended -resource.storage.upload.base.path=/tmp/dolphinscheduler - -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id=minioadmin -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=minioadmin -# The AWS Region to use. 
if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=cn-north-1 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://localhost:9000 +config the following fields +```properties # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= # alibaba cloud access key secret, required if you set resource.storage.type=OSS @@ -107,89 +70,24 @@ resource.alibaba.cloud.oss.bucket.name=dolphinscheduler # oss bucket endpoint, required if you set resource.storage.type=OSS resource.alibaba.cloud.oss.endpoint=https://oss-cn-hangzhou.aliyuncs.com -# alibaba cloud access key id, required if you set resource.storage.type=OBS +``` + +## connect OBS S3 + +if you want to upload resources to `Resource Center` connected to `OBS`, you need to configure `api-server/conf/common.properties` and `worker-server/conf/common.properties`. 
You can refer to the following: + +config the following fields + +```properties +# access key id, required if you set resource.storage.type=OBS resource.huawei.cloud.access.key.id= -# alibaba cloud access key secret, required if you set resource.storage.type=OBS +# access key secret, required if you set resource.storage.type=OBS resource.huawei.cloud.access.key.secret= # oss bucket name, required if you set resource.storage.type=OBS resource.huawei.cloud.obs.bucket.name=dolphinscheduler # oss bucket endpoint, required if you set resource.storage.type=OBS resource.huawei.cloud.obs.endpoint=obs.cn-southwest-2.huaweicloud.com -# if resource.storage.type=HDFS, the user must have the permission to create directories under the HDFS root path -resource.hdfs.root.user=hdfs -# if resource.storage.type=S3, the value like: s3a://dolphinscheduler; if resource.storage.type=HDFS and namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir -resource.hdfs.fs.defaultFS=hdfs://mycluster:8020 - -# whether to startup kerberos -hadoop.security.authentication.startup.state=false - -# java.security.krb5.conf path -java.security.krb5.conf.path=/opt/krb5.conf - -# login user from keytab username -login.user.keytab.username=hdfs-mycluster@ESZ.COM - -# login user from keytab path -login.user.keytab.path=/opt/hdfs.headless.keytab - -# kerberos expire time, the unit is hour -kerberos.expire.time=2 - - -# resourcemanager port, the default value is 8088 if not specified -resource.manager.httpaddress.port=8088 -# if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single, keep this value empty -yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx -# if resourcemanager HA is enabled or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ds1 to actual resourcemanager hostname -yarn.application.status.address=http://ds1:%s/ws/v1/cluster/apps/%s -# job history status url when 
application number threshold is reached(default 10000, maybe it was set to 1000) -yarn.job.history.status.address=http://ds1:19888/ws/v1/history/mapreduce/jobs/%s - -# datasource encryption enable -datasource.encryption.enable=false - -# datasource encryption salt -datasource.encryption.salt=!@#$%^&* - -# data quality jar directory path, it would auto discovery data quality jar from this given dir. You should keep it empty if you do not change anything in -# data-quality, it will auto discovery by dolphinscheduler itself. Change it only if you want to use your own data-quality jar and it is not in worker-server -# libs directory(but may sure your jar name start with `dolphinscheduler-data-quality`). -data-quality.jar.dir= - -#data-quality.error.output.path=/tmp/data-quality-error-data - -# Network IP gets priority, default inner outer - -# Whether hive SQL is executed in the same session -support.hive.oneSession=false - -# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions -sudo.enable=true - -# network interface preferred like eth0, default: empty -#dolphin.scheduler.network.interface.preferred= - -# network IP gets priority, default: inner outer -#dolphin.scheduler.network.priority.strategy=default - -# system env path -#dolphinscheduler.env.path=dolphinscheduler_env.sh - -# development state -development.state=false - -# rpc port -alert.rpc.port=50052 - -# set path of conda.sh -conda.path=/opt/anaconda3/etc/profile.d/conda.sh - -# Task resource limit state -task.resource.limit.state=false - -# way to collect applicationId: log(original regex match), aop -appId.collect: log ``` > **Note:** diff --git a/docs/docs/en/guide/resource/intro.md b/docs/docs/en/guide/resource/intro.md index 786d71e83434..fbb6c2cf436c 100644 --- a/docs/docs/en/guide/resource/intro.md +++ b/docs/docs/en/guide/resource/intro.md @@ -1,5 +1,5 @@ # Resource Center 
Introduction -The Resource Center is typically used for uploading files, UDF functions, and task group management. For a stand-alone +The Resource Center is typically used for uploading files and task group management. For a stand-alone environment, you can select the local file directory as the upload folder (**this operation does not require Hadoop or HDFS deployment**). Of course, you can also choose to upload to Hadoop or MinIO cluster. In this case, you need to have Hadoop (2.6+) or MinIO and other related environments. diff --git a/docs/docs/en/guide/resource/udf-manage.md b/docs/docs/en/guide/resource/udf-manage.md deleted file mode 100644 index c7235900ccfa..000000000000 --- a/docs/docs/en/guide/resource/udf-manage.md +++ /dev/null @@ -1,45 +0,0 @@ -# UDF Manage - -## Resource Management - -- The resource management and file management functions are similar. The difference is that the resource management is the UDF upload function, and the file management uploads the user programs, scripts and configuration files. -- It mainly includes the following operations: rename, download, delete, etc. -- Upload UDF resources: Same as uploading files. - -## Function Management - -### Create UDF function - -Click `Create UDF Function`, enter the UDF function parameters, select the UDF resource, and click `Submit` to create the UDF function. -Currently only temporary UDF functions for HIVE are supported. - -- UDF function name: Enter the name of the UDF function. -- Package name Class name: Enter the full path of the UDF function. -- UDF resource: Set the resource file corresponding to the created UDF function. - -![create-udf](../../../../img/new_ui/dev/resource/create-udf.png) - -## Example - -### Write UDF functions - -Users can customize the desired UDF function according to actual production requirements. Here's a function that appends "HelloWorld" to the end of any string. 
As shown below: - -![code-udf](../../../../img/new_ui/dev/resource/demo/udf-demo01.png) - -### Configure the UDF function - -Before configuring UDF functions, you need to upload the required function jar package through resource management. Then enter the function management and configure the relevant information. As shown below: - -![conf-udf](../../../../img/new_ui/dev/resource/demo/udf-demo02.png) - -### Use UDF functions - -In the process of using UDF functions, users only need to pay attention to the specific function writing, and upload the configuration through the resource center. The system will automatically configure the create function statement, refer to the following: [SqlTask](https://github.com/apache/dolphinscheduler/blob/923f3f38e3271d7f1d22b3abc3497cecb6957e4a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java#L507-L531) - -Enter the workflow to define an SQL node, the data source type is HIVE, and the data source instance type is HIVE/IMPALA. - -- SQL statement: `select HwUdf("abc");` This function is used in the same way as the built-in functions, and can be accessed directly using the function name. -- UDF function: Select the one configured for the resource center. - -![use-udf](../../../../img/new_ui/dev/resource/demo/udf-demo03.png) diff --git a/docs/docs/en/guide/security/security.md b/docs/docs/en/guide/security/security.md index d892571b29ce..646243a9641c 100644 --- a/docs/docs/en/guide/security/security.md +++ b/docs/docs/en/guide/security/security.md @@ -103,8 +103,8 @@ public void doPOSTParam()throws Exception{ ## Granted Permissions -* Granted permissions include project permissions, resource permissions, data source permissions, and UDF function permissions. -* Administrators can authorize projects, resources, data sources, and UDF functions that ordinary users do not create. 
Because the authorization methods of projects, resources, data sources and UDF functions are all the same, the project authorization is used as an example to introduce.
+* Granted permissions include project permissions, resource permissions, and data source permissions.
+* Administrators can authorize projects, resources, and data sources that ordinary users do not create. Because the authorization methods of projects, resources, and data sources are all the same, the project authorization is used as an example to introduce.
 * Note: For projects created by the user, the user has all permissions. Therefore, permission changes to projects created by users themselves are not valid.
 
 - The administrator enters the `Security Center -> User Management` page, and clicks the "Authorize" button of the user to be authorized, as shown in the following figure:
@@ -118,7 +118,7 @@ public void doPOSTParam()throws Exception{
 
 ![no-permission-error](../../../../img/new_ui/dev/security/no-permission-error.png)
 
-- Resources, data sources, and UDF function authorization are the same as project authorization.
+- Resource and data source authorization is the same as project authorization.
 
 ## Worker Grouping
 
diff --git a/docs/docs/en/guide/start/docker.md b/docs/docs/en/guide/start/docker.md
index b98d0572ae22..f29bcd5c75fe 100644
--- a/docs/docs/en/guide/start/docker.md
+++ b/docs/docs/en/guide/start/docker.md
@@ -128,7 +128,7 @@ and use `admin` and `dolphinscheduler123` as default username and password in th
 ![login](../../../../img/new_ui/dev/quick-start/login.png)
 
 > Note: If you start the services by the way [using exists PostgreSQL ZooKeeper](#using-exists-postgresql-zookeeper), and
-> strating with multiple machine, you should change URL domain from `localhost` to IP or hostname the api server running.
+> starting with multiple machines, you should change the URL domain from `localhost` to the IP or hostname where the api server is running.
## Change Environment Variable diff --git a/docs/docs/en/guide/task/datafactory.md b/docs/docs/en/guide/task/datafactory.md index 1936db81f179..67d00c122736 100644 --- a/docs/docs/en/guide/task/datafactory.md +++ b/docs/docs/en/guide/task/datafactory.md @@ -19,11 +19,11 @@ DolphinScheduler DataFactory functions: ### Application Permission Setting -First, visit the `Subcription` page and choose `Access control (IAM)`, then click `Add role assignment` to the authorization page. -![Subcription-IAM](../../../../img/tasks/demo/datafactory_auth1.png) +First, visit the `Subscription` page and choose `Access control (IAM)`, then click `Add role assignment` to the authorization page. +![Subscription-IAM](../../../../img/tasks/demo/datafactory_auth1.png) After that, select `Contributor` role which satisfy functions calls in data factory. Then click `Members` page, and click `Select members`. Search application name or application `Object ID` to assign `Contributor` role to application. -![Subcription-Role](../../../../img/tasks/demo/datafactory_auth2.png) +![Subscription-Role](../../../../img/tasks/demo/datafactory_auth2.png) ## Configurations diff --git a/docs/docs/en/guide/task/dinky.md b/docs/docs/en/guide/task/dinky.md index c229c42ded2d..b758fa8691e2 100644 --- a/docs/docs/en/guide/task/dinky.md +++ b/docs/docs/en/guide/task/dinky.md @@ -17,11 +17,12 @@ it will call `Dinky API` to trigger dinky task. Click [here](http://www.dlink.to - Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md) `Default Task Parameters` section for default parameters. -| **Parameter** | **Description** | -|---------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Dinky Address | The url for a dinky server. | -| Dinky Task ID | The unique task id for a dinky task. 
| -| Online Task | Specify whether the current dinky job is online. If yes, the submitted job can only be submitted successfully when it is published and there is no corresponding Flink job instance running. | +| **Parameter** | **Description** | +|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Dinky Address | The URL for the Dinky service, e.g., http://localhost:8888. | +| Dinky Task ID | The unique task id for a dinky task. | +| Online Task | Specify whether the current dinky job is online. If yes, the submitted job can only be submitted successfully when it is published and there is no corresponding Flink job instance running. | +| Custom Parameters | Starting from Dinky 1.0, support for passing custom parameters is available. Currently, only `IN` type inputs are supported, with no support for `OUT` type outputs. Supports the `${param}` syntax for retrieving global or local dynamic parameters. | ## Task Example diff --git a/docs/docs/en/guide/task/dms.md b/docs/docs/en/guide/task/dms.md index a19ec4ba4bbb..89cf2ef4dbdc 100644 --- a/docs/docs/en/guide/task/dms.md +++ b/docs/docs/en/guide/task/dms.md @@ -73,14 +73,17 @@ Parameters of restarting the task by interface ## Environment to prepare -Some AWS configuration is required, modify a field in file `common.properties` +Some AWS configuration is required, modify a field in file `aws.yaml` ```yaml -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id= -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key= -# The AWS Region to use. 
if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region= +dms: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: + access.key.secret: + region: + endpoint: ``` diff --git a/docs/docs/en/guide/task/kubernetes.md b/docs/docs/en/guide/task/kubernetes.md index 332669a60cbe..871a020294c9 100644 --- a/docs/docs/en/guide/task/kubernetes.md +++ b/docs/docs/en/guide/task/kubernetes.md @@ -26,7 +26,7 @@ K8S task type used to execute a batch task. In this task, the worker submits the | Command | The container execution command (yaml-style array), for example: ["printenv"] | | Args | The args of execution command (yaml-style array), for example: ["HOSTNAME", "KUBERNETES_PORT"] | | Custom label | The customized labels for k8s Job. | -| Node selector | The label selectors for running k8s pod. Different value in value set should be seperated by comma, for example: `value1,value2`. You can refer to https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/node-selector-requirement/ for configuration of different operators. | +| Node selector | The label selectors for running k8s pod. Different value in value set should be separated by comma, for example: `value1,value2`. You can refer to https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/node-selector-requirement/ for configuration of different operators. | | Custom parameter | It is a local user-defined parameter for K8S task, these params will pass to container as environment variables. 
| ## Task Example diff --git a/docs/docs/en/guide/task/mlflow.md b/docs/docs/en/guide/task/mlflow.md index a14ac1248322..d500e4cbf68e 100644 --- a/docs/docs/en/guide/task/mlflow.md +++ b/docs/docs/en/guide/task/mlflow.md @@ -148,7 +148,7 @@ After this, you can visit the MLflow service (`http://localhost:5000`) page to v ### Preset Algorithm Repository Configuration -If you can't access github, you can modify the following fields in the `commom.properties` configuration file to replace the github address with an accessible address. +If you can't access github, you can modify the following fields in the `common.properties` configuration file to replace the github address with an accessible address. ```yaml # mlflow task plugin preset repository diff --git a/docs/docs/en/guide/task/pigeon.md b/docs/docs/en/guide/task/pigeon.md deleted file mode 100644 index 43d538bf8fad..000000000000 --- a/docs/docs/en/guide/task/pigeon.md +++ /dev/null @@ -1,22 +0,0 @@ -# Pigeon - -## Overview - -Pigeon is a task used to trigger remote tasks, acquire logs or status by calling remote WebSocket service. It is DolphinScheduler uses a remote WebSocket service to call tasks. - -## Create Task - -- Click `Project Management -> Project Name -> Workflow Definition`, and click the `Create Workflow` button to enter the DAG editing page. -- Drag from the toolbar to the canvas to create a new Pigeon task. - -## Task Parameters - -[//]: # (TODO: use the commented anchor below once our website template supports this syntax) -[//]: # (- Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md#default-task-parameters) `Default Task Parameters` section for default parameters.) - -- Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md) `Default Task Parameters` section for default parameters. - -| **Parameter** | **Description** | -|------------------|---------------------------------------| -| Target task name | Target task name of this Pigeon node. 
| -
 
diff --git a/docs/docs/en/guide/task/sagemaker.md b/docs/docs/en/guide/task/sagemaker.md
index 7782377130cf..b520dd33cc1f 100644
--- a/docs/docs/en/guide/task/sagemaker.md
+++ b/docs/docs/en/guide/task/sagemaker.md
@@ -35,14 +35,17 @@ The task plugin are shown as follows:
 
 ## Environment to prepare
 
-Some AWS configuration is required, modify a field in file `common.properties`
+Some AWS configuration is required, modify a field in file `aws.yaml`
 
 ```yaml
-# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
-resource.aws.access.key.id=
-# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required
-resource.aws.secret.access.key=
-# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required
-resource.aws.region=
+sagemaker:
+  # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider
+  # AWSStaticCredentialsProvider: use the access key and secret key to authenticate
+  # InstanceProfileCredentialsProvider: use the IAM role to authenticate
+  credentials.provider.type: AWSStaticCredentialsProvider
+  access.key.id:
+  access.key.secret:
+  region:
+  endpoint:
 ```
 
diff --git a/docs/docs/en/guide/task/spark.md b/docs/docs/en/guide/task/spark.md
index 3e0f83b253cf..930f2cd0b00b 100644
--- a/docs/docs/en/guide/task/spark.md
+++ b/docs/docs/en/guide/task/spark.md
@@ -24,6 +24,7 @@ Spark task type for executing Spark application. When executing the Spark task, 
|----------------------------|------------------------------------------------------------------------------------------------------------------------------------|
 | Program type | Supports Java, Scala, Python, and SQL. |
 | The class of main function | The **full path** of Main Class, the entry point of the Spark program. |
+| Master | The master URL for the cluster. 
| | Main jar package | The Spark jar package (upload by Resource Center). | | SQL scripts | SQL statements in .sql files that Spark sql runs. | | Deployment mode |
  • spark submit supports three modes: cluster, client and local.
  • spark sql supports client and local modes.
| diff --git a/docs/docs/en/guide/task/sql.md b/docs/docs/en/guide/task/sql.md index 0cf2d98f5ef2..08f4e92392b7 100644 --- a/docs/docs/en/guide/task/sql.md +++ b/docs/docs/en/guide/task/sql.md @@ -21,12 +21,11 @@ Refer to [datasource-setting](../howto/datasource-setting.md) `DataSource Center - Please refer to [DolphinScheduler Task Parameters Appendix](appendix.md) `Default Task Parameters` section for default parameters. | **Parameter** | **Description** | -|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| 
+|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---| | Data source | Select the corresponding DataSource. | | SQL type | Supports query and non-query.
  • Query: supports `DML select` type commands, which return a result set. You can specify three templates for email notification as form, attachment or form attachment;
  • Non-query: support `DDL` all commands and `DML update, delete, insert` three types of commands;
    • Segmented execution symbol: When the data source does not support executing multiple SQL statements at a time, the symbol for splitting SQL statements is provided to call the data source execution method multiple times. Example: 1. When the Hive data source is selected as the data source, please do not use `;\n` due to Hive JDBC does not support executing multiple SQL statements at one time; 2. When the MySQL data source is selected as the data source, and multi-segment SQL statements are to be executed, this parameter needs to be filled in with a semicolon `;. Because the MySQL data source does not support executing multiple SQL statements at one time.
| | SQL parameter | The input parameter format is `key1=value1;key2=value2...`. | -| SQL statement | SQL statement. | -| UDF function | For Hive DataSources, you can refer to UDF functions created in the resource center, but other DataSource do not support UDF functions. | +| SQL statement | SQL statement. | | | Custom parameters | SQL task type, and stored procedure is a custom parameter order, to set customized parameter type and data type for the method is the same as the stored procedure task type. The difference is that the custom parameter of the SQL task type replaces the `${variable}` in the SQL statement. | | Pre-SQL | Pre-SQL executes before the SQL statement. | | Post-SQL | Post-SQL executes after the SQL statement. | @@ -57,5 +56,3 @@ Table created in the Pre-SQL, after use in the SQL statement, cleaned in the Pos ## Note Pay attention to the selection of SQL type. If it is an insert operation, need to change to "Non-Query" type. - -To compatible with long session,UDF function are created by the syntax(CREATE OR REPLACE) diff --git a/docs/docs/en/guide/upgrade/incompatible.md b/docs/docs/en/guide/upgrade/incompatible.md index f45af712c3a9..d20dba51ba97 100644 --- a/docs/docs/en/guide/upgrade/incompatible.md +++ b/docs/docs/en/guide/upgrade/incompatible.md @@ -26,3 +26,8 @@ This document records the incompatible updates between each version. 
You need to * Add required field `database` in /datasources/tables && /datasources/tableColumns Api [#14406](https://github.com/apache/dolphinscheduler/pull/14406) +## 3.3.0 + +* Remove the `udf-manage` function from the `resource center` ([#16209]) +* Remove the `Pigeon` from the `Task Plugin` ([#16218]) + diff --git a/docs/docs/zh/architecture/configuration.md b/docs/docs/zh/architecture/configuration.md index 0b3ea9bc5bd1..d377e2ed973a 100644 --- a/docs/docs/zh/architecture/configuration.md +++ b/docs/docs/zh/architecture/configuration.md @@ -130,42 +130,44 @@ export DOLPHINSCHEDULER_OPTS=" > 不建议设置"-XX:DisableExplicitGC" , DolphinScheduler使用Netty进行通讯,设置该参数,可能会导致内存泄漏. > ->> 如果设置"-Djava.net.preferIPv6Addresses=true" 将会使用ipv6的IP地址, 如果设置"-Djava.net.preferIPv4Addresses=true"将会使用ipv4的IP地址, 如果都不设置,将会随机使用ipv4或者ipv6. +>> 如果设置"-Djava.net.preferIPv6Addresses=true" 将会使用ipv6的IP地址, 如果设置"-Djava.net.preferIPv4Addresses=true" +>> 将会使用ipv4的IP地址, 如果都不设置,将会随机使用ipv4或者ipv6. ## 数据库连接相关配置 在DolphinScheduler中使用Spring Hikari对数据库连接进行管理,配置文件位置: -|服务名称| 配置文件 | -|--|--| -|Master Server | `master-server/conf/application.yaml`| -|Api Server| `api-server/conf/application.yaml`| -|Worker Server| `worker-server/conf/application.yaml`| -|Alert Server| `alert-server/conf/application.yaml`| +| 服务名称 | 配置文件 | +|---------------|---------------------------------------| +| Master Server | `master-server/conf/application.yaml` | +| Api Server | `api-server/conf/application.yaml` | +| Worker Server | `worker-server/conf/application.yaml` | +| Alert Server | `alert-server/conf/application.yaml` | 默认配置如下: -|参数 | 默认值| 描述| -|--|--|--| -|spring.datasource.driver-class-name| org.postgresql.Driver |数据库驱动| -|spring.datasource.url| jdbc:postgresql://127.0.0.1:5432/dolphinscheduler |数据库连接地址| -|spring.datasource.username|root|数据库用户名| -|spring.datasource.password|root|数据库密码| -|spring.datasource.hikari.connection-test-query|select 1|检测连接是否有效的sql| -|spring.datasource.hikari.minimum-idle| 5|最小空闲连接池数量| 
-|spring.datasource.hikari.auto-commit|true|是否自动提交| -|spring.datasource.hikari.pool-name|DolphinScheduler|连接池名称| -|spring.datasource.hikari.maximum-pool-size|50|连接池最大连接数| -|spring.datasource.hikari.connection-timeout|30000|连接超时时长| -|spring.datasource.hikari.idle-timeout|600000|空闲连接存活最大时间| -|spring.datasource.hikari.leak-detection-threshold|0|连接泄露检测阈值| -|spring.datasource.hikari.initialization-fail-timeout|1|连接池初始化失败timeout| - -DolphinScheduler同样可以通过设置环境变量进行数据库连接相关的配置, 将以上小写字母转成大写并把`.`换成`_`作为环境变量名, 设置值即可。 - -## Zookeeper相关配置 - -DolphinScheduler使用Zookeeper进行集群管理、容错、事件监听等功能,配置文件位置: +| 参数 | 默认值 | 描述 | +|------------------------------------------------------|---------------------------------------------------|-----------------| +| spring.datasource.driver-class-name | org.postgresql.Driver | 数据库驱动 | +| spring.datasource.url | jdbc:postgresql://127.0.0.1:5432/dolphinscheduler | 数据库连接地址 | +| spring.datasource.username | root | 数据库用户名 | +| spring.datasource.password | root | 数据库密码 | +| spring.datasource.hikari.connection-test-query | select 1 | 检测连接是否有效的sql | +| spring.datasource.hikari.minimum-idle | 5 | 最小空闲连接池数量 | +| spring.datasource.hikari.auto-commit | true | 是否自动提交 | +| spring.datasource.hikari.pool-name | DolphinScheduler | 连接池名称 | +| spring.datasource.hikari.maximum-pool-size | 50 | 连接池最大连接数 | +| spring.datasource.hikari.connection-timeout | 30000 | 连接超时时长 | +| spring.datasource.hikari.idle-timeout | 600000 | 空闲连接存活最大时间 | +| spring.datasource.hikari.leak-detection-threshold | 0 | 连接泄露检测阈值 | +| spring.datasource.hikari.initialization-fail-timeout | 1 | 连接池初始化失败timeout | + +DolphinScheduler同样可以通过设置环境变量进行数据库连接相关的配置, 将以上小写字母转成大写并把`.`换成`_`作为环境变量名, +设置值即可。 + +## 注册中心相关配置 + +DolphinScheduler默认使用Zookeeper进行集群管理、容错、事件监听等功能,配置文件位置: |服务名称| 配置文件 | |--|--| |Master Server | `master-server/conf/application.yaml`| @@ -174,20 +176,23 @@ DolphinScheduler使用Zookeeper进行集群管理、容错、事件监听等功 默认配置如下: -|参数 |默认值| 描述| -|--|--|--| 
-|registry.zookeeper.namespace|dolphinscheduler|Zookeeper集群使用的namespace| -|registry.zookeeper.connect-string|localhost:2181| Zookeeper集群连接信息| -|registry.zookeeper.retry-policy.base-sleep-time|60ms|基本重试时间差| -|registry.zookeeper.retry-policy.max-sleep|300ms|最大重试时间| -|registry.zookeeper.retry-policy.max-retries|5|最大重试次数| -|registry.zookeeper.session-timeout|30s|session超时时间| -|registry.zookeeper.connection-timeout|30s|连接超时时间| -|registry.zookeeper.block-until-connected|600ms|阻塞直到连接成功的等待时间| -|registry.zookeeper.digest|{用户名:密码}|如果zookeeper打开了acl,则需要填写认证信息访问znode,认证信息格式为{用户名}:{密码}。关于Zookeeper ACL详见[https://zookeeper.apache.org/doc/r3.4.14/zookeeperAdmin.html](Apache Zookeeper官方文档)| +| 参数 | 默认值 | 描述 | +|-------------------------------------------------|------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------| +| registry.zookeeper.namespace | dolphinscheduler | Zookeeper集群使用的namespace | +| registry.zookeeper.connect-string | localhost:2181 | Zookeeper集群连接信息 | +| registry.zookeeper.retry-policy.base-sleep-time | 60ms | 基本重试时间差 | +| registry.zookeeper.retry-policy.max-sleep | 300ms | 最大重试时间 | +| registry.zookeeper.retry-policy.max-retries | 5 | 最大重试次数 | +| registry.zookeeper.session-timeout | 30s | session超时时间 | +| registry.zookeeper.connection-timeout | 30s | 连接超时时间 | +| registry.zookeeper.block-until-connected | 600ms | 阻塞直到连接成功的等待时间 | +| registry.zookeeper.digest | {用户名:密码} | 如果zookeeper打开了acl,则需要填写认证信息访问znode,认证信息格式为{用户名}:{密码}。关于Zookeeper ACL详见[https://zookeeper.apache.org/doc/r3.4.14/zookeeperAdmin.html](Apache Zookeeper官方文档) | DolphinScheduler同样可以通过`bin/env/dolphinscheduler_env.sh`进行Zookeeper相关的配置。 +如果使用etcd作为注册中心,详细请参考[链接](https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/README.md)。 
+如果使用jdbc作为注册中心,详细请参考[链接](https://github.com/apache/dolphinscheduler/blob/dev/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md)。 + ## common.properties [hadoop、s3、yarn配置] common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId收集相关的配置,配置文件位置: @@ -200,8 +205,8 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId 默认配置如下: -| 参数 | 默认值 | 描述 | -|-----------------------------------------------|--------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 参数 | 默认值 | 描述 | +|-----------------------------------------------|--------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | data.basedir.path | /tmp/dolphinscheduler | 本地工作目录,用于存放临时文件 | | resource.storage.type | NONE | 资源文件存储类型: HDFS,S3,OSS,GCS,ABS,NONE | | resource.upload.path | /dolphinscheduler | 资源文件存储路径 | @@ -279,48 +284,54 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId 位置:`master-server/conf/application.yaml` -| 参数 | 默认值 | 描述 | -|--------------------------------------------------------|--------------|-----------------------------------------------------------------------------------| -| master.listen-port | 5678 | master监听端口 | -| master.fetch-command-num | 10 | master拉取command数量 | -| master.pre-exec-threads | 10 | master准备执行任务的数量,用于限制并行的command | -| master.exec-threads | 100 | master工作线程数量,用于限制并行的流程实例数量 | -| master.dispatch-task-number | 3 | master每个批次的派发任务数量 | -| master.host-selector | lower_weight | master host选择器,用于选择合适的worker执行任务,可选值: random, round_robin, lower_weight | -| master.max-heartbeat-interval | 10s | master最大心跳间隔 | -| 
master.task-commit-retry-times | 5 | 任务重试次数 | -| master.task-commit-interval | 1000 | 任务提交间隔,单位为毫秒 | -| master.state-wheel-interval | 5 | 轮询检查状态时间 | -| master.max-cpu-load-avg | 1 | master最大cpuload均值,只有高于系统cpuload均值时,master服务才能调度任务. 默认值为1: 会使用100%的CPU | -| master.reserved-memory | 0.3 | master预留内存,只有低于系统可用内存时,master服务才能调度任务. 默认值为0.3:当系统内存低于30%时会停止调度新的工作流 | -| master.failover-interval | 10 | failover间隔,单位为分钟 | -| master.kill-application-when-task-failover | true | 当任务实例failover时,是否kill掉yarn或k8s application | -| master.registry-disconnect-strategy.strategy | stop | 当Master与注册中心失联之后采取的策略, 默认值是: stop. 可选值包括: stop, waiting | -| master.registry-disconnect-strategy.max-waiting-time | 100s | 当Master与注册中心失联之后重连时间, 之后当strategy为waiting时,该值生效。 该值表示当Master与注册中心失联时会在给定时间之内进行重连, | -| 在给定时间之内重连失败将会停止自己,在重连时,Master会丢弃目前正在执行的工作流,值为0表示会无限期等待 | -| master.master.worker-group-refresh-interval | 10s | 定期将workerGroup从数据库中同步到内存的时间间隔 | +| 参数 | 默认值 | 描述 | +|-----------------------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------| +| master.listen-port | 5678 | master监听端口 | +| master.pre-exec-threads | 10 | master准备执行任务的数量,用于限制并行的command | +| master.exec-threads | 100 | master工作线程数量,用于限制并行的流程实例数量 | +| master.dispatch-task-number | 3 | master每个批次的派发任务数量 | +| master.host-selector | lower_weight | master host选择器,用于选择合适的worker执行任务,可选值: random, round_robin, lower_weight | +| master.max-heartbeat-interval | 10s | master最大心跳间隔 | +| master.task-commit-retry-times | 5 | 任务重试次数 | +| master.task-commit-interval | 1000 | 任务提交间隔,单位为毫秒 | +| master.state-wheel-interval | 5 | 轮询检查状态时间 | +| master.server-load-protection.enabled | true | 是否开启系统保护策略 | +| master.server-load-protection.max-system-cpu-usage-percentage-thresholds | 0.7 | master最大系统cpu使用值,只有当前系统cpu使用值低于最大系统cpu使用值,master服务才能调度任务. 
默认值为0.7: 会使用70%的操作系统CPU | +| master.server-load-protection.max-jvm-cpu-usage-percentage-thresholds | 0.7 | master最大JVM cpu使用值,只有当前JVM cpu使用值低于最大JVM cpu使用值,master服务才能调度任务. 默认值为0.7: 会使用70%的JVM CPU | +| master.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | master最大系统 内存使用值,只有当前系统内存使用值低于最大系统内存使用值,master服务才能调度任务. 默认值为0.7: 会使用70%的操作系统内存 | +| master.server-load-protection.max-disk-usage-percentage-thresholds | 0.7 | master最大系统磁盘使用值,只有当前系统磁盘使用值低于最大系统磁盘使用值,master服务才能调度任务. 默认值为0.7: 会使用70%的操作系统磁盘空间 | +| master.failover-interval | 10 | failover间隔,单位为分钟 | +| master.kill-application-when-task-failover | true | 当任务实例failover时,是否kill掉yarn或k8s application | +| master.registry-disconnect-strategy.strategy | stop | 当Master与注册中心失联之后采取的策略, 默认值是: stop. 可选值包括: stop, waiting | +| master.registry-disconnect-strategy.max-waiting-time | 100s | 当Master与注册中心失联之后重连时间, 之后当strategy为waiting时,该值生效。 该值表示当Master与注册中心失联时会在给定时间之内进行重连, 在给定时间之内重连失败将会停止自己,在重连时,Master会丢弃目前正在执行的工作流,值为0表示会无限期等待 | +| master.master.worker-group-refresh-interval | 10s | 定期将workerGroup从数据库中同步到内存的时间间隔 | +| master.command-fetch-strategy.type | ID_SLOT_BASED | Command拉取策略, 目前仅支持 `ID_SLOT_BASED` | +| master.command-fetch-strategy.config.id-step | 1 | 数据库中t_ds_command的id自增步长 | +| master.command-fetch-strategy.config.fetch-size | 10 | master拉取command数量 | ## Worker Server相关配置 位置:`worker-server/conf/application.yaml` -| 参数 | 默认值 | 描述 | -|------------------------------------------------------|-----------|-------------------------------------------------------------------------------------------------------------------------------------------| -| worker.listen-port | 1234 | worker监听端口 | -| worker.exec-threads | 100 | worker工作线程数量,用于限制并行的任务实例数量 | -| worker.max-heartbeat-interval | 10s | worker最大心跳间隔 | -| worker.host-weight | 100 | 派发任务时,worker主机的权重 | -| worker.tenant-auto-create | true | 租户对应于系统的用户,由worker提交作业.如果系统没有该用户,则在参数worker.tenant.auto.create为true后自动创建。 | -| worker.max-cpu-load-avg | 1 | 
worker最大cpuload均值,只有高于系统cpuload均值时,worker服务才能被派发任务. 默认值为1: 会使用100%的CPU | -| worker.reserved-memory | 0.3 | worker预留内存,只有低于系统可用内存时,worker服务才能被派发任务. 默认值为0.3:当系统内存低于30%时会停止调度新的工作流 | -| worker.alert-listen-host | localhost | alert监听host | -| worker.alert-listen-port | 50052 | alert监听端口 | -| worker.registry-disconnect-strategy.strategy | stop | 当Worker与注册中心失联之后采取的策略, 默认值是: stop. 可选值包括: stop, waiting | -| worker.registry-disconnect-strategy.max-waiting-time | 100s | 当Worker与注册中心失联之后重连时间, 之后当strategy为waiting时,该值生效。 该值表示当Worker与注册中心失联时会在给定时间之内进行重连, 在给定时间之内重连失败将会停止自己,在重连时,Worker会丢弃kill正在执行的任务。值为0表示会无限期等待 | -| worker.task-execute-threads-full-policy | REJECT | 如果是 REJECT, 当Worker中等待队列中的任务数达到exec-threads时, Worker将会拒绝接下来新接收的任务,Master将会重新分发该任务; 如果是 CONTINUE, Worker将会接收任务,放入等待队列中等待空闲线程去执行该任务 | -| worker.tenant-config.auto-create-tenant-enabled | true | 租户对应于系统的用户,由worker提交作业.如果系统没有该用户,则在参数worker.tenant.auto.create为true后自动创建。 | -| worker.tenant-config.distributed-tenant-enabled | false | 如果设置为true, auto-create-tenant-enabled 将会不起作用。 | -| worker.tenant-config.default-tenant-enabled | false | 如果设置为true, 将会使用worker服务启动用户作为 `default` 租户。 | +| 参数 | 默认值 | 描述 | +|-----------------------------------------------------------------------------|-----------|-------------------------------------------------------------------------------------------------------------------------------------------| +| worker.listen-port | 1234 | worker监听端口 | +| worker.exec-threads | 100 | worker工作线程数量,用于限制并行的任务实例数量 | +| worker.max-heartbeat-interval | 10s | worker最大心跳间隔 | +| worker.host-weight | 100 | 派发任务时,worker主机的权重 | +| worker.tenant-auto-create | true | 租户对应于系统的用户,由worker提交作业.如果系统没有该用户,则在参数worker.tenant.auto.create为true后自动创建。 | +| worker.server-load-protection.enabled | true | 是否开启系统保护策略 | +| worker.server-load-protection.max-system-cpu-usage-percentage-thresholds | 0.7 | worker最大系统cpu使用值,只有当前系统cpu使用值低于最大系统cpu使用值,worker服务才能接收任务. 
默认值为0.7: 会使用70%的操作系统CPU | +| worker.server-load-protection.max-jvm-cpu-usage-percentage-thresholds | 0.7 | worker最大JVM cpu使用值,只有当前JVM cpu使用值低于最大JVM cpu使用值,worker服务才能接收任务. 默认值为0.7: 会使用70%的JVM CPU | +| worker.server-load-protection.max-system-memory-usage-percentage-thresholds | 0.7 | worker最大系统 内存使用值,只有当前系统内存使用值低于最大系统内存使用值,worker服务才能接收任务. 默认值为0.7: 会使用70%的操作系统内存 | +| worker.server-load-protection.max-disk-usage-percentage-thresholds | 0.7 | worker最大系统磁盘使用值,只有当前系统磁盘使用值低于最大系统磁盘使用值,worker服务才能接收任务. 默认值为0.7: 会使用70%的操作系统磁盘空间 | +| worker.alert-listen-host | localhost | alert监听host | +| worker.alert-listen-port | 50052 | alert监听端口 | +| worker.registry-disconnect-strategy.strategy | stop | 当Worker与注册中心失联之后采取的策略, 默认值是: stop. 可选值包括: stop, waiting | +| worker.registry-disconnect-strategy.max-waiting-time | 100s | 当Worker与注册中心失联之后重连时间, 之后当strategy为waiting时,该值生效。 该值表示当Worker与注册中心失联时会在给定时间之内进行重连, 在给定时间之内重连失败将会停止自己,在重连时,Worker会丢弃kill正在执行的任务。值为0表示会无限期等待 | +| worker.task-execute-threads-full-policy | REJECT | 如果是 REJECT, 当Worker中等待队列中的任务数达到exec-threads时, Worker将会拒绝接下来新接收的任务,Master将会重新分发该任务; 如果是 CONTINUE, Worker将会接收任务,放入等待队列中等待空闲线程去执行该任务 | +| worker.tenant-config.auto-create-tenant-enabled | true | 租户对应于系统的用户,由worker提交作业.如果系统没有该用户,则在参数worker.tenant.auto.create为true后自动创建。 | +| worker.tenant-config.default-tenant-enabled | false | 如果设置为true, 将会使用worker服务启动用户作为 `default` 租户。 | ## Alert Server相关配置 @@ -366,7 +377,9 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId | spring.quartz.properties.org.quartz.threadPool.threadPriority | 5 | | spring.quartz.properties.org.quartz.threadPool.class | org.quartz.simpl.SimpleThreadPool | -因为*Api Server*不会启动*Quartz Scheduler*实例,只会作为Scheduler客户端使用,因此它的Quartz线程池将会使用`QuartzZeroSizeThreadPool`。`QuartzZeroSizeThreadPool`不会启动任何线程。具体的默认配置如下: +因为*Api Server*不会启动*Quartz Scheduler* +实例,只会作为Scheduler客户端使用,因此它的Quartz线程池将会使用`QuartzZeroSizeThreadPool`。`QuartzZeroSizeThreadPool` +不会启动任何线程。具体的默认配置如下: | Parameters | Default value | 
|------------------------------------------------------|-----------------------------------------------------------------------| @@ -374,7 +387,8 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId ## dolphinscheduler_env.sh [环境变量配置] -通过类似shell方式提交任务的的时候,会加载该配置文件中的环境变量到主机中。涉及到的 `JAVA_HOME` 任务类型的环境配置,其中任务类型主要有: Shell任务、Python任务、Spark任务、Flink任务、Datax任务等等。 +通过类似shell方式提交任务的的时候,会加载该配置文件中的环境变量到主机中。涉及到的 `JAVA_HOME` +任务类型的环境配置,其中任务类型主要有: Shell任务、Python任务、Spark任务、Flink任务、Datax任务等等。 ```bash # JAVA_HOME, will use it to start DolphinScheduler server @@ -401,9 +415,10 @@ export FLINK_ENV_JAVA_OPTS="-javaagent:${DOLPHINSCHEDULER_HOME}/tools/libs/aspec ## 日志相关配置 -|服务名称| 配置文件 | -|--|--| -|Master Server | `master-server/conf/logback-spring.xml`| -|Api Server| `api-server/conf/logback-spring.xml`| -|Worker Server| `worker-server/conf/logback-spring.xml`| -|Alert Server| `alert-server/conf/logback-spring.xml`| +| 服务名称 | 配置文件 | +|---------------|-----------------------------------------| +| Master Server | `master-server/conf/logback-spring.xml` | +| Api Server | `api-server/conf/logback-spring.xml` | +| Worker Server | `worker-server/conf/logback-spring.xml` | +| Alert Server | `alert-server/conf/logback-spring.xml` | + diff --git a/docs/docs/zh/architecture/metadata.md b/docs/docs/zh/architecture/metadata.md index 7f82a7360979..14999e0b55b0 100644 --- a/docs/docs/zh/architecture/metadata.md +++ b/docs/docs/zh/architecture/metadata.md @@ -20,8 +20,6 @@ - 一个用户可以有多个项目,用户项目授权通过`t_ds_relation_project_user`表完成project_id和user_id的关系绑定;
- `t_ds_projcet`表中的`user_id`表示创建该项目的用户,`t_ds_relation_project_user`表中的`user_id`表示对项目有权限的用户;
-- `t_ds_resources`表中的`user_id`表示创建该资源的用户,`t_ds_relation_resources_user`中的`user_id`表示对资源有权限的用户;
-- `t_ds_udfs`表中的`user_id`表示创建该UDF的用户,`t_ds_relation_udfs_user`表中的`user_id`表示对UDF有权限的用户;
### 项目 - 租户 - 工作流定义 - 定时 diff --git a/docs/docs/zh/architecture/task-structure.md b/docs/docs/zh/architecture/task-structure.md index aad8454a19d0..656e57eed708 100644 --- a/docs/docs/zh/architecture/task-structure.md +++ b/docs/docs/zh/architecture/task-structure.md @@ -145,7 +145,6 @@ 5| |type |String | 数据库类型 6| |datasource |Int | 数据源id 7| |sql |String | 查询SQL语句 -8| |udfs | String| udf函数|UDF函数id,以逗号分隔. 9| |sqlType | String| SQL节点类型 |0 查询 , 1 非查询 10| |title |String | 邮件标题 11| |receivers |String | 收件人 @@ -179,7 +178,6 @@ "type":"MYSQL", "datasource":1, "sql":"select id , namge , age from emp where id = ${id}", - "udfs":"", "sqlType":"0", "title":"xxxx@xxx.com", "receivers":"xxxx@xxx.com", diff --git a/docs/docs/zh/contribute/development-environment-setup.md b/docs/docs/zh/contribute/development-environment-setup.md index 3c97087b1829..8e7fe502c3c9 100644 --- a/docs/docs/zh/contribute/development-environment-setup.md +++ b/docs/docs/zh/contribute/development-environment-setup.md @@ -148,7 +148,7 @@ pnpm run dev #### zookeeper -下载 [ZooKeeper](https://www.apache.org/dyn/closer.lua/zookeeper/zookeeper-3.6.3),解压 +下载 [ZooKeeper](https://zookeeper.apache.org/releases.html),解压 * 在 ZooKeeper 的目录下新建 zkData、zkLog文件夹 * 将 conf 目录下的 `zoo_sample.cfg` 文件,复制一份,重命名为 `zoo.cfg`,修改其中数据和日志的配置,如: diff --git a/docs/docs/zh/contribute/frontend-development.md b/docs/docs/zh/contribute/frontend-development.md index 42eb2973ddf1..46d2e9c82de0 100644 --- a/docs/docs/zh/contribute/frontend-development.md +++ b/docs/docs/zh/contribute/frontend-development.md @@ -33,7 +33,7 @@ Node包下载 (注意版本 v12.20.2) `https://nodejs.org/download/release/v12.2 > 如果 `npm install` 速度非常慢,你可以设置淘宝镜像 ``` -npm config set registry http://registry.npm.taobao.org/ +npm config set registry http://registry.npmmirror.com/ ``` - 修改 `dolphinscheduler-ui/.env` 文件中的 `API_BASE`,用于跟后端交互: @@ -163,9 +163,6 @@ npm install node-sass --unsafe-perm #单独安装node-sass依赖 ``` | 文件管理 -| UDF管理 - - 资源管理 - - 函数管理 ``` 数据源管理 => 
`http://localhost:8888/#/datasource/list` diff --git a/docs/docs/zh/contribute/join/contribute.md b/docs/docs/zh/contribute/join/contribute.md index fb74d24d6de6..69917c76bdd9 100644 --- a/docs/docs/zh/contribute/join/contribute.md +++ b/docs/docs/zh/contribute/join/contribute.md @@ -32,7 +32,7 @@ 如果你想实现某个 Feature 或者修复某个 Bug。请参考以下内容: * 所有的 Bug 与新 Feature 建议使用 Issues Page 进行管理。 -* 如果想要开发实现某个 Feature 功能,请先回复该功能所关联的 Issue,表明你当前正在这个 Issue 上工作。 并在回复的时候为自己设置一个 **deadline**,并添加的回复内容中。 +* 如果想要开发实现某个 Feature 功能,请先回复该功能所关联的 Issue,表明你当前正在这个 Issue 上工作。 并在回复的时候为自己设置一个 **deadline**,并添加到回复内容中。 * 最好在核心贡献者找到一个导师(指导者),导师会在设计与功能实现上给予即时的反馈。 * 你应该新建一个分支来开始你的工作,分支的名字参考[参与贡献 Pull Request 需知](./pull-request.md)。比如,你想完成 feature 功能并提交了 Issue 111,那么你的 branch 名字应为 feature-111。 功能名称可与导师讨论后确定。 * 完成后,发送一个 Pull Request 到 dolphinscheduler,提交过程具体请参考下面《[提交代码流程](./submit-code.md)》。 diff --git a/docs/docs/zh/contribute/join/pull-request.md b/docs/docs/zh/contribute/join/pull-request.md index e147a0a200b5..3ee6caf75e3d 100644 --- a/docs/docs/zh/contribute/join/pull-request.md +++ b/docs/docs/zh/contribute/join/pull-request.md @@ -33,7 +33,7 @@ waste time。 Bug Fix - [Fix-3333][server] Fix xxx + [Fix-3333][ui] Fix xxx Improvement @@ -46,9 +46,23 @@ waste time。 [Test-3333][api] Add the e2e test of xxx - Sub-Task - Sub-Task 对应的父类型 - [Feature-3333][server] Implement xxx + Doc + Doc + [Doc-3333] Improve xxx + + E2E + E2E + [E2E-3333] Implement xxx + + + CI + CI + [CI] Improve xxx + + + Chore + Chore + [Chore] Improve xxx diff --git a/docs/docs/zh/faq.md b/docs/docs/zh/faq.md index 13e0dc8b84c0..a4f32d08a86a 100644 --- a/docs/docs/zh/faq.md +++ b/docs/docs/zh/faq.md @@ -430,11 +430,11 @@ A:1,cd dolphinscheduler-ui 然后删除 node_modules 目录 sudo rm -rf node_modules ``` -​ 2,通过 npm.taobao.org 下载 node-sass +​ 2,通过 npmmirror.com 下载 node-sass ``` sudo npm uninstall node-sass -sudo npm i node-sass --sass_binary_site=https://npm.taobao.org/mirrors/node-sass/ +sudo npm i node-sass 
--sass_binary_site=https://npmmirror.com/mirrors/node-sass/ ``` 3,如果步骤 2 报错,请重新构建 node-saas [参考链接](https://github.com/apache/dolphinscheduler/blob/dev/docs/docs/zh/contribute/frontend-development.md) diff --git a/docs/docs/zh/guide/installation/cluster.md b/docs/docs/zh/guide/installation/cluster.md index 97c179278787..c266a671e016 100644 --- a/docs/docs/zh/guide/installation/cluster.md +++ b/docs/docs/zh/guide/installation/cluster.md @@ -14,21 +14,7 @@ ### 修改相关配置 -这个是与[伪集群部署](pseudo-cluster.md)差异较大的一步,因为部署脚本会通过 `scp` 的方式将安装需要的资源传输到各个机器上,所以这一步我们仅需要修改运行`install.sh`脚本的所在机器的配置即可。配置文件在路径在`bin/env/install_env.sh`下,此处我们仅需修改**INSTALL MACHINE**,**DolphinScheduler ENV、Database、Registry Server**与伪集群部署保持一致,下面对必须修改参数进行说明 - -```shell -# --------------------------------------------------------- -# INSTALL MACHINE -# --------------------------------------------------------- -# 需要配置master、worker、API server,所在服务器的IP均为机器IP或者localhost -# 如果是配置hostname的话,需要保证机器间可以通过hostname相互链接 -# 如下图所示,部署 DolphinScheduler 机器的 hostname 为 ds1,ds2,ds3,ds4,ds5,其中 ds1,ds2 安装 master 服务,ds3,ds4,ds5安装 worker 服务,alert server安装在ds4中,api server 安装在ds5中 -ips="ds1,ds2,ds3,ds4,ds5" -masters="ds1,ds2" -workers="ds3:default,ds4:default,ds5:default" -alertServer="ds4" -apiServers="ds5" -``` +这个是与[伪集群部署](pseudo-cluster.md)差异较大的一步,请使用 scp 等方式将配置文件分发到各台机器上,然后修改配置文件 ## 启动 DolphinScheduler && 登录 DolphinScheduler && 启停服务 diff --git a/docs/docs/zh/guide/installation/kubernetes.md b/docs/docs/zh/guide/installation/kubernetes.md index 20cd8907e8f3..f4b95de27bb0 100644 --- a/docs/docs/zh/guide/installation/kubernetes.md +++ b/docs/docs/zh/guide/installation/kubernetes.md @@ -14,16 +14,15 @@ Kubernetes 部署目的是在 Kubernetes 集群中部署 DolphinScheduler 服务 ## 安装 dolphinscheduler -请下载源码包 apache-dolphinscheduler--src.tar.gz,下载地址: [下载](https://dolphinscheduler.apache.org/zh-cn/download) - -发布一个名为 `dolphinscheduler` 的版本(release),请执行以下命令: - -``` -$ tar -zxvf apache-dolphinscheduler--src.tar.gz -$ cd 
apache-dolphinscheduler--src/deploy/kubernetes/dolphinscheduler -$ helm repo add bitnami https://charts.bitnami.com/bitnami -$ helm dependency update . -$ helm install dolphinscheduler . --set image.tag= +```bash +# 自行选择对应的版本 +export VERSION=3.2.1 +helm pull oci://registry-1.docker.io/apache/dolphinscheduler-helm --version ${VERSION} +tar -xvf dolphinscheduler-helm-${VERSION}.tgz +cd dolphinscheduler-helm +helm repo add bitnami https://charts.bitnami.com/bitnami +helm dependency update . +helm install dolphinscheduler . ``` 将名为 `dolphinscheduler` 的版本(release) 发布到 `test` 的命名空间中: diff --git a/docs/docs/zh/guide/installation/pseudo-cluster.md b/docs/docs/zh/guide/installation/pseudo-cluster.md index 13479e0d9e05..8e1c133c07bd 100644 --- a/docs/docs/zh/guide/installation/pseudo-cluster.md +++ b/docs/docs/zh/guide/installation/pseudo-cluster.md @@ -70,30 +70,7 @@ chmod 600 ~/.ssh/authorized_keys ## 修改相关配置 -完成基础环境的准备后,需要根据你的机器环境修改配置文件。配置文件可以在目录 `bin/env` 中找到,他们分别是 并命名为 `install_env.sh` 和 `dolphinscheduler_env.sh`。 - -### 修改 `install_env.sh` 文件 - -文件 `install_env.sh` 描述了哪些机器将被安装 DolphinScheduler 以及每台机器对应安装哪些服务。您可以在路径 `bin/env/install_env.sh` 中找到此文件,可通过以下方式更改 env 变量,export =,配置详情如下。 - -```shell -# --------------------------------------------------------- -# INSTALL MACHINE -# --------------------------------------------------------- -# Due to the master, worker, and API server being deployed on a single node, the IP of the server is the machine IP or localhost -ips="localhost" -sshPort="22" -masters="localhost" -workers="localhost:default" -alertServer="localhost" -apiServers="localhost" - -# DolphinScheduler installation path, it will auto-create if not exists -installPath=~/dolphinscheduler - -# Deploy user, use the user you create in section **Configure machine SSH password-free login** -deployUser="dolphinscheduler" -``` +完成基础环境的准备后,需要根据你的机器环境修改配置文件。配置文件可以在目录 `bin/env/dolphinscheduler_env.sh` 中找到。 ### 修改 `dolphinscheduler_env.sh` 文件 @@ -118,7 +95,6 @@ export 
SPRING_DATASOURCE_PASSWORD={password} # DolphinScheduler server related configuration export SPRING_CACHE_TYPE=${SPRING_CACHE_TYPE:-none} export SPRING_JACKSON_TIME_ZONE=${SPRING_JACKSON_TIME_ZONE:-UTC} -export MASTER_FETCH_COMMAND_NUM=${MASTER_FETCH_COMMAND_NUM:-10} # Registry center configuration, determines the type and link of the registry center export REGISTRY_TYPE=${REGISTRY_TYPE:-zookeeper} @@ -142,11 +118,7 @@ export PATH=$HADOOP_HOME/bin:$SPARK_HOME/bin:$PYTHON_LAUNCHER:$JAVA_HOME/bin:$HI ## 启动 DolphinScheduler -使用上面创建的**部署用户**运行以下命令完成部署,部署后的运行日志将存放在 logs 文件夹内 - -```shell -bash ./bin/install.sh -``` +使用上面创建的**部署用户**运行命令完成部署,部署后的运行日志将存放在 logs 文件夹内 > **_注意:_** 第一次部署的话,可能出现 5 次`sh: bin/dolphinscheduler-daemon.sh: No such file or directory`相关信息,此为非重要信息直接忽略即可 diff --git a/docs/docs/zh/guide/metrics/metrics.md b/docs/docs/zh/guide/metrics/metrics.md index aa620b873e8d..4865a14222b2 100644 --- a/docs/docs/zh/guide/metrics/metrics.md +++ b/docs/docs/zh/guide/metrics/metrics.md @@ -91,6 +91,11 @@ metrics exporter端口`server.port`是在application.yaml里定义的: master: ` - stop:停止的工作流实例数量 - failover:容错的工作流实例数量 +### RPC相关指标 + +- ds.rpc.client.sync.request.exception.count: (counter) 同步rpc请求异常数 +- ds.rpc.client.sync.request.duration.time: (histogram) 同步rpc请求耗时 + ### Master Server指标 - ds.master.overload.count: (counter) master过载次数 diff --git a/docs/docs/zh/guide/monitor.md b/docs/docs/zh/guide/monitor.md index b45d7a59b3ce..8033fbb966c4 100644 --- a/docs/docs/zh/guide/monitor.md +++ b/docs/docs/zh/guide/monitor.md @@ -16,6 +16,12 @@ ![worker](../../../img/new_ui/dev/monitor/worker.png) +### Alert Server + +- 主要是 alert server 的相关信息。 + +![alert-server](../../../img/new_ui/dev/monitor/alert-server.png) + ### Database - 主要是 DB 的健康状况 @@ -26,15 +32,16 @@ ### Statistics -![statistics](../../../img/new_ui/dev/monitor/statistics.png) +![Command Statistics List](../../../img/new_ui/dev/monitor/command-list.png) + +展示系统中的命令列表,数据来自`t_ds_command`表。 + +![Failure Command Statistics 
List](../../../img/new_ui/dev/monitor/failure-command-list.png) -- 待执行命令数:统计 t_ds_command 表的数据 -- 执行失败的命令数:统计 t_ds_error_command 表的数据 -- 待运行任务数:统计 Zookeeper 中 task_queue 的数据 -- 待杀死任务数:统计 Zookeeper 中 task_kill 的数据 +展示系统中的失败命令列表,数据来自`t_ds_error_command`表。 ### 审计日志 -审计日志的记录提供了有关谁访问了系统,以及他或她在给定时间段内执行了哪些操作的信息,他对于维护安全都很有用。 +审计日志的记录提供了有关谁访问了系统,以及他或她在给定时间段内执行了哪些操作的信息,对于维护安全都很有用。 -![audit-log](../../../img/new_ui/dev/monitor/audit-log.jpg) +![audit-log](../../../img/new_ui/dev/monitor/audit-log.png) diff --git a/docs/docs/zh/guide/parameter/global.md b/docs/docs/zh/guide/parameter/global.md index cd26a291b0a6..3b8b307f91b0 100644 --- a/docs/docs/zh/guide/parameter/global.md +++ b/docs/docs/zh/guide/parameter/global.md @@ -20,7 +20,7 @@ ### 保存工作流,并设置全局参数 -全局参数配置方式如下:在工作流定义页面,点击“设置全局”右边的加号,填写对应的变量名称和对应的值,保存即可。如下图所示: +全局参数配置方式如下:在工作流定义页面,点击“设置全局”右边的加号,填写对应的变量名称和对应的值,选择相应的参数值类型,保存即可。如下图所示: ![global-parameter02](../../../../img/new_ui/dev/parameter/global_parameter02.png) diff --git a/docs/docs/zh/guide/parameter/project-parameter.md b/docs/docs/zh/guide/parameter/project-parameter.md index df26e7a7df20..8692b198a82f 100644 --- a/docs/docs/zh/guide/parameter/project-parameter.md +++ b/docs/docs/zh/guide/parameter/project-parameter.md @@ -8,7 +8,7 @@ ### 定义项目级别参数 -在项目管理页面,点击项目级别参数,点击创建项目级别参数,填写参数名称和参数值。如下图所示: +在项目管理页面,点击项目级别参数,点击创建项目级别参数,填写参数名称和参数值,选择相应的参数值类型。如下图所示: ![project-parameter01](../../../../img/new_ui/dev/parameter/project_parameter01.png) diff --git a/docs/docs/zh/guide/parameter/startup-parameter.md b/docs/docs/zh/guide/parameter/startup-parameter.md index 7da65657a7a3..38e15787d0f1 100644 --- a/docs/docs/zh/guide/parameter/startup-parameter.md +++ b/docs/docs/zh/guide/parameter/startup-parameter.md @@ -6,7 +6,7 @@ ## 使用方式 -启动参数配置方式如下:在启动前参数设置界面,点击“启动参数“下面的加号,填写对应的参数名称和对应的值,点击确定,工作流会将启动参数加入全局参数中。 +启动参数配置方式如下:在启动前参数设置界面,点击“启动参数“下面的加号,填写对应的参数名称和对应的值,选择相应的参数值类型,点击确定,工作流会将启动参数加入全局参数中。 ## 任务样例 diff --git a/docs/docs/zh/guide/remote-logging.md 
b/docs/docs/zh/guide/remote-logging.md index 7321badb1aa3..0e45353636b4 100644 --- a/docs/docs/zh/guide/remote-logging.md +++ b/docs/docs/zh/guide/remote-logging.md @@ -10,7 +10,7 @@ Apache DolphinScheduler支持将任务日志传输到远端存储上。当配置 ```properties # 是否开启远程日志存储 remote.logging.enable=true -# 任务日志写入的远端存储,目前支持OSS, S3, GCS +# 任务日志写入的远端存储,目前支持OSS, S3, GCS, ABS remote.logging.target=OSS # 任务日志在远端存储上的目录 remote.logging.base.dir=logs @@ -66,12 +66,12 @@ remote.logging.google.cloud.storage.bucket.name= 配置`common.propertis`如下: ```properties -# abs container name, required if you set resource.storage.type=ABS -resource.azure.blob.storage.container.name= # abs account name, required if you set resource.storage.type=ABS -resource.azure.blob.storage.account.name= -# abs connection string, required if you set resource.storage.type=ABS -resource.azure.blob.storage.connection.string= +remote.logging.abs.account.name= +# abs account key, required if you set resource.storage.type=ABS +remote.logging.abs.account.key= +# abs container name, required if you set resource.storage.type=ABS +remote.logging.abs.container.name= ``` ### 注意事项 diff --git a/docs/docs/zh/guide/resource/configuration.md b/docs/docs/zh/guide/resource/configuration.md index 739d6fb30cf6..62c8cf135e7f 100644 --- a/docs/docs/zh/guide/resource/configuration.md +++ b/docs/docs/zh/guide/resource/configuration.md @@ -1,6 +1,6 @@ # 资源中心配置详情 -- 资源中心通常用于上传文件、UDF 函数,以及任务组管理等操作。 +- 资源中心通常用于上传文件以及任务组管理等操作。 - 资源中心可以对接分布式的文件存储系统,如[Hadoop](https://hadoop.apache.org/docs/r2.7.0/)(2.6+)或者[MinIO](https://github.com/minio/minio)集群,也可以对接远端的对象存储,如[AWS S3](https://aws.amazon.com/s3/)或者[阿里云 OSS](https://www.aliyun.com/product/oss),[华为云 OBS](https://support.huaweicloud.com/obs/index.html) 等。 - 资源中心也可以直接对接本地文件系统。在单机模式下,您无需依赖`Hadoop`或`S3`一类的外部存储系统,可以方便地对接本地文件系统进行体验。 - 
除此之外,对于集群模式下的部署,您可以通过使用[S3FS-FUSE](https://github.com/s3fs-fuse/s3fs-fuse)将`S3`挂载到本地,或者使用[JINDO-FUSE](https://help.aliyun.com/document_detail/187410.html)将`OSS`挂载到本地等,再用资源中心对接本地文件系统方式来操作远端对象存储中的文件。 @@ -26,77 +26,35 @@ Dolphinscheduler 资源中心使用本地系统默认是开启的,不需要用 ## 对接AWS S3 -如果需要使用到资源中心的 S3 上传资源,我们需要对以下路径的进行配置:`api-server/conf/common.properties` 和 `worker-server/conf/common.properties`。可参考如下: +如果需要使用到资源中心的 S3 上传资源,我们需要对以下路径的进行配置:`api-server/conf/common.properties`, `api-server/conf/aws.yaml` 和 `worker-server/conf/common.properties`, `worker-server/conf/aws.yaml`。可参考如下: 配置以下字段 ```properties -...... resource.storage.type=S3 - -...... - -resource.aws.access.key.id=aws_access_key_id -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=aws_secret_access_key -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=us-west-2 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s4. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint= - -...... ``` -## 对接分布式或远端对象存储 +```yaml +aws: + s3: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: + access.key.secret: + region: + bucket.name: + endpoint: -当需要使用资源中心进行相关文件的创建或者上传操作时,所有的文件和资源都会被存储在分布式文件系统`HDFS`或者远端的对象存储,如`S3`上。所以需要进行以下配置: +``` -### 配置 common.properties 文件 +## 对接阿里云 OSS -在 3.0.0-alpha 版本之后,如果需要使用到资源中心的 HDFS 或 S3 上传资源,我们需要对以下路径的进行配置:`api-server/conf/common.properties` 和 `worker-server/conf/common.properties`。可参考如下: +如果需要使用到资源中心的 OSS 上传资源,我们需要对以下路径的进行配置:`api-server/conf/common.properties` 和 `worker-server/conf/common.properties`。可参考如下: ```properties -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# user data local directory path, please make sure the directory exists and have read write permissions -data.basedir.path=/tmp/dolphinscheduler - -# resource storage type: LOCAL, HDFS, S3, OSS, GCS, ABS, OBS -resource.storage.type=LOCAL - -# resource store on HDFS/S3/OSS path, resource file will store to this hadoop hdfs path, self configuration, -# please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended -resource.storage.upload.base.path=/tmp/dolphinscheduler - -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id=minioadmin -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=minioadmin -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=cn-north-1 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s3. 
If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://localhost:9000 - # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= # alibaba cloud access key secret, required if you set resource.storage.type=OSS @@ -108,92 +66,27 @@ resource.alibaba.cloud.oss.bucket.name=dolphinscheduler # oss bucket endpoint, required if you set resource.storage.type=OSS resource.alibaba.cloud.oss.endpoint=https://oss-cn-hangzhou.aliyuncs.com -# alibaba cloud access key id, required if you set resource.storage.type=OBS +``` + +## 对接华为云 OBS + +如果需要使用到资源中心的 OBS 上传资源,我们需要对以下路径的进行配置:`api-server/conf/common.properties` 和 `worker-server/conf/common.properties`。可参考如下: + +```properties +# access key id, required if you set resource.storage.type=OBS resource.huawei.cloud.access.key.id= -# alibaba cloud access key secret, required if you set resource.storage.type=OBS +# access key secret, required if you set resource.storage.type=OBS resource.huawei.cloud.access.key.secret= # oss bucket name, required if you set resource.storage.type=OBS resource.huawei.cloud.obs.bucket.name=dolphinscheduler # oss bucket endpoint, required if you set resource.storage.type=OBS resource.huawei.cloud.obs.endpoint=obs.cn-southwest-2.huaweicloud.com -# if resource.storage.type=HDFS, the user must have the permission to create directories under the HDFS root path -resource.hdfs.root.user=root -# if resource.storage.type=S3, the value like: s3a://dolphinscheduler; -# if resource.storage.type=HDFS and namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir -resource.hdfs.fs.defaultFS=hdfs://localhost:8020 - -# whether to startup kerberos -hadoop.security.authentication.startup.state=false - -# java.security.krb5.conf path -java.security.krb5.conf.path=/opt/krb5.conf - -# login user from keytab username 
-login.user.keytab.username=hdfs-mycluster@ESZ.COM - -# login user from keytab path -login.user.keytab.path=/opt/hdfs.headless.keytab - -# kerberos expire time, the unit is hour -kerberos.expire.time=2 -# resource view suffixs -#resource.view.suffixs=txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js - -# resourcemanager port, the default value is 8088 if not specified -resource.manager.httpaddress.port=8088 -# if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single, keep this value empty -yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx -# if resourcemanager HA is enabled or not use resourcemanager, please keep the default value; -# If resourcemanager is single, you only need to replace ds1 to actual resourcemanager hostname -yarn.application.status.address=http://localhost:%s/ds/v1/cluster/apps/%s -# job history status url when application number threshold is reached(default 10000, maybe it was set to 1000) -yarn.job.history.status.address=http://localhost:19888/ds/v1/history/mapreduce/jobs/%s - -# datasource encryption enable -datasource.encryption.enable=false - -# datasource encryption salt -datasource.encryption.salt=!@#$%^&* - -# data quality jar directory path, it would auto discovery data quality jar from this given dir. You should keep it empty if you do not change anything in -# data-quality, it will auto discovery by dolphinscheduler itself. Change it only if you want to use your own data-quality jar and it is not in worker-server -# libs directory(but may sure your jar name start with `dolphinscheduler-data-quality`). 
-data-quality.jar.dir= - -#data-quality.error.output.path=/tmp/data-quality-error-data - -# Network IP gets priority, default inner outer - -# Whether hive SQL is executed in the same session -support.hive.oneSession=false - -# use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; -# if set false, executing user is the deploy user and doesn't need sudo permissions -sudo.enable=true - -# network interface preferred like eth0, default: empty -#dolphin.scheduler.network.interface.preferred= - -# network IP gets priority, default: inner outer -#dolphin.scheduler.network.priority.strategy=default - -# system env path -#dolphinscheduler.env.path=env/dolphinscheduler_env.sh - -# development state -development.state=false - -# rpc port -alert.rpc.port=50052 - -# way to collect applicationId: log(original regex match), aop -appId.collect: log ``` > **注意**: > > * 如果只配置了 `api-server/conf/common.properties` 的文件,则只是开启了资源上传的操作,并不能满足正常使用。如果想要在工作流中执行相关文件则需要额外配置 `worker-server/conf/common.properties`。 > * 如果用到资源上传的功能,那么[安装部署](../installation/standalone.md)中,部署用户需要有这部分的操作权限。 -> * 如果 Hadoop 集群的 NameNode 配置了 HA 的话,需要开启 HDFS 类型的资源上传,同时需要将 Hadoop 集群下的 `core-site.xml` 和 `hdfs-site.xml` 复制到 `worker-server/conf` 以及 `api-server/conf`,非 NameNode HA 跳过次步骤。 +> * 如果 Hadoop 集群的 NameNode 配置了 HA 的话,需要开启 HDFS 类型的资源上传,同时需要将 Hadoop 集群下的 `core-site.xml` 和 `hdfs-site.xml` 复制到 `worker-server/conf` 以及 `api-server/conf`,非 NameNode HA 跳过此步骤。 diff --git a/docs/docs/zh/guide/resource/intro.md b/docs/docs/zh/guide/resource/intro.md index fd691ec32cb6..f045cc8328c3 100644 --- a/docs/docs/zh/guide/resource/intro.md +++ b/docs/docs/zh/guide/resource/intro.md @@ -1,4 +1,4 @@ # 资源中心简介 -资源中心通常用于上传文件、UDF 函数和任务组管理。 对于 standalone 环境,可以选择本地文件目录作为上传文件夹(此操作不需要Hadoop部署)。当然,你也可以 +资源中心通常用于上传文件和任务组管理。 对于 standalone 环境,可以选择本地文件目录作为上传文件夹(此操作不需要Hadoop部署)。当然,你也可以 选择上传到 Hadoop 或者 MinIO 集群。 在这种情况下,您需要有 Hadoop(2.6+)或 MinIO 等相关环境。 diff --git a/docs/docs/zh/guide/resource/udf-manage.md 
b/docs/docs/zh/guide/resource/udf-manage.md deleted file mode 100644 index cc7c77a0abe7..000000000000 --- a/docs/docs/zh/guide/resource/udf-manage.md +++ /dev/null @@ -1,46 +0,0 @@ -# UDF 管理 - -- 资源管理和文件管理功能类似,不同之处是资源管理是上传的 UDF 函数,文件管理上传的是用户程序,脚本及配置文件。 -- 主要包括以下操作:重命名、下载、删除等。 -* 上传 UDF 资源 - -> 和上传文件相同。 - -## 函数管理 - -* 创建 UDF 函数 - - > 点击“创建 UDF 函数”,输入 UDF 函数参数,选择udf资源,点击“提交”,创建 UDF 函数。 - > 目前只支持 HIVE 的临时 UDF 函数 - -- UDF 函数名称:输入 UDF 函数时的名称 -- 包名类名:输入 UDF 函数的全路径 -- UDF 资源:设置创建的 UDF 对应的资源文件 - -![create-udf](../../../../img/new_ui/dev/resource/create-udf.png) - -## 任务样例 - -### 编写 UDF 函数 - -用户可以根据实际生产需求,自定义想要的 UDF 函数。这里编写一个在任意字符串的末尾添加 "HelloWorld" 的函数。如下图所示: - -![code-udf](../../../../img/new_ui/dev/resource/demo/udf-demo01.png) - -### 配置 UDF 函数 - -配置 UDF 函数前,需要先通过资源管理上传所需的函数 jar 包。然后进入函数管理,配置相关信息即可。如下图所示: - -![conf-udf](../../../../img/new_ui/dev/resource/demo/udf-demo02.png) - -### 使用 UDF 函数 - -在使用 UDF 函数过程中,用户只需关注具体的函数编写,通过资源中心上传配置完成即可。系统会自动配置 create function 语句,参考如下:[SqlTask](https://github.com/apache/dolphinscheduler/blob/923f3f38e3271d7f1d22b3abc3497cecb6957e4a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java#L507-L531) - -进入工作流定义一个 SQL 节点,数据源类型选择为 HIVE,数据源实例类型为 HIVE/IMPALA。 - -- SQL 语句:`select HwUdf("abc");` 该函数与内置函数使用方式一样,直接使用函数名称即可访问。 -- UDF 函数:选择资源中心所配置的即可。 - -![use-udf](../../../../img/new_ui/dev/resource/demo/udf-demo03.png) - diff --git a/docs/docs/zh/guide/security/security.md b/docs/docs/zh/guide/security/security.md index 5004d69e0e5d..a48954cfec2f 100644 --- a/docs/docs/zh/guide/security/security.md +++ b/docs/docs/zh/guide/security/security.md @@ -97,8 +97,8 @@ ## 授予权限 -* 授予权限包括项目权限,资源权限,数据源权限,UDF函数权限,k8s命名空间。 -* 管理员可以对普通用户进行非其创建的项目、资源、数据源、UDF函数、k8s命名空间。因为项目、资源、数据源、UDF函数、k8s命名空间授权方式都是一样的,所以以项目授权为例介绍。 +* 授予权限包括项目权限,数据源权限和k8s命名空间。 +* 管理员可以对普通用户进行非其创建的项目、数据源k8s命名空间。因为项目、数据源、k8s命名空间授权方式都是一样的,所以以项目授权为例介绍。 * 注意:对于用户自己创建的项目,该用户默认拥有所有的权限,因此对用户自己创建的项目进行权限变更是无效的。 - 
管理员进入`安全中心->用户管理页面`,点击需授权用户的“授权”按钮,如下图所示: @@ -112,7 +112,7 @@ ![no-permission-error](../../../../img/new_ui/dev/security/no-permission-error.png) -- 资源、数据源、UDF 函数授权同项目授权。 +- 数据源授权同项目授权。 ## Worker 分组 diff --git a/docs/docs/zh/guide/task/appendix.md b/docs/docs/zh/guide/task/appendix.md index 6ba27ad38ba9..9cc9002dd3fa 100644 --- a/docs/docs/zh/guide/task/appendix.md +++ b/docs/docs/zh/guide/task/appendix.md @@ -8,7 +8,7 @@ |----------|--------------------------------------------------------------------------------------------------------------------------------------| | 任务名称 | 任务的名称,同一个工作流定义中的节点名称不能重复。 | | 运行标志 | 标识这个节点是否需要调度执行,如果不需要执行,可以打开禁止执行开关。 | -| 缓存执行 | 标识这个节点是否需要进行缓存,如果缓存,则对于相同标识(相同任务版本,相同任务定义,相同参数传入)的任务进行缓存,运行时若已经存在缓存过的任务时,不在重复执行,直接复用结果。 | +| 缓存执行 | 标识这个节点是否需要进行缓存,如果缓存,则对于相同标识(相同任务版本,相同任务定义,相同参数传入)的任务进行缓存,运行时若已经存在缓存过的任务时,不再重复执行,直接复用结果。 | | 描述 | 当前节点的功能描述。 | | 任务优先级 | worker线程数不足时,根据优先级从高到低依次执行任务,优先级一样时根据先到先得原则执行。 | | Worker分组 | 设置分组后,任务会被分配给worker组的机器机执行。若选择Default,则会随机选择一个worker执行。 | diff --git a/docs/docs/zh/guide/task/datafactory.md b/docs/docs/zh/guide/task/datafactory.md index 0fa3375bc1f6..0822f60bec5e 100644 --- a/docs/docs/zh/guide/task/datafactory.md +++ b/docs/docs/zh/guide/task/datafactory.md @@ -19,10 +19,10 @@ DolphinScheduler DataFactory 组件的功能: ### 应用权限设置 -首先打开当前`Subcription`页面,点击`Access control (IAM)`,再点击`Add role assignment`进入授权页面。 -![Subcription-IAM](../../../../img/tasks/demo/datafactory_auth1.png) +首先打开当前`Subscription`页面,点击`Access control (IAM)`,再点击`Add role assignment`进入授权页面。 +![Subscription-IAM](../../../../img/tasks/demo/datafactory_auth1.png) 首先选择`Contributor`角色足够满足调用数据工厂。然后选择`Members`页面,再选择`Select members`,检索APP名称或APP的`Object ID`并添加,从给指定APP添加权限. 
-![Subcription-Role](../../../../img/tasks/demo/datafactory_auth2.png) +![Subscription-Role](../../../../img/tasks/demo/datafactory_auth2.png) ## 环境配置 diff --git a/docs/docs/zh/guide/task/dinky.md b/docs/docs/zh/guide/task/dinky.md index cab9a24ce40c..6b7ab3cc4bbb 100644 --- a/docs/docs/zh/guide/task/dinky.md +++ b/docs/docs/zh/guide/task/dinky.md @@ -17,11 +17,12 @@ - 默认参数说明请参考[DolphinScheduler任务参数附录](appendix.md)`默认任务参数`一栏。 -| **任务参数** | **描述** | -|-------------|---------------------------------------------------------------------| -| Dinky 地址 | Dinky 服务的 url。 | -| Dinky 任务 ID | Dinky 作业对应的唯一ID。 | -| 上线作业 | 指定当前 Dinky 作业是否上线,如果是,则该被提交的作业只能处于已发布且当前无对应的 Flink Job 实例在运行才可提交成功。 | +| **任务参数** | **描述** | +|-------------|----------------------------------------------------------------------------| +| Dinky 地址 | Dinky 服务的 url,例如:`http://localhost:8888`。 | +| Dinky 任务 ID | Dinky 作业对应的唯一ID。 | +| 上线作业 | 指定当前 Dinky 作业是否上线,如果是,则该被提交的作业只能处于已发布且当前无对应的 Flink Job 实例在运行才可提交成功。 | +| 自定义参数 | 从Dinky 1.0开始支持传递自定义参数,目前仅支持`IN`类型输入,不支持`OUT`类型输出。支持`${param}`方式获取全局或局部动态参数 | ## Task Example diff --git a/docs/docs/zh/guide/task/dms.md b/docs/docs/zh/guide/task/dms.md index 6013f45aefa1..8a87a36f9a85 100644 --- a/docs/docs/zh/guide/task/dms.md +++ b/docs/docs/zh/guide/task/dms.md @@ -73,14 +73,17 @@ DolphinScheduler 在 启动DMS 任务后,会跟中DMS任务状态,直至DMS ## 环境配置 -需要进行AWS的一些配置,修改`common.properties`中的以下配置信息 +需要进行AWS的一些配置,修改`aws.yml`中的以下配置信息 ```yaml -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id= -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key= -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region= +dms: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: + access.key.secret: + region: + endpoint: ``` diff --git a/docs/docs/zh/guide/task/mlflow.md b/docs/docs/zh/guide/task/mlflow.md index 3c384ce2215e..12d2bc5089f7 100644 --- a/docs/docs/zh/guide/task/mlflow.md +++ b/docs/docs/zh/guide/task/mlflow.md @@ -139,7 +139,7 @@ mlflow server -h 0.0.0.0 -p 5000 --serve-artifacts --backend-store-uri sqlite:// ### 内置算法仓库配置 -如果遇到github无法访问的情况,可以修改`commom.properties`配置文件的以下字段,将github地址替换能访问的地址。 +如果遇到github无法访问的情况,可以修改`common.properties`配置文件的以下字段,将github地址替换能访问的地址。 ```yaml # mlflow task plugin preset repository diff --git a/docs/docs/zh/guide/task/pigeon.md b/docs/docs/zh/guide/task/pigeon.md deleted file mode 100644 index d8138d3c0216..000000000000 --- a/docs/docs/zh/guide/task/pigeon.md +++ /dev/null @@ -1,19 +0,0 @@ -# Pigeon - -Pigeon任务类型是通过调用远程websocket服务,实现远程任务的触发,状态、日志的获取,是 DolphinScheduler 通用远程 websocket 服务调用任务 - -## 创建任务 - -拖动工具栏中的任务节点到画板中即能完成任务创建 - -## 任务参数 - -[//]: # (TODO: use the commented anchor below once our website template supports this syntax) -[//]: # (- 默认参数说明请参考[DolphinScheduler任务参数附录](appendix.md#默认任务参数)`默认任务参数`一栏。) - -- 默认参数说明请参考[DolphinScheduler任务参数附录](appendix.md)`默认任务参数`一栏。 - -| **任务参数** | **描述** | -|----------|-------------------| -| 目标任务名 | 输入Pigeon任务的目标任务名称 | - diff --git a/docs/docs/zh/guide/task/sagemaker.md b/docs/docs/zh/guide/task/sagemaker.md index e4b4c6154203..22927171e9cb 100644 --- a/docs/docs/zh/guide/task/sagemaker.md +++ b/docs/docs/zh/guide/task/sagemaker.md @@ -33,14 +33,17 @@ DolphinScheduler SageMaker 组件的功能: ## 环境配置 -需要进行AWS的一些配置,修改`common.properties`中的`xxxxx`为你的配置信息 +需要进行AWS的一些配置,修改`aws.yml`中的以下配置信息 ```yaml -# The AWS access key. 
if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id= -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key= -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region= +sagemaker: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: + access.key.secret: + region: + endpoint: ``` diff --git a/docs/docs/zh/guide/task/spark.md b/docs/docs/zh/guide/task/spark.md index a392f5582694..2f7b2ee3469c 100644 --- a/docs/docs/zh/guide/task/spark.md +++ b/docs/docs/zh/guide/task/spark.md @@ -23,6 +23,7 @@ Spark 任务类型用于执行 Spark 应用。对于 Spark 节点,worker 支 - 程序类型:支持 Java、Scala、Python 和 SQL 四种语言。 - 主函数的 Class:Spark 程序的入口 Main class 的全路径。 - 主程序包:执行 Spark 程序的 jar 包(通过资源中心上传)。 +- Master:执行 Spark 集群的 Master Url。 - SQL脚本:Spark sql 运行的 .sql 文件中的 SQL 语句。 - 部署方式:(1) spark submit 支持 cluster、client 和 local 三种模式。 (2) spark sql 支持 client 和 local 两种模式。 diff --git a/docs/docs/zh/guide/task/sql.md b/docs/docs/zh/guide/task/sql.md index b71f44e41610..2c97130c96ed 100644 --- a/docs/docs/zh/guide/task/sql.md +++ b/docs/docs/zh/guide/task/sql.md @@ -26,7 +26,6 @@ SQL任务类型,用于连接数据库并执行相应SQL。 - 默认采用`;\n`作为SQL分隔符,拆分成多段SQL语句执行。Hive的JDBC不支持一次执行多段SQL语句,请不要使用`;\n`。 - sql参数:输入参数格式为key1=value1;key2=value2… - sql语句:SQL语句 -- UDF函数:对于HIVE类型的数据源,可以引用资源中心中创建的UDF函数,其他类型的数据源暂不支持UDF函数。 - 自定义参数:SQL任务类型,而存储过程是自定义参数顺序,给方法设置值自定义参数类型和数据类型,同存储过程任务类型一样。区别在于SQL任务类型自定义参数会替换sql语句中${变量}。 - 前置sql:前置sql在sql语句之前执行。 - 后置sql:后置sql在sql语句之后执行。 @@ -57,5 +56,4 @@ SQL任务类型,用于连接数据库并执行相应SQL。 ## 注意事项 * 注意SQL类型的选择,如果是INSERT等操作需要选择非查询类型。 -* 为了兼容长会话情况,UDF函数的创建是通过CREATE 
OR REPLACE语句 diff --git a/docs/docs/zh/guide/upgrade/incompatible.md b/docs/docs/zh/guide/upgrade/incompatible.md index dd47fa8de875..a5260a06957f 100644 --- a/docs/docs/zh/guide/upgrade/incompatible.md +++ b/docs/docs/zh/guide/upgrade/incompatible.md @@ -24,3 +24,8 @@ * 在 /datasources/tables && /datasources/tableColumns 接口中添加了必选字段`database` [#14406](https://github.com/apache/dolphinscheduler/pull/14406) +## 3.3.0 + +* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209]) +* 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218]) + diff --git a/docs/img/new_ui/dev/monitor/alert-server.png b/docs/img/new_ui/dev/monitor/alert-server.png new file mode 100644 index 000000000000..8ffc7725dc35 Binary files /dev/null and b/docs/img/new_ui/dev/monitor/alert-server.png differ diff --git a/docs/img/new_ui/dev/monitor/audit-log.jpg b/docs/img/new_ui/dev/monitor/audit-log.jpg deleted file mode 100644 index bc396ad30355..000000000000 Binary files a/docs/img/new_ui/dev/monitor/audit-log.jpg and /dev/null differ diff --git a/docs/img/new_ui/dev/monitor/audit-log.png b/docs/img/new_ui/dev/monitor/audit-log.png new file mode 100644 index 000000000000..72acd1f839f0 Binary files /dev/null and b/docs/img/new_ui/dev/monitor/audit-log.png differ diff --git a/docs/img/new_ui/dev/monitor/command-list.png b/docs/img/new_ui/dev/monitor/command-list.png new file mode 100644 index 000000000000..d8fca25d10b0 Binary files /dev/null and b/docs/img/new_ui/dev/monitor/command-list.png differ diff --git a/docs/img/new_ui/dev/monitor/failure-command-list.png b/docs/img/new_ui/dev/monitor/failure-command-list.png new file mode 100644 index 000000000000..75c7cda1da83 Binary files /dev/null and b/docs/img/new_ui/dev/monitor/failure-command-list.png differ diff --git a/docs/img/new_ui/dev/monitor/master.png b/docs/img/new_ui/dev/monitor/master.png index 6505595d6ae9..bbbb808ca71d 100644 Binary files a/docs/img/new_ui/dev/monitor/master.png and b/docs/img/new_ui/dev/monitor/master.png differ diff --git 
a/docs/img/new_ui/dev/monitor/statistics.png b/docs/img/new_ui/dev/monitor/statistics.png deleted file mode 100644 index a7f4a4a4387a..000000000000 Binary files a/docs/img/new_ui/dev/monitor/statistics.png and /dev/null differ diff --git a/docs/img/new_ui/dev/monitor/worker.png b/docs/img/new_ui/dev/monitor/worker.png index 4a6827938545..166820c9309b 100644 Binary files a/docs/img/new_ui/dev/monitor/worker.png and b/docs/img/new_ui/dev/monitor/worker.png differ diff --git a/docs/img/new_ui/dev/parameter/context-subprocess02.png b/docs/img/new_ui/dev/parameter/context-subprocess02.png index 8c1acf5f119c..7d7cd850ea49 100644 Binary files a/docs/img/new_ui/dev/parameter/context-subprocess02.png and b/docs/img/new_ui/dev/parameter/context-subprocess02.png differ diff --git a/docs/img/new_ui/dev/parameter/context_parameter04.png b/docs/img/new_ui/dev/parameter/context_parameter04.png index b1dfd8b3cdbc..e307e607e761 100644 Binary files a/docs/img/new_ui/dev/parameter/context_parameter04.png and b/docs/img/new_ui/dev/parameter/context_parameter04.png differ diff --git a/docs/img/new_ui/dev/parameter/global_parameter02.png b/docs/img/new_ui/dev/parameter/global_parameter02.png index 1eeaf4b43a96..3d6d53531168 100644 Binary files a/docs/img/new_ui/dev/parameter/global_parameter02.png and b/docs/img/new_ui/dev/parameter/global_parameter02.png differ diff --git a/docs/img/new_ui/dev/parameter/priority_parameter01.png b/docs/img/new_ui/dev/parameter/priority_parameter01.png index ec34510e9032..1789379a2a17 100644 Binary files a/docs/img/new_ui/dev/parameter/priority_parameter01.png and b/docs/img/new_ui/dev/parameter/priority_parameter01.png differ diff --git a/docs/img/new_ui/dev/parameter/priority_parameter02.png b/docs/img/new_ui/dev/parameter/priority_parameter02.png index 044b43852258..edcb13cbd78d 100644 Binary files a/docs/img/new_ui/dev/parameter/priority_parameter02.png and b/docs/img/new_ui/dev/parameter/priority_parameter02.png differ diff --git 
a/docs/img/new_ui/dev/parameter/project_parameter01.png b/docs/img/new_ui/dev/parameter/project_parameter01.png index 48ea149b402c..5f6a483d4c9a 100644 Binary files a/docs/img/new_ui/dev/parameter/project_parameter01.png and b/docs/img/new_ui/dev/parameter/project_parameter01.png differ diff --git a/docs/img/new_ui/dev/parameter/startup_parameter02.png b/docs/img/new_ui/dev/parameter/startup_parameter02.png index cbeb96b65842..d1819ad78e7c 100644 Binary files a/docs/img/new_ui/dev/parameter/startup_parameter02.png and b/docs/img/new_ui/dev/parameter/startup_parameter02.png differ diff --git a/docs/img/new_ui/dev/parameter/startup_parameter04.png b/docs/img/new_ui/dev/parameter/startup_parameter04.png index e32afb4b3198..b5799b24a5e8 100644 Binary files a/docs/img/new_ui/dev/parameter/startup_parameter04.png and b/docs/img/new_ui/dev/parameter/startup_parameter04.png differ diff --git a/docs/img/new_ui/dev/resource/create-udf.png b/docs/img/new_ui/dev/resource/create-udf.png deleted file mode 100644 index f570b7f84db0..000000000000 Binary files a/docs/img/new_ui/dev/resource/create-udf.png and /dev/null differ diff --git a/docs/img/new_ui/dev/resource/demo/udf-demo01.png b/docs/img/new_ui/dev/resource/demo/udf-demo01.png deleted file mode 100644 index d1e0ee5c6081..000000000000 Binary files a/docs/img/new_ui/dev/resource/demo/udf-demo01.png and /dev/null differ diff --git a/docs/img/new_ui/dev/resource/demo/udf-demo02.png b/docs/img/new_ui/dev/resource/demo/udf-demo02.png deleted file mode 100644 index f20a247aa22a..000000000000 Binary files a/docs/img/new_ui/dev/resource/demo/udf-demo02.png and /dev/null differ diff --git a/docs/img/new_ui/dev/resource/demo/udf-demo03.png b/docs/img/new_ui/dev/resource/demo/udf-demo03.png deleted file mode 100644 index 49c932b395f3..000000000000 Binary files a/docs/img/new_ui/dev/resource/demo/udf-demo03.png and /dev/null differ diff --git a/docs/img/pigeon.png b/docs/img/pigeon.png deleted file mode 100644 index 
6fe21d2b1ee5..000000000000 Binary files a/docs/img/pigeon.png and /dev/null differ diff --git a/docs/img/tasks/demo/dinky_task_id.png b/docs/img/tasks/demo/dinky_task_id.png index f1e791caad67..b1ebb3ec0377 100644 Binary files a/docs/img/tasks/demo/dinky_task_id.png and b/docs/img/tasks/demo/dinky_task_id.png differ diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceAlertChannel.java index eeaaba5d0128..4aa29c19c5ad 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceAlertChannel.java @@ -40,7 +40,7 @@ public AlertResult process(AlertInfo info) { Map paramsMap = info.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "aliyun-voice params is null"); + return new AlertResult(false, "aliyun-voice params is null"); } VoiceParam voiceParam = buildVoiceParam(paramsMap); return new VoiceSender(voiceParam).send(); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceSender.java index c6c29d8735ec..fe0fc65986f8 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceSender.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/main/java/org/apache/dolphinscheduler/plugin/alert/voice/VoiceSender.java @@ -46,7 +46,7 @@ public VoiceSender(VoiceParam voiceParam) { public AlertResult send() { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); try { Client client = createClient(voiceParam.getConnection()); SingleCallByTtsRequest singleCallByTtsRequest = new SingleCallByTtsRequest() @@ -61,7 +61,7 @@ public AlertResult send() { } SingleCallByTtsResponseBody body = response.getBody(); if (body.code.equalsIgnoreCase("ok")) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage(body.getCallId()); } else { alertResult.setMessage(body.getMessage()); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/java/VoiceSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/java/VoiceSenderTest.java index 515a410b6386..15f4871392cd 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/java/VoiceSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/java/VoiceSenderTest.java @@ -46,7 +46,7 @@ void testSendWeChatTableMsg() { VoiceSender weChatSender = new VoiceSender(voiceParam); AlertResult alertResult = weChatSender.send(); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } } diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/resources/logback.xml similarity index 62% rename from 
dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/resources/logback.xml index 61b4e2c3727e..9a182a18ef12 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-aliyunVoice/src/test/resources/logback.xml @@ -1,4 +1,4 @@ - + - - - - delete from t_ds_relation_udfs_user - where user_id = #{userId} - - - delete from t_ds_relation_udfs_user - where udf_id = #{udfFuncId} - - \ No newline at end of file + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java index 530a5483425d..a4eaae232fbd 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java @@ -35,6 +35,6 @@ public interface AlertChannel { AlertResult process(AlertInfo info); default @NonNull AlertResult closeAlert(AlertInfo info) { - return new AlertResult("true", "no need to close alert"); + return new AlertResult(true, "no need to close alert"); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java index 37a3f3357c26..004e8b3bdaf2 100644 --- 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java @@ -53,14 +53,6 @@ public class AlertData { */ private String log; - /** - * 0 do not send warning; - * 1 send if process success; - * 2 send if process failed; - * 3 send if process ends, whatever the result; - */ - private int warnType; - /** * AlertType#code */ diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java index b6c5db38e980..ceeed97510f1 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java @@ -33,15 +33,19 @@ @NoArgsConstructor public class AlertResult { - /** - * todo: use enum - * false or true - */ - private String status; + private boolean success; /** * alert result message, each plugin can have its own message */ private String message; + public static AlertResult success() { + return new AlertResult(true, null); + } + + public static AlertResult fail(String message) { + return new AlertResult(false, message); + } + } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java index 74c440fe76c5..f5cc938246fa 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java @@ -31,7 +31,7 @@ public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "ding talk params is null"); + return new AlertResult(false, "ding talk params is null"); } return new DingTalkSender(paramsMap).sendDingTalkMsg(alertData.getTitle(), alertData.getContent()); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java index c0070ac11c1e..527e38cf7752 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java @@ -48,6 +48,8 @@ import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -124,7 +126,7 @@ private static RequestConfig getProxyConfig(String proxy, int port) { 
private AlertResult checkSendDingTalkSendMsgResult(String result) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); if (null == result) { alertResult.setMessage("send ding talk msg error"); @@ -138,7 +140,7 @@ private AlertResult checkSendDingTalkSendMsgResult(String result) { return alertResult; } if (sendMsgResponse.errcode == 0) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("send ding talk msg success"); return alertResult; } @@ -162,7 +164,7 @@ public AlertResult sendDingTalkMsg(String title, String content) { } catch (Exception e) { log.info("send ding talk alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("send ding talk alert fail."); } return alertResult; @@ -189,7 +191,7 @@ private String sendMsg(String title, String content) throws IOException { String resp; try { HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, "UTF-8"); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); } finally { response.close(); @@ -317,15 +319,17 @@ private String generateSignedUrl() { String sign = org.apache.commons.lang3.StringUtils.EMPTY; try { Mac mac = Mac.getInstance("HmacSHA256"); - mac.init(new SecretKeySpec(secret.getBytes("UTF-8"), "HmacSHA256")); - byte[] signData = mac.doFinal(stringToSign.getBytes("UTF-8")); - sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)), "UTF-8"); + mac.init(new SecretKeySpec(secret.getBytes(StandardCharsets.UTF_8), "HmacSHA256")); + byte[] signData = mac.doFinal(stringToSign.getBytes(StandardCharsets.UTF_8)); + sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)), StandardCharsets.UTF_8.name()); } catch (Exception e) { log.error("generate sign error, message:{}", e); } return url + "×tamp=" + timestamp + "&sign=" + sign; } 
+ @Getter + @Setter static final class DingTalkSendMsgResponse { private Integer errcode; @@ -334,22 +338,6 @@ static final class DingTalkSendMsgResponse { public DingTalkSendMsgResponse() { } - public Integer getErrcode() { - return this.errcode; - } - - public void setErrcode(Integer errcode) { - this.errcode = errcode; - } - - public String getErrmsg() { - return this.errmsg; - } - - public void setErrmsg(String errmsg) { - this.errmsg = errmsg; - } - @Override public boolean equals(final Object o) { if (o == this) { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java index 7a8df0549cb4..90f64d7bb230 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.alert.api.AlertResult; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -47,11 +48,11 @@ public void initDingTalkConfig() { @Test public void testSend() { DingTalkSender dingTalkSender = new DingTalkSender(dingTalkConfig); - dingTalkSender.sendDingTalkMsg("keyWord+Welcome", "UTF-8"); + dingTalkSender.sendDingTalkMsg("keyWord+Welcome", StandardCharsets.UTF_8.name()); dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, "true"); dingTalkSender = new DingTalkSender(dingTalkConfig); AlertResult alertResult = dingTalkSender.sendDingTalkMsg("title", "content test"); - 
Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertEquals(false, alertResult.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java index 5728461ae633..06aecd35db02 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java @@ -35,24 +35,20 @@ public AlertResult process(AlertInfo info) { AlertData alert = info.getAlertData(); Map paramsMap = info.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "mail params is null"); + return new AlertResult(false, "mail params is null"); } MailSender mailSender = new MailSender(paramsMap); AlertResult alertResult = mailSender.sendMails(alert.getTitle(), alert.getContent()); - boolean flag; - if (alertResult == null) { alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("alert send error."); log.info("alert send error : {}", alertResult.getMessage()); 
return alertResult; } - flag = Boolean.parseBoolean(String.valueOf(alertResult.getStatus())); - - if (flag) { + if (alertResult.isSuccess()) { log.info("alert send success"); alertResult.setMessage("email send success."); } else { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java index 94eb4efa39bd..3eec7022fdd8 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java @@ -56,8 +56,6 @@ public final class EmailConstants { public static final String TABLE_BODY_HTML_TAIL = ""; - public static final String UTF_8 = "UTF-8"; - public static final String EXCEL_SUFFIX_XLSX = ".xlsx"; public static final String SINGLE_SLASH = "/"; diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java index 2e400efbceb7..58e1eb10b3aa 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java @@ -34,6 +34,7 @@ import java.io.File; import java.io.IOException; +import 
java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -153,7 +154,7 @@ public AlertResult sendMails(String title, String content) { */ public AlertResult sendMails(List receivers, List receiverCcs, String title, String content) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); // if there is no receivers && no receiversCc, no need to process if (CollectionUtils.isEmpty(receivers) && CollectionUtils.isEmpty(receiverCcs)) { @@ -171,7 +172,7 @@ public AlertResult sendMails(List receivers, List receiverCcs, S Session session = getSession(); email.setMailSession(session); email.setFrom(mailSenderEmail); - email.setCharset(EmailConstants.UTF_8); + email.setCharset(StandardCharsets.UTF_8.name()); if (CollectionUtils.isNotEmpty(receivers)) { // receivers mail for (String receiver : receivers) { @@ -200,7 +201,7 @@ public AlertResult sendMails(List receivers, List receiverCcs, S attachment(title, content, partContent); - alertResult.setStatus("true"); + alertResult.setSuccess(true); return alertResult; } catch (Exception e) { handleException(alertResult, e); @@ -344,7 +345,8 @@ private void attachContent(String title, String content, String partContent, ExcelUtils.genExcelFile(content, randomFilename, xlsFilePath); part2.attachFile(file); - part2.setFileName(MimeUtility.encodeText(title + EmailConstants.EXCEL_SUFFIX_XLSX, EmailConstants.UTF_8, "B")); + part2.setFileName( + MimeUtility.encodeText(title + EmailConstants.EXCEL_SUFFIX_XLSX, StandardCharsets.UTF_8.name(), "B")); // add components to collection partList.addBodyPart(part1); partList.addBodyPart(part2); @@ -378,7 +380,7 @@ private AlertResult getStringObjectMap(String title, String content, AlertResult email.setDebug(true); email.send(); - alertResult.setStatus("true"); + alertResult.setSuccess(true); return alertResult; } diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java index 9df19154aade..643cd8a01ea3 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java @@ -66,7 +66,7 @@ public void testProcess() { alertInfo.setAlertParams(paramsMap); AlertResult alertResult = emailAlertChannel.process(alertInfo); Assertions.assertNotNull(alertResult); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } public String getEmailAlertParams() { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java index f428a16d9228..f28df77de971 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java @@ -66,8 +66,15 @@ public void testGenExcelFile() { @Test public void testGenExcelFileByCheckDir() { - ExcelUtils.genExcelFile("[{\"a\": \"a\"},{\"a\": \"a\"}]", "t", "/tmp/xls"); - File file = new 
File("/tmp/xls" + EmailConstants.SINGLE_SLASH + "t" + EmailConstants.EXCEL_SUFFIX_XLSX); + String path = "/tmp/xls"; + ExcelUtils.genExcelFile("[{\"a\": \"a\"},{\"a\": \"a\"}]", "t", path); + File file = + new File( + path + + EmailConstants.SINGLE_SLASH + + "t" + + EmailConstants.EXCEL_SUFFIX_XLSX); file.delete(); + Assertions.assertFalse(file.exists()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java index f562999be02e..9a4b5e82579f 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java @@ -18,10 +18,9 @@ package org.apache.dolphinscheduler.plugin.alert.email; import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.plugin.alert.email.template.AlertTemplate; -import org.apache.dolphinscheduler.plugin.alert.email.template.DefaultHTMLTemplate; import java.util.ArrayList; import java.util.HashMap; @@ -42,7 +41,6 @@ public class MailUtilsTest { private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class); static MailSender mailSender; private static Map emailConfig = new HashMap<>(); - private static AlertTemplate alertTemplate; @BeforeAll public static void initEmailConfig() { @@ -59,7 +57,6 @@ public static void initEmailConfig() { 
emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS, "347801120@qq.com"); emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERCCS, "347801120@qq.com"); emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TEXT.getDescp()); - alertTemplate = new DefaultHTMLTemplate(); mailSender = new MailSender(emailConfig); } @@ -77,9 +74,10 @@ public void testSendMails() { + "\"Host: 192.168.xx.xx\"," + "\"Notify group :4\"]"; - mailSender.sendMails( + AlertResult alertResult = mailSender.sendMails( "Mysql Exception", content); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -108,7 +106,8 @@ void testAuthCheck() { emailConfig.put(MailParamsConstants.NAME_MAIL_USER, "user"); emailConfig.put(MailParamsConstants.NAME_MAIL_PASSWD, "passwd"); mailSender = new MailSender(emailConfig); - mailSender.sendMails(title, content); + AlertResult alertResult = mailSender.sendMails(title, content); + Assertions.assertFalse(alertResult.isSuccess()); } public String list2String() { @@ -134,7 +133,6 @@ public String list2String() { logger.info(mapjson); return mapjson; - } @Test @@ -143,23 +141,25 @@ public void testSendTableMail() { String content = list2String(); emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLE.getDescp()); mailSender = new MailSender(emailConfig); - mailSender.sendMails(title, content); + AlertResult alertResult = mailSender.sendMails(title, content); + Assertions.assertFalse(alertResult.isSuccess()); } @Test - public void testAttachmentFile() throws Exception { + public void testAttachmentFile() { String content = list2String(); emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.ATTACHMENT.getDescp()); mailSender = new MailSender(emailConfig); - mailSender.sendMails("gaojing", content); + AlertResult alertResult = mailSender.sendMails("gaojing", content); + Assertions.assertFalse(alertResult.isSuccess()); } @Test - public void testTableAttachmentFile() throws Exception { + public void 
testTableAttachmentFile() { String content = list2String(); emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLE_ATTACHMENT.getDescp()); mailSender = new MailSender(emailConfig); - mailSender.sendMails("gaojing", content); + AlertResult alertResult = mailSender.sendMails("gaojing", content); + Assertions.assertFalse(alertResult.isSuccess()); } - } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java index 8959c8aaec13..29c78a9d1bb6 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java @@ -31,7 +31,7 @@ public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "fei shu params is null"); + return new AlertResult(false, "fei shu params is null"); } return new FeiShuSender(paramsMap).sendFeiShuMsg(alertData); } diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java index 14a1d63ff08c..1c2f3656ea08 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java @@ -31,6 +31,7 @@ import org.apache.http.util.EntityUtils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -79,7 +80,7 @@ private static String textToJsonString(AlertData alertData) { public static AlertResult checkSendFeiShuSendMsgResult(String result) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); if (org.apache.commons.lang3.StringUtils.isBlank(result)) { alertResult.setMessage("send fei shu msg error"); @@ -94,7 +95,7 @@ public static AlertResult checkSendFeiShuSendMsgResult(String result) { return alertResult; } if (sendMsgResponse.statusCode == 0) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("send fei shu msg success"); return alertResult; } @@ -135,7 +136,7 @@ public AlertResult sendFeiShuMsg(AlertData alertData) { } catch (Exception e) { log.info("send fei shu alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("send fei shu alert fail."); } return alertResult; @@ -161,7 +162,7 @@ private String sendMsg(AlertData alertData) throws 
IOException { String resp; try { HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, "utf-8"); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); } finally { response.close(); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java index 41f372b85c12..829b02dea655 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java @@ -43,7 +43,7 @@ public void testSend() { alertData.setContent("feishu test content"); FeiShuSender feiShuSender = new FeiShuSender(feiShuConfig); AlertResult alertResult = feiShuSender.sendFeiShuMsg(alertData); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -87,12 +87,12 @@ public void testCheckSendFeiShuSendMsgResult() { FeiShuSender feiShuSender = new FeiShuSender(feiShuConfig); AlertResult alertResult = feiShuSender.checkSendFeiShuSendMsgResult(""); - Assertions.assertFalse(Boolean.valueOf(alertResult.getStatus())); + Assertions.assertFalse(alertResult.isSuccess()); AlertResult alertResult2 = feiShuSender.checkSendFeiShuSendMsgResult("123"); Assertions.assertEquals("send fei shu msg fail", alertResult2.getMessage()); String response = "{\"StatusCode\":\"0\",\"extra\":\"extra\",\"StatusMessage\":\"StatusMessage\"}"; AlertResult alertResult3 = feiShuSender.checkSendFeiShuSendMsgResult(response); - 
Assertions.assertTrue(Boolean.valueOf(alertResult3.getStatus())); + Assertions.assertTrue(alertResult3.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java index 944762f13f22..caf1c4c59835 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java @@ -31,7 +31,7 @@ public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "http params is null"); + return new AlertResult(false, "http params is null"); } return new HttpSender(paramsMap).send(alertData.getContent()); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java index 999a0c9599bd..e2a6606a3938 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java @@ -39,6 +39,7 @@ import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -58,7 +59,6 @@ public final class HttpSender { * request type get */ private static final String REQUEST_TYPE_GET = "GET"; - private static final String DEFAULT_CHARSET = "utf-8"; private final String headerParams; private final String bodyParams; private final String contentField; @@ -92,18 +92,18 @@ public AlertResult send(String msg) { } if (httpRequest == null) { - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("Request types are not supported"); return alertResult; } try { String resp = this.getResponseString(httpRequest); - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage(resp); } catch (Exception e) { log.error("send http alert msg exception : {}", e.getMessage()); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage( String.format("Send http request alert failed: %s", e.getMessage())); } @@ -124,7 +124,7 @@ public String getResponseString(HttpRequestBase httpRequest) throws Exception { CloseableHttpResponse response = httpClient.execute(httpRequest); HttpEntity entity = response.getEntity(); - return EntityUtils.toString(entity, DEFAULT_CHARSET); + return EntityUtils.toString(entity, StandardCharsets.UTF_8); } private void 
createHttpRequest(String msg) throws MalformedURLException, URISyntaxException { @@ -157,7 +157,8 @@ private void setMsgInUrl(String msg) { type = URL_SPLICE_CHAR; } try { - url = String.format("%s%s%s=%s", url, type, contentField, URLEncoder.encode(msg, DEFAULT_CHARSET)); + url = String.format("%s%s%s=%s", url, type, contentField, + URLEncoder.encode(msg, StandardCharsets.UTF_8.name())); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } @@ -190,7 +191,7 @@ private void setMsgInRequestBody(String msg) { } // set msg content field objectNode.put(contentField, msg); - StringEntity entity = new StringEntity(JSONUtils.toJsonString(objectNode), DEFAULT_CHARSET); + StringEntity entity = new StringEntity(JSONUtils.toJsonString(objectNode), StandardCharsets.UTF_8); ((HttpPost) httpRequest).setEntity(entity); } catch (Exception e) { log.error("send http alert msg exception : {}", e.getMessage()); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java index aebf6f9d50bf..ee67db47f1fb 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java @@ -62,9 +62,9 @@ public void testProcessSuccess() { // HttpSender(paramsMap).send(alertData.getContent()); already test in HttpSenderTest.sendTest. 
so we can mock // it - doReturn(new AlertResult("true", "success")).when(alertChannel).process(any()); + doReturn(new AlertResult(true, "success")).when(alertChannel).process(any()); AlertResult alertResult = alertChannel.process(alertInfo); - Assertions.assertEquals("true", alertResult.getStatus()); + Assertions.assertTrue(alertResult.isSuccess()); } /** diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java index be013457ac29..40f589a10b9d 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java @@ -46,7 +46,7 @@ public void sendTest() throws Exception { HttpSender httpSender = spy(new HttpSender(paramsMap)); doReturn("success").when(httpSender).getResponseString(any()); AlertResult alertResult = httpSender.send("Fault tolerance warning"); - Assertions.assertEquals("true", alertResult.getStatus()); + Assertions.assertTrue(alertResult.isSuccess()); Assertions.assertTrue(httpSender.getRequestUrl().contains(url)); Assertions.assertTrue(httpSender.getRequestUrl().contains(contentField)); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutyAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutyAlertChannel.java index b03313952072..430bacbf63a3 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutyAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutyAlertChannel.java @@ -30,8 +30,8 @@ public final class PagerDutyAlertChannel implements AlertChannel { public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map alertParams = alertInfo.getAlertParams(); - if (alertParams == null || alertParams.size() == 0) { - return new AlertResult("false", "PagerDuty alert params is empty"); + if (alertParams == null || alertParams.isEmpty()) { + return new AlertResult(false, "PagerDuty alert params is empty"); } return new PagerDutySender(alertParams).sendPagerDutyAlter(alertData.getTitle(), alertData.getContent()); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java index 65792c8eae10..11dd01048ab8 100644 --- 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java @@ -53,7 +53,7 @@ public PagerDutySender(Map config) { public AlertResult sendPagerDutyAlter(String title, String content) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("send pager duty alert fail."); try { @@ -83,7 +83,7 @@ private AlertResult send(AlertResult alertResult, String url, String requestBody String responseContent = EntityUtils.toString(entity, StandardCharsets.UTF_8); try { if (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_ACCEPTED) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("send pager duty alert success"); } else { alertResult.setMessage( diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySenderTest.java index 16cf16f62f7c..52a47aa20e9c 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySenderTest.java @@ -39,6 +39,6 @@ public void initDingTalkConfig() { public void testSend() { PagerDutySender pagerDutySender = new PagerDutySender(pagerDutyConfig); 
AlertResult alertResult = pagerDutySender.sendPagerDutyAlter("pagerduty test title", "pagerduty test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertChannel.java index 7ca79253fadf..5928faacec2b 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertChannel.java @@ -31,7 +31,7 @@ public AlertResult process(AlertInfo info) { AlertData alertData = info.getAlertData(); Map paramsMap = info.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "prometheus alert manager params is null"); + return new AlertResult(false, "prometheus alert manager params is null"); } return new PrometheusAlertSender(paramsMap).sendMessage(alertData); diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSender.java index d27745049e07..48fda566b105 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/main/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSender.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpStatus; import org.apache.http.client.methods.CloseableHttpResponse; @@ -34,6 +35,7 @@ import org.apache.http.util.EntityUtils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; @@ -63,11 +65,10 @@ public AlertResult sendMessage(AlertData alertData) { String resp = sendMsg(alertData); return checkSendAlertManageMsgResult(resp); } catch (Exception e) { - String errorMsg = String.format("send prometheus alert manager alert error, exception: %s", e.getMessage()); - log.error(errorMsg); + log.error("Send prometheus alert manager alert error", e); alertResult = new AlertResult(); - alertResult.setStatus("false"); - alertResult.setMessage(errorMsg); + alertResult.setSuccess(false); + alertResult.setMessage(ExceptionUtils.getMessage(e)); } return alertResult; } @@ -90,7 +91,7 @@ private String sendMsg(AlertData alertData) throws IOException { } 
HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, "utf-8"); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); log.error( "Prometheus alert manager send alert failed, http status code: {}, title: {} ,content: {}, resp: {}", @@ -105,10 +106,10 @@ private String sendMsg(AlertData alertData) throws IOException { public AlertResult checkSendAlertManageMsgResult(String resp) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); if (Objects.equals(resp, PrometheusAlertConstants.ALERT_SUCCESS)) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("prometheus alert manager send success"); return alertResult; } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSenderTest.java index 2347d9726280..c0d18396e43b 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/java/org/apache/dolphinscheduler/plugin/alert/prometheus/PrometheusAlertSenderTest.java @@ -55,17 +55,17 @@ public void testSendAlert() { " }]"); PrometheusAlertSender sender = new PrometheusAlertSender(config); AlertResult result = sender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + Assertions.assertFalse(result.isSuccess()); } @Test public void testCheckSendAlertManageMsgResult() { PrometheusAlertSender prometheusAlertSender = new 
PrometheusAlertSender(config); AlertResult alertResult1 = prometheusAlertSender.checkSendAlertManageMsgResult(""); - Assertions.assertFalse(Boolean.parseBoolean(alertResult1.getStatus())); + Assertions.assertFalse(alertResult1.isSuccess()); Assertions.assertEquals("prometheus alert manager send fail, resp is ", alertResult1.getMessage()); AlertResult alertResult2 = prometheusAlertSender.checkSendAlertManageMsgResult("alert success"); - Assertions.assertTrue(Boolean.parseBoolean(alertResult2.getStatus())); + Assertions.assertTrue(alertResult2.isSuccess()); Assertions.assertEquals("prometheus alert manager send success", alertResult2.getMessage()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-prometheus/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java index d091eb9d8271..81cd59a5a9be 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java @@ -33,7 +33,7 @@ public AlertResult process(AlertInfo alertinfo) { AlertData alertData = alertinfo.getAlertData(); 
Map paramsMap = alertinfo.getAlertParams(); if (MapUtils.isEmpty(paramsMap)) { - return new AlertResult("false", "script params is empty"); + return new AlertResult(false, "script params is empty"); } return new ScriptSender(paramsMap).sendScriptAlert(alertData.getTitle(), alertData.getContent()); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java index a18adb2c7e2a..19b7149e74ee 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java @@ -56,7 +56,7 @@ AlertResult sendScriptAlert(String title, String content) { } // If it is another type of alarm script can be added here, such as python - alertResult.setStatus("false"); + alertResult.setSuccess(false); log.error("script type error: {}", scriptType); alertResult.setMessage("script type error : " + scriptType); return alertResult; @@ -64,7 +64,7 @@ AlertResult sendScriptAlert(String title, String content) { private AlertResult executeShellScript(String title, String content) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); if (Boolean.TRUE.equals(OSUtils.isWindows())) { alertResult.setMessage("shell script not support windows os"); return alertResult; @@ -111,7 +111,7 @@ private AlertResult executeShellScript(String title, String content) { int exitCode = ProcessUtils.executeScript(cmd); if (exitCode == 0) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); 
alertResult.setMessage("send script alert msg success"); return alertResult; } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java index a34f062264bc..3d85d5d638a3 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java @@ -33,6 +33,7 @@ public class ProcessUtilsTest { @Test public void testExecuteScript() { - ProcessUtils.executeScript(cmd); + int code = ProcessUtils.executeScript(cmd); + assert code != -1; } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java index 32e996f5e887..c392b6f7587f 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.plugin.alert.script; +import static org.junit.jupiter.api.Assertions.assertFalse; + import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.HashMap; @@ -48,9 +50,9 @@ public 
void testScriptSenderTest() { ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult; alertResult = scriptSender.sendScriptAlert("test title Kris", "test content"); - Assertions.assertEquals("true", alertResult.getStatus()); + Assertions.assertTrue(alertResult.isSuccess()); alertResult = scriptSender.sendScriptAlert("error msg title", "test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -58,7 +60,7 @@ public void testScriptSenderInjectionTest() { scriptConfig.put(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS, "' ; calc.exe ; '"); ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult = scriptSender.sendScriptAlert("test title Kris", "test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -67,7 +69,7 @@ public void testUserParamsNPE() { ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult; alertResult = scriptSender.sendScriptAlert("test user params NPE", "test content"); - Assertions.assertEquals("true", alertResult.getStatus()); + Assertions.assertTrue(alertResult.isSuccess()); } @Test @@ -76,7 +78,7 @@ public void testPathNPE() { ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult; alertResult = scriptSender.sendScriptAlert("test path NPE", "test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -85,7 +87,7 @@ public void testPathError() { ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult; alertResult = scriptSender.sendScriptAlert("test path NPE", "test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + assertFalse(alertResult.isSuccess()); Assertions.assertTrue(alertResult.getMessage().contains("shell script is invalid, only support .sh 
file")); } @@ -95,7 +97,7 @@ public void testTypeIsError() { ScriptSender scriptSender = new ScriptSender(scriptConfig); AlertResult alertResult; alertResult = scriptSender.sendScriptAlert("test type is error", "test content"); - Assertions.assertEquals("false", alertResult.getStatus()); + assertFalse(alertResult.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java index c8cb36a78b39..8052c7c4f13b 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java @@ -30,11 +30,11 @@ public final class SlackAlertChannel implements AlertChannel { public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map alertParams = alertInfo.getAlertParams(); - if (alertParams == null || alertParams.size() == 0) { - return new AlertResult("false", "Slack alert params is empty"); + if (alertParams == null || alertParams.isEmpty()) { + return new AlertResult(false, "Slack alert params is empty"); } 
SlackSender slackSender = new SlackSender(alertParams); String response = slackSender.sendMessage(alertData.getTitle(), alertData.getContent()); - return new AlertResult("ok".equals(response) ? "true" : "false", response); + return new AlertResult("ok".equals(response), response); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java index 60b2f8281a63..4096ccc998c7 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java @@ -29,6 +29,7 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -81,11 +82,11 @@ public String sendMessage(String title, String content) { } HttpPost httpPost = new HttpPost(webHookUrl); - httpPost.setEntity(new StringEntity(JSONUtils.toJsonString(paramMap), "UTF-8")); + httpPost.setEntity(new StringEntity(JSONUtils.toJsonString(paramMap), StandardCharsets.UTF_8)); CloseableHttpResponse response = httpClient.execute(httpPost); HttpEntity entity = response.getEntity(); - return EntityUtils.toString(entity, "UTF-8"); + return EntityUtils.toString(entity, StandardCharsets.UTF_8); } catch (Exception e) { log.error("Send message to slack error.", e); return "System Exception"; diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/resources/logback.xml 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramAlertChannel.java index efc8912d1a0a..ed33ef549767 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramAlertChannel.java @@ -30,7 +30,7 @@ public final class TelegramAlertChannel implements AlertChannel { public AlertResult process(AlertInfo info) { Map alertParams = info.getAlertParams(); if (alertParams == null || alertParams.isEmpty()) { - return new AlertResult("false", "Telegram alert params is empty"); + return AlertResult.fail("Telegram alert params is empty"); } AlertData data = info.getAlertData(); return new TelegramSender(alertParams).sendMessage(data); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java index 8aba9f5c2b0b..417e97d4cd2a 100644 --- 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java @@ -105,7 +105,7 @@ public AlertResult sendMessage(AlertData alertData) { } catch (Exception e) { log.warn("send telegram alert msg exception : {}", e.getMessage()); result = new AlertResult(); - result.setStatus("false"); + result.setSuccess(false); result.setMessage(String.format("send telegram alert fail. %s", e.getMessage())); } return result; @@ -113,7 +113,7 @@ public AlertResult sendMessage(AlertData alertData) { private AlertResult parseRespToResult(String resp) { AlertResult result = new AlertResult(); - result.setStatus("false"); + result.setSuccess(false); if (null == resp || resp.isEmpty()) { result.setMessage("send telegram msg error. telegram server resp is empty"); return result; @@ -127,7 +127,7 @@ private AlertResult parseRespToResult(String resp) { result.setMessage(String.format("send telegram alert fail. 
telegram server error_code: %d, description: %s", response.errorCode, response.description)); } else { - result.setStatus("true"); + result.setSuccess(true); result.setMessage("send telegram msg success."); } return result; @@ -153,7 +153,7 @@ private String sendInvoke(String title, String content) throws IOException { String resp; try { HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, "UTF-8"); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); } finally { response.close(); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSenderTest.java index a57de30219d6..d05a45d73f64 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSenderTest.java @@ -52,7 +52,7 @@ public void testSendMessageFailByParamToken() { TelegramParamsConstants.NAME_TELEGRAM_BOT_TOKEN, "XXXXXXX"); TelegramSender telegramSender = new TelegramSender(telegramConfig); AlertResult result = telegramSender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + Assertions.assertFalse(result.isSuccess()); } @@ -65,7 +65,7 @@ public void testSendMessageFailByChatId() { TelegramParamsConstants.NAME_TELEGRAM_CHAT_ID, "-XXXXXXX"); TelegramSender telegramSender = new TelegramSender(telegramConfig); AlertResult result = telegramSender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + 
Assertions.assertFalse(result.isSuccess()); } @Test @@ -75,7 +75,7 @@ public void testSendMessage() { alertData.setContent("telegram test content"); TelegramSender telegramSender = new TelegramSender(telegramConfig); AlertResult result = telegramSender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + Assertions.assertFalse(result.isSuccess()); } @@ -89,7 +89,7 @@ public void testSendMessageByMarkdown() { TelegramParamsConstants.NAME_TELEGRAM_PARSE_MODE, TelegramAlertConstants.PARSE_MODE_MARKDOWN); TelegramSender telegramSender = new TelegramSender(telegramConfig); AlertResult result = telegramSender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + Assertions.assertFalse(result.isSuccess()); } @@ -102,7 +102,7 @@ public void testSendMessageByHtml() { TelegramParamsConstants.NAME_TELEGRAM_PARSE_MODE, TelegramAlertConstants.PARSE_MODE_HTML); TelegramSender telegramSender = new TelegramSender(telegramConfig); AlertResult result = telegramSender.sendMessage(alertData); - Assertions.assertEquals("false", result.getStatus()); + Assertions.assertFalse(result.isSuccess()); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsAlertChannel.java 
index 38a582f1c63d..94f77aed6e04 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsAlertChannel.java @@ -33,7 +33,7 @@ public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); Map alertParams = alertInfo.getAlertParams(); if (MapUtils.isEmpty(alertParams)) { - return new AlertResult("false", "WebexTeams alert params is empty"); + return new AlertResult(false, "WebexTeams alert params is empty"); } return new WebexTeamsSender(alertParams).sendWebexTeamsAlter(alertData); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java index f8201a40e096..3b8b3d21c818 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java @@ -67,7 +67,7 @@ public WebexTeamsSender(Map config) { public AlertResult sendWebexTeamsAlter(AlertData alertData) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus("false"); + alertResult.setSuccess(false); alertResult.setMessage("send webex teams alert fail."); try { @@ -93,7 +93,7 @@ private void send(AlertResult alertResult, AlertData alertData) throws IOExcepti String 
responseContent = EntityUtils.toString(entity, StandardCharsets.UTF_8); try { if (statusCode == HttpStatus.SC_OK) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("send webex teams alert success"); } else { alertResult.setMessage(String.format( diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSenderTest.java index 1d3070cb55bf..ddc806e593ec 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSenderTest.java @@ -85,6 +85,6 @@ public void testSendToPersonId() { public void testSend() { WebexTeamsSender webexTeamsSender = new WebexTeamsSender(webexTeamsConfig); AlertResult alertResult = webexTeamsSender.sendWebexTeamsAlter(alertData); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java index 786cdb159f07..dcc53c7f59a2 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java @@ -31,7 +31,7 @@ public AlertResult process(AlertInfo info) { AlertData alertData = info.getAlertData(); Map paramsMap = info.getAlertParams(); if (null == paramsMap) { - return new AlertResult("false", "we chat params is null"); + return new AlertResult(false, "we chat params is null"); } return new WeChatSender(paramsMap).sendEnterpriseWeChat(alertData.getTitle(), alertData.getContent()); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java index 7f5eaef4f968..76ad4800153a 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java @@ -23,8 +23,6 @@ public final class WeChatAlertConstants { static final String MARKDOWN_ENTER = "\n"; - static 
final String CHARSET = "UTF-8"; - static final String WE_CHAT_PUSH_URL = "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token={token}"; static final String WE_CHAT_APP_CHAT_PUSH_URL = "https://qyapi.weixin.qq.com/cgi-bin/appchat/send?access_token" + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java index 4b49e0436dc6..d3fba217dcb0 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java @@ -38,6 +38,7 @@ import org.apache.http.util.EntityUtils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; @@ -45,13 +46,14 @@ import java.util.Map.Entry; import java.util.Set; +import lombok.Getter; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; @Slf4j public final class WeChatSender { private static final String MUST_NOT_NULL = " must not null"; - private static final String ALERT_STATUS = "false"; private static final String AGENT_ID_REG_EXP = "{agentId}"; private static final String MSG_REG_EXP = "{msg}"; private static final String USER_REG_EXP = "{toUser}"; @@ -85,12 +87,12 @@ private static String post(String url, String data) throws IOException { CloseableHttpClient httpClient = HttpClients.custom().setRetryHandler(HttpServiceRetryStrategy.retryStrategy).build()) { HttpPost httpPost = new HttpPost(url); - httpPost.setEntity(new StringEntity(data, WeChatAlertConstants.CHARSET)); + 
httpPost.setEntity(new StringEntity(data, StandardCharsets.UTF_8)); CloseableHttpResponse response = httpClient.execute(httpPost); String resp; try { HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, WeChatAlertConstants.CHARSET); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); } finally { response.close(); @@ -142,7 +144,7 @@ private static String get(String url) throws IOException { HttpGet httpGet = new HttpGet(url); try (CloseableHttpResponse response = httpClient.execute(httpGet)) { HttpEntity entity = response.getEntity(); - resp = EntityUtils.toString(entity, WeChatAlertConstants.CHARSET); + resp = EntityUtils.toString(entity, StandardCharsets.UTF_8); EntityUtils.consume(entity); } @@ -175,7 +177,7 @@ private static String mkString(Iterable list) { private static AlertResult checkWeChatSendMsgResult(String result) { AlertResult alertResult = new AlertResult(); - alertResult.setStatus(ALERT_STATUS); + alertResult.setSuccess(false); if (null == result) { alertResult.setMessage("we chat send fail"); @@ -189,11 +191,11 @@ private static AlertResult checkWeChatSendMsgResult(String result) { return alertResult; } if (sendMsgResponse.errcode == 0) { - alertResult.setStatus("true"); + alertResult.setSuccess(true); alertResult.setMessage("we chat alert send success"); return alertResult; } - alertResult.setStatus(ALERT_STATUS); + alertResult.setSuccess(false); alertResult.setMessage(sendMsgResponse.getErrmsg()); return alertResult; } @@ -209,7 +211,7 @@ public AlertResult sendEnterpriseWeChat(String title, String content) { if (null == weChatToken) { alertResult = new AlertResult(); alertResult.setMessage("send we chat alert fail,get weChat token error"); - alertResult.setStatus(ALERT_STATUS); + alertResult.setSuccess(false); return alertResult; } String enterpriseWeChatPushUrlReplace = ""; @@ -236,7 +238,7 @@ public AlertResult sendEnterpriseWeChat(String title, String content) { 
log.info("send we chat alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); alertResult.setMessage("send we chat alert fail"); - alertResult.setStatus(ALERT_STATUS); + alertResult.setSuccess(false); } return alertResult; } @@ -259,6 +261,8 @@ private String getToken() { return null; } + @Getter + @Setter static final class WeChatSendMsgResponse { private Integer errcode; @@ -267,22 +271,6 @@ static final class WeChatSendMsgResponse { public WeChatSendMsgResponse() { } - public Integer getErrcode() { - return this.errcode; - } - - public void setErrcode(Integer errcode) { - this.errcode = errcode; - } - - public String getErrmsg() { - return this.errmsg; - } - - public void setErrmsg(String errmsg) { - this.errmsg = errmsg; - } - public boolean equals(final Object o) { if (o == this) { return true; diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java index e0c934f436e9..6e4c318d131f 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java @@ -71,7 +71,7 @@ public void testSendWeChatTableMsg() { WeChatSender weChatSender = new WeChatSender(weChatConfig); AlertResult alertResult = weChatSender.sendEnterpriseWeChat("test", content); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } @Test @@ -79,7 +79,7 @@ public void testSendWeChatTextMsg() { weChatConfig.put(AlertConstants.NAME_SHOW_TYPE, 
ShowType.TEXT.getDescp()); WeChatSender weChatSender = new WeChatSender(weChatConfig); AlertResult alertResult = weChatSender.sendEnterpriseWeChat("test", content); - Assertions.assertEquals("false", alertResult.getStatus()); + Assertions.assertFalse(alertResult.isSuccess()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml index 507d7acb45c4..844a5983df8f 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml @@ -66,6 +66,12 @@ org.springframework.cloud spring-cloud-starter-kubernetes-client-config + + + org.springframework.boot + spring-boot-starter-test + test + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/assembly/dolphinscheduler-alert-server.xml b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/assembly/dolphinscheduler-alert-server.xml index 6f3909622133..24c8fb2f11fe 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/assembly/dolphinscheduler-alert-server.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/assembly/dolphinscheduler-alert-server.xml @@ -52,6 +52,14 @@ ${basedir}/../../dolphinscheduler-common/src/main/resources **/*.properties + **/*.yaml + + conf + + + ${basedir}/../dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources + + **/*.yaml conf diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/bin/jvm_args_env.sh b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/bin/jvm_args_env.sh index c668944139ce..d953e04d2f04 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/bin/jvm_args_env.sh +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/bin/jvm_args_env.sh @@ -24,6 +24,7 @@ -XX:+PrintGCDetails -Xloggc:gc.log +-XX:-OmitStackTraceInFastThrow -XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=dump.hprof diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java index fd3d4b02e34a..24357110470a 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java @@ -18,73 +18,52 @@ package org.apache.dolphinscheduler.alert; import org.apache.dolphinscheduler.alert.metrics.AlertServerMetrics; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.alert.registry.AlertRegistryClient; -import org.apache.dolphinscheduler.alert.rpc.AlertRpcServer; import org.apache.dolphinscheduler.alert.service.AlertBootstrapService; -import org.apache.dolphinscheduler.alert.service.ListenerEventPostService; +import org.apache.dolphinscheduler.common.CommonConfiguration; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.lifecycle.ServerLifeCycleManager; import org.apache.dolphinscheduler.common.thread.DefaultUncaughtExceptionHandler; import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.dao.DaoConfiguration; +import 
org.apache.dolphinscheduler.registry.api.RegistryConfiguration; +import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.builder.SpringApplicationBuilder; -import org.springframework.boot.context.event.ApplicationReadyEvent; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.context.event.EventListener; +import org.springframework.context.annotation.Import; -@SpringBootApplication -@ComponentScan("org.apache.dolphinscheduler") @Slf4j +@Import({CommonConfiguration.class, + DaoConfiguration.class, + RegistryConfiguration.class}) +@SpringBootApplication public class AlertServer { @Autowired private AlertBootstrapService alertBootstrapService; - @Autowired - private ListenerEventPostService listenerEventPostService; - @Autowired - private AlertRpcServer alertRpcServer; - @Autowired - private AlertPluginManager alertPluginManager; - @Autowired - private AlertRegistryClient alertRegistryClient; public static void main(String[] args) { AlertServerMetrics.registerUncachedException(DefaultUncaughtExceptionHandler::getUncaughtExceptionCount); Thread.setDefaultUncaughtExceptionHandler(DefaultUncaughtExceptionHandler.getInstance()); Thread.currentThread().setName(Constants.THREAD_NAME_ALERT_SERVER); - new SpringApplicationBuilder(AlertServer.class).run(args); + SpringApplication.run(AlertServer.class, args); } - @EventListener - public void run(ApplicationReadyEvent readyEvent) { - log.info("Alert server is staring ..."); - alertPluginManager.start(); - alertRegistryClient.start(); + @PostConstruct + public void run() { + log.info("AlertServer is starting ..."); alertBootstrapService.start(); - listenerEventPostService.start(); - alertRpcServer.start(); - log.info("Alert
server is started ..."); + log.info("AlertServer is started ..."); } @PreDestroy public void close() { - destroy("alert server destroy"); - } - - /** - * gracefully stop - * - * @param cause stop cause - */ - public void destroy(String cause) { - + String cause = "AlertServer destroy"; try { // set stop signal is true // execute only once @@ -92,19 +71,14 @@ public void destroy(String cause) { log.warn("AlterServer is already stopped"); return; } - log.info("Alert server is stopping, cause: {}", cause); - try ( - AlertRpcServer closedAlertRpcServer = alertRpcServer; - AlertBootstrapService closedAlertBootstrapService = alertBootstrapService; - ListenerEventPostService closedListenerEventPostService = listenerEventPostService; - AlertRegistryClient closedAlertRegistryClient = alertRegistryClient) { - // close resource - } + log.info("AlertServer is stopping, cause: {}", cause); + alertBootstrapService.close(); // thread sleep 3 seconds for thread quietly stop ThreadUtils.sleep(Constants.SERVER_CLOSE_WAIT_TIME.toMillis()); - log.info("Alter server stopped, cause: {}", cause); + log.info("AlertServer stopped, cause: {}", cause); } catch (Exception e) { - log.error("Alert server stop failed, cause: {}", cause, e); + log.error("AlertServer stop failed, cause: {}", cause, e); } } + } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/config/AlertConfig.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/config/AlertConfig.java index 824851fd92a0..240f92b846f8 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/config/AlertConfig.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/config/AlertConfig.java @@ -43,6 +43,8 @@ public final class AlertConfig implements Validator { private Duration maxHeartbeatInterval = 
Duration.ofSeconds(60); + private int senderParallelism = 100; + private String alertServerAddress; @Override @@ -58,6 +60,10 @@ public void validate(Object target, Errors errors) { errors.rejectValue("max-heartbeat-interval", null, "should be a valid duration"); } + if (senderParallelism <= 0) { + errors.rejectValue("sender-parallelism", null, "should be a positive number"); + } + if (StringUtils.isEmpty(alertServerAddress)) { alertConfig.setAlertServerAddress(NetUtils.getAddr(alertConfig.getPort())); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/metrics/AlertServerMetrics.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/metrics/AlertServerMetrics.java index db75a49371a5..dfe819fbfa71 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/metrics/AlertServerMetrics.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/metrics/AlertServerMetrics.java @@ -39,12 +39,23 @@ public class AlertServerMetrics { .description("Alert failure count") .register(Metrics.globalRegistry); + private final Counter alertHeartBeatCounter = + Counter.builder("ds.alert.heartbeat.count") + .description("alert heartbeat count") + .register(Metrics.globalRegistry); + public void registerPendingAlertGauge(final Supplier supplier) { Gauge.builder("ds.alert.pending", supplier) .description("Number of pending alert") .register(Metrics.globalRegistry); } + public void registerSendingAlertGauge(final Supplier supplier) { + Gauge.builder("ds.alert.sending", supplier) + .description("Number of sending alert") + .register(Metrics.globalRegistry); + } + public static void registerUncachedException(final Supplier supplier) { Gauge.builder("ds.alert.uncached.exception", supplier) .description("number of uncached exception") @@ -59,4 +70,7 @@ public void 
incAlertFailCount() { alertFailCounter.increment(); } + public void incAlertHeartbeatCount() { + alertHeartBeatCounter.increment(); + } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java index 1035018e9cd8..badd463166af 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java @@ -36,8 +36,8 @@ import org.springframework.stereotype.Component; -@Component @Slf4j +@Component public final class AlertPluginManager { private final PluginDao pluginDao; diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertHeartbeatTask.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertHeartbeatTask.java index 3b2d588f8554..b8640dd539cf 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertHeartbeatTask.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertHeartbeatTask.java @@ -18,6 +18,8 @@ package org.apache.dolphinscheduler.alert.registry; import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.alert.metrics.AlertServerMetrics; +import org.apache.dolphinscheduler.alert.service.AlertHAServer; import org.apache.dolphinscheduler.common.enums.ServerStatus; import org.apache.dolphinscheduler.common.model.AlertServerHeartBeat; import org.apache.dolphinscheduler.common.model.BaseHeartBeatTask; @@ -42,12 +44,15 @@ 
public class AlertHeartbeatTask extends BaseHeartBeatTask private final RegistryClient registryClient; private final MetricsProvider metricsProvider; + + private final AlertHAServer alertHAServer; private final String heartBeatPath; private final long startupTime; public AlertHeartbeatTask(AlertConfig alertConfig, MetricsProvider metricsProvider, - RegistryClient registryClient) { + RegistryClient registryClient, + AlertHAServer alertHAServer) { super("AlertHeartbeatTask", alertConfig.getMaxHeartbeatInterval().toMillis()); this.startupTime = System.currentTimeMillis(); this.alertConfig = alertConfig; @@ -55,6 +60,7 @@ public AlertHeartbeatTask(AlertConfig alertConfig, this.registryClient = registryClient; this.heartBeatPath = RegistryNodeType.ALERT_SERVER.getRegistryPath() + "/" + alertConfig.getAlertServerAddress(); + this.alertHAServer = alertHAServer; this.processId = OSUtils.getProcessID(); } @@ -65,10 +71,12 @@ public AlertServerHeartBeat getHeartBeat() { .processId(processId) .startupTime(startupTime) .reportTime(System.currentTimeMillis()) - .cpuUsage(systemMetrics.getTotalCpuUsedPercentage()) + .jvmCpuUsage(systemMetrics.getJvmCpuUsagePercentage()) + .cpuUsage(systemMetrics.getSystemCpuUsagePercentage()) .memoryUsage(systemMetrics.getSystemMemoryUsedPercentage()) .jvmMemoryUsage(systemMetrics.getJvmMemoryUsedPercentage()) .serverStatus(ServerStatus.NORMAL) + .isActive(alertHAServer.isActive()) .host(NetUtils.getHost()) .port(alertConfig.getPort()) .build(); @@ -78,6 +86,7 @@ public AlertServerHeartBeat getHeartBeat() { public void writeHeartBeat(AlertServerHeartBeat heartBeat) { String heartBeatJson = JSONUtils.toJsonString(heartBeat); registryClient.persistEphemeral(heartBeatPath, heartBeatJson); + AlertServerMetrics.incAlertHeartbeatCount(); log.debug("Success write master heartBeatInfo into registry, masterRegistryPath: {}, heartBeatInfo: {}", heartBeatPath, heartBeatJson); } diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertRegistryClient.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertRegistryClient.java index 616220bd1bba..1b7839d81628 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertRegistryClient.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/registry/AlertRegistryClient.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.alert.registry; import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.alert.service.AlertHAServer; import org.apache.dolphinscheduler.meter.metrics.MetricsProvider; import org.apache.dolphinscheduler.registry.api.RegistryClient; -import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import lombok.extern.slf4j.Slf4j; @@ -42,10 +42,12 @@ public class AlertRegistryClient implements AutoCloseable { private AlertHeartbeatTask alertHeartbeatTask; + @Autowired + private AlertHAServer alertHAServer; + public void start() { log.info("AlertRegistryClient starting..."); - registryClient.getLock(RegistryNodeType.ALERT_LOCK.getRegistryPath()); - alertHeartbeatTask = new AlertHeartbeatTask(alertConfig, metricsProvider, registryClient); + alertHeartbeatTask = new AlertHeartbeatTask(alertConfig, metricsProvider, registryClient, alertHAServer); alertHeartbeatTask.start(); // start heartbeat task log.info("AlertRegistryClient started..."); @@ -55,7 +57,6 @@ public void start() { public void close() { log.info("AlertRegistryClient closing..."); alertHeartbeatTask.shutdown(); - registryClient.releaseLock(RegistryNodeType.ALERT_LOCK.getRegistryPath()); log.info("AlertRegistryClient closed..."); } diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertOperatorImpl.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertOperatorImpl.java index 9f11fa6c2e5a..6a5ed3e0bec5 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertOperatorImpl.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertOperatorImpl.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.alert.rpc; -import org.apache.dolphinscheduler.alert.service.AlertBootstrapService; +import org.apache.dolphinscheduler.alert.service.AlertSender; import org.apache.dolphinscheduler.extract.alert.IAlertOperator; import org.apache.dolphinscheduler.extract.alert.request.AlertSendRequest; import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; @@ -32,16 +32,15 @@ public class AlertOperatorImpl implements IAlertOperator { @Autowired - private AlertBootstrapService alertBootstrapService; + private AlertSender alertSender; @Override public AlertSendResponse sendAlert(AlertSendRequest alertSendRequest) { log.info("Received AlertSendRequest : {}", alertSendRequest); - AlertSendResponse alertSendResponse = alertBootstrapService.syncHandler( + AlertSendResponse alertSendResponse = alertSender.syncHandler( alertSendRequest.getGroupId(), alertSendRequest.getTitle(), - alertSendRequest.getContent(), - alertSendRequest.getWarnType()); + alertSendRequest.getContent()); log.info("Handle AlertSendRequest finish: {}", alertSendResponse); return alertSendResponse; } @@ -49,7 +48,7 @@ public AlertSendResponse sendAlert(AlertSendRequest alertSendRequest) { @Override public AlertSendResponse sendTestAlert(AlertTestSendRequest alertSendRequest) { log.info("Received AlertTestSendRequest : {}", alertSendRequest); - AlertSendResponse alertSendResponse = 
alertBootstrapService.syncTestSend( + AlertSendResponse alertSendResponse = alertSender.syncTestSend( alertSendRequest.getPluginDefineId(), alertSendRequest.getPluginInstanceParams()); log.info("Handle AlertTestSendRequest finish: {}", alertSendResponse); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServer.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServer.java index 3bd368573a79..d73e4755ddd4 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServer.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServer.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.alert.rpc; import org.apache.dolphinscheduler.alert.config.AlertConfig; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServerFactory; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import org.apache.dolphinscheduler.extract.base.server.SpringServerMethodInvokerDiscovery; @@ -31,20 +30,7 @@ public class AlertRpcServer extends SpringServerMethodInvokerDiscovery implements AutoCloseable { public AlertRpcServer(AlertConfig alertConfig) { - super(NettyRemotingServerFactory.buildNettyRemotingServer( - NettyServerConfig.builder().serverName("AlertRpcServer").listenPort(alertConfig.getPort()).build())); + super(NettyServerConfig.builder().serverName("AlertRpcServer").listenPort(alertConfig.getPort()).build()); } - public void start() { - log.info("Starting AlertRpcServer..."); - nettyRemotingServer.start(); - log.info("Started AlertRpcServer..."); - } - - @Override - public void close() { - log.info("Closing AlertRpcServer..."); - nettyRemotingServer.close(); - log.info("Closed AlertRpcServer..."); - } } diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventFetcher.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventFetcher.java new file mode 100644 index 000000000000..1c61659ce38b --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventFetcher.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.common.thread.BaseDaemonThread; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public abstract class AbstractEventFetcher extends BaseDaemonThread implements EventFetcher { + + protected static final int FETCH_SIZE = 100; + + protected static final long FETCH_INTERVAL = 5_000; + + protected final AlertHAServer alertHAServer; + + private final EventPendingQueue eventPendingQueue; + + private final AtomicBoolean runningFlag = new AtomicBoolean(false); + + private Integer eventOffset; + + protected AbstractEventFetcher(String fetcherName, + AlertHAServer alertHAServer, + EventPendingQueue eventPendingQueue) { + super(fetcherName); + this.alertHAServer = alertHAServer; + this.eventPendingQueue = eventPendingQueue; + this.eventOffset = -1; + } + + @Override + public synchronized void start() { + if (!runningFlag.compareAndSet(false, true)) { + throw new IllegalArgumentException("AlertEventFetcher is already started"); + } + log.info("AlertEventFetcher starting..."); + super.start(); + log.info("AlertEventFetcher started..."); + } + + @Override + public void run() { + while (runningFlag.get()) { + try { + if (!alertHAServer.isActive()) { + log.debug("The current node is not active, will not loop Alert"); + Thread.sleep(FETCH_INTERVAL); + continue; + } + List pendingEvents = fetchPendingEvent(eventOffset); + if (CollectionUtils.isEmpty(pendingEvents)) { + log.debug("No pending events found"); + Thread.sleep(FETCH_INTERVAL); + continue; + } + for (T alert : pendingEvents) { + eventPendingQueue.put(alert); + } + eventOffset = Math.max(eventOffset, + pendingEvents.stream().map(this::getEventOffset).max(Integer::compareTo).get()); + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + } catch (Exception 
ex) { + log.error("AlertEventFetcher error", ex); + } + } + } + + protected abstract int getEventOffset(T event); + + @Override + public void shutdown() { + if (!runningFlag.compareAndSet(true, false)) { + log.warn("The AlertEventFetcher is not started"); + } + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventLoop.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventLoop.java new file mode 100644 index 000000000000..568125002e70 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventLoop.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.common.thread.BaseDaemonThread; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public abstract class AbstractEventLoop extends BaseDaemonThread implements EventLoop { + + private final EventPendingQueue eventPendingQueue; + + private final AtomicInteger handlingEventCount; + + private final int eventHandleWorkerNum; + + private final ThreadPoolExecutor threadPoolExecutor; + + private final AtomicBoolean runningFlag = new AtomicBoolean(false); + + protected AbstractEventLoop(String name, + ThreadPoolExecutor threadPoolExecutor, + EventPendingQueue eventPendingQueue) { + super(name); + this.handlingEventCount = new AtomicInteger(0); + this.eventHandleWorkerNum = threadPoolExecutor.getMaximumPoolSize(); + this.threadPoolExecutor = threadPoolExecutor; + this.eventPendingQueue = eventPendingQueue; + } + + @Override + public synchronized void start() { + if (!runningFlag.compareAndSet(false, true)) { + throw new IllegalArgumentException(getClass().getName() + " is already started"); + } + log.info("{} starting...", getClass().getName()); + super.start(); + log.info("{} started...", getClass().getName()); + } + + @Override + public void run() { + while (runningFlag.get()) { + try { + if (handlingEventCount.get() >= eventHandleWorkerNum) { + log.debug("There is no idle event worker, waiting for a while..."); + Thread.sleep(1000); + continue; + } + T pendingEvent = eventPendingQueue.take(); + handlingEventCount.incrementAndGet(); + CompletableFuture.runAsync(() -> handleEvent(pendingEvent), threadPoolExecutor) + .whenComplete((aVoid, throwable) -> { + if (throwable != null) { + log.error("Handle event: {} error", pendingEvent, throwable); + } + 
handlingEventCount.decrementAndGet(); + }); + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + log.error("Loop event thread has been interrupted..."); + break; + } catch (Exception ex) { + log.error("Loop event error", ex); + } + } + } + + @Override + public int getHandlingEventCount() { + return handlingEventCount.get(); + } + + @Override + public void shutdown() { + if (!runningFlag.compareAndSet(true, false)) { + log.warn(getClass().getName() + " is not started"); + } + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventPendingQueue.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventPendingQueue.java new file mode 100644 index 000000000000..1d7e213ab95b --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventPendingQueue.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import java.util.concurrent.LinkedBlockingQueue; + +public abstract class AbstractEventPendingQueue implements EventPendingQueue { + + private final LinkedBlockingQueue pendingAlertQueue; + + private final int capacity; + + protected AbstractEventPendingQueue(int capacity) { + this.capacity = capacity; + this.pendingAlertQueue = new LinkedBlockingQueue<>(capacity); + } + + @Override + public void put(T alert) throws InterruptedException { + pendingAlertQueue.put(alert); + } + + @Override + public T take() throws InterruptedException { + return pendingAlertQueue.take(); + } + + @Override + public int size() { + return pendingAlertQueue.size(); + } + + @Override + public int capacity() { + return capacity; + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventSender.java new file mode 100644 index 000000000000..deff97da49d7 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AbstractEventSender.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import static com.google.common.base.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; +import org.apache.dolphinscheduler.dao.entity.AlertSendStatus; +import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; +import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; + +import lombok.extern.slf4j.Slf4j; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; + +@Slf4j +public abstract class AbstractEventSender implements EventSender { + + protected final AlertPluginManager alertPluginManager; + + private final long sendEventTimeout; + + protected AbstractEventSender(AlertPluginManager alertPluginManager, long sendEventTimeout) { + this.alertPluginManager = alertPluginManager; + this.sendEventTimeout = sendEventTimeout; + } + + @Override + public void sendEvent(T event) { + List alertPluginInstanceList = getAlertPluginInstanceList(event); + if 
(CollectionUtils.isEmpty(alertPluginInstanceList)) { + onError(event, "No bind plugin instance found"); + return; + } + AlertData alertData = getAlertData(event); + List alertSendStatuses = new ArrayList<>(); + for (AlertPluginInstance instance : alertPluginInstanceList) { + AlertResult alertResult = doSendEvent(instance, alertData); + AlertStatus alertStatus = + alertResult.isSuccess() ? AlertStatus.EXECUTION_SUCCESS : AlertStatus.EXECUTION_FAILURE; + AlertSendStatus alertSendStatus = AlertSendStatus.builder() + .alertId(getEventId(event)) + .alertPluginInstanceId(instance.getId()) + .sendStatus(alertStatus) + .log(JSONUtils.toJsonString(alertResult)) + .createTime(new Date()) + .build(); + alertSendStatuses.add(alertSendStatus); + } + long failureCount = alertSendStatuses.stream() + .map(alertSendStatus -> alertSendStatus.getSendStatus() == AlertStatus.EXECUTION_FAILURE) + .count(); + long successCount = alertSendStatuses.stream() + .map(alertSendStatus -> alertSendStatus.getSendStatus() == AlertStatus.EXECUTION_SUCCESS) + .count(); + if (successCount == 0) { + onError(event, JSONUtils.toJsonString(alertSendStatuses)); + } else { + if (failureCount > 0) { + onPartialSuccess(event, JSONUtils.toJsonString(alertSendStatuses)); + } else { + onSuccess(event, JSONUtils.toJsonString(alertSendStatuses)); + } + } + } + + public abstract List getAlertPluginInstanceList(T event); + + public abstract AlertData getAlertData(T event); + + public abstract Integer getEventId(T event); + + public abstract void onError(T event, String log); + + public abstract void onPartialSuccess(T event, String log); + + public abstract void onSuccess(T event, String log); + + @Override + public AlertResult doSendEvent(AlertPluginInstance instance, AlertData alertData) { + int pluginDefineId = instance.getPluginDefineId(); + Optional alertChannelOptional = alertPluginManager.getAlertChannel(pluginDefineId); + if (!alertChannelOptional.isPresent()) { + return AlertResult.fail("Cannot find the 
alertPlugin: " + pluginDefineId); + } + AlertChannel alertChannel = alertChannelOptional.get(); + + AlertInfo alertInfo = AlertInfo.builder() + .alertData(alertData) + .alertParams(PluginParamsTransfer.getPluginParamsMap(instance.getPluginInstanceParams())) + .alertPluginInstanceId(instance.getId()) + .build(); + try { + AlertResult alertResult; + if (sendEventTimeout <= 0) { + if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { + alertResult = alertChannel.closeAlert(alertInfo); + } else { + alertResult = alertChannel.process(alertInfo); + } + } else { + CompletableFuture future; + if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { + future = CompletableFuture.supplyAsync(() -> alertChannel.closeAlert(alertInfo)); + } else { + future = CompletableFuture.supplyAsync(() -> alertChannel.process(alertInfo)); + } + alertResult = future.get(sendEventTimeout, TimeUnit.MILLISECONDS); + } + checkNotNull(alertResult, "AlertResult cannot be null"); + return alertResult; + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + return AlertResult.fail(ExceptionUtils.getMessage(interruptedException)); + } catch (Exception e) { + log.error("Send alert data {} failed", alertData, e); + return AlertResult.fail(ExceptionUtils.getMessage(e)); + } + } + + @Override + public AlertSendResponse syncTestSend(int pluginDefineId, String pluginInstanceParams) { + + Optional alertChannelOptional = alertPluginManager.getAlertChannel(pluginDefineId); + if (!alertChannelOptional.isPresent()) { + AlertSendResponse.AlertSendResponseResult alertSendResponseResult = + AlertSendResponse.AlertSendResponseResult.fail("Cannot find the alertPlugin: " + pluginDefineId); + return AlertSendResponse.fail(Lists.newArrayList(alertSendResponseResult)); + } + AlertData alertData = AlertData.builder() + .title(AlertConstants.TEST_TITLE) + .content(AlertConstants.TEST_CONTENT) + .build(); + + AlertInfo alertInfo = AlertInfo.builder() + 
.alertData(alertData) + .alertParams(PluginParamsTransfer.getPluginParamsMap(pluginInstanceParams)) + .build(); + + try { + AlertResult alertResult = alertChannelOptional.get().process(alertInfo); + Preconditions.checkNotNull(alertResult, "AlertResult cannot be null"); + if (alertResult.isSuccess()) { + return AlertSendResponse + .success(Lists.newArrayList(AlertSendResponse.AlertSendResponseResult.success())); + } + return AlertSendResponse.fail( + Lists.newArrayList(AlertSendResponse.AlertSendResponseResult.fail(alertResult.getMessage()))); + } catch (Exception e) { + log.error("Test send alert error", e); + return new AlertSendResponse(false, + Lists.newArrayList(AlertSendResponse.AlertSendResponseResult.fail(ExceptionUtils.getMessage(e)))); + } + + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertBootstrapService.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertBootstrapService.java index 77e62a65a0b0..5553e01bc9b9 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertBootstrapService.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertBootstrapService.java @@ -17,350 +17,84 @@ package org.apache.dolphinscheduler.alert.service; -import org.apache.dolphinscheduler.alert.api.AlertChannel; -import org.apache.dolphinscheduler.alert.api.AlertConstants; -import org.apache.dolphinscheduler.alert.api.AlertData; -import org.apache.dolphinscheduler.alert.api.AlertInfo; -import org.apache.dolphinscheduler.alert.api.AlertResult; -import org.apache.dolphinscheduler.alert.config.AlertConfig; -import org.apache.dolphinscheduler.alert.metrics.AlertServerMetrics; import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import 
org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.AlertStatus; -import org.apache.dolphinscheduler.common.enums.AlertType; -import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.lifecycle.ServerLifeCycleManager; -import org.apache.dolphinscheduler.common.thread.BaseDaemonThread; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.entity.Alert; -import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; -import org.apache.dolphinscheduler.dao.entity.AlertSendStatus; -import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; -import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; - -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.collections4.MapUtils; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; - -import javax.annotation.Nullable; +import org.apache.dolphinscheduler.alert.registry.AlertRegistryClient; +import org.apache.dolphinscheduler.alert.rpc.AlertRpcServer; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import com.google.common.collect.Lists; - -@Service +/** + * The bootstrap service for alert server. it will start all the necessary component for alert server. 
+ */ @Slf4j -public final class AlertBootstrapService extends BaseDaemonThread implements AutoCloseable { - - @Autowired - private AlertDao alertDao; - @Autowired - private AlertPluginManager alertPluginManager; - @Autowired - private AlertConfig alertConfig; - - public AlertBootstrapService() { - super("AlertBootstrapService"); - } - - @Override - public void run() { - log.info("Alert sender thread started"); - while (!ServerLifeCycleManager.isStopped()) { - try { - List alerts = alertDao.listPendingAlerts(); - if (CollectionUtils.isEmpty(alerts)) { - log.debug("There is not waiting alerts"); - continue; - } - AlertServerMetrics.registerPendingAlertGauge(alerts::size); - this.send(alerts); - } catch (Exception e) { - log.error("Alert sender thread meet an exception", e); - } finally { - ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS * 5L); - } - } - log.info("Alert sender thread stopped"); - } - - public void send(List alerts) { - for (Alert alert : alerts) { - // get alert group from alert - int alertId = alert.getId(); - int alertGroupId = Optional.ofNullable(alert.getAlertGroupId()).orElse(0); - List alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); - if (CollectionUtils.isEmpty(alertInstanceList)) { - log.error("send alert msg fail,no bind plugin instance."); - List alertResults = Lists.newArrayList(new AlertResult("false", - "no bind plugin instance")); - alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, JSONUtils.toJsonString(alertResults), alertId); - continue; - } - AlertData alertData = AlertData.builder() - .id(alertId) - .content(alert.getContent()) - .log(alert.getLog()) - .title(alert.getTitle()) - .warnType(alert.getWarningType().getCode()) - .alertType(alert.getAlertType().getCode()) - .build(); - - int sendSuccessCount = 0; - List alertSendStatuses = new ArrayList<>(); - List alertResults = new ArrayList<>(); - for (AlertPluginInstance instance : alertInstanceList) { - AlertResult alertResult = 
this.alertResultHandler(instance, alertData); - if (alertResult != null) { - AlertStatus sendStatus = Boolean.parseBoolean(alertResult.getStatus()) - ? AlertStatus.EXECUTION_SUCCESS - : AlertStatus.EXECUTION_FAILURE; - AlertSendStatus alertSendStatus = AlertSendStatus.builder() - .alertId(alertId) - .alertPluginInstanceId(instance.getId()) - .sendStatus(sendStatus) - .log(JSONUtils.toJsonString(alertResult)) - .createTime(new Date()) - .build(); - alertSendStatuses.add(alertSendStatus); - if (AlertStatus.EXECUTION_SUCCESS.equals(sendStatus)) { - sendSuccessCount++; - AlertServerMetrics.incAlertSuccessCount(); - } else { - AlertServerMetrics.incAlertFailCount(); - } - alertResults.add(alertResult); - } - } - AlertStatus alertStatus = AlertStatus.EXECUTION_SUCCESS; - if (sendSuccessCount == 0) { - alertStatus = AlertStatus.EXECUTION_FAILURE; - } else if (sendSuccessCount < alertInstanceList.size()) { - alertStatus = AlertStatus.EXECUTION_PARTIAL_SUCCESS; - } - // we update the alert first to avoid duplicate key in alertSendStatus - // this may loss the alertSendStatus if the server restart - // todo: use transaction to update these two table - alertDao.updateAlert(alertStatus, JSONUtils.toJsonString(alertResults), alertId); - alertDao.insertAlertSendStatus(alertSendStatuses); - } - } - - /** - * sync send alert handler - * - * @param alertGroupId alertGroupId - * @param title title - * @param content content - * @return AlertSendResponseCommand - */ - public AlertSendResponse syncHandler(int alertGroupId, String title, String content, int warnType) { - List alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); - AlertData alertData = AlertData.builder() - .content(content) - .title(title) - .warnType(warnType) - .build(); - - boolean sendResponseStatus = true; - List sendResponseResults = new ArrayList<>(); - - if (CollectionUtils.isEmpty(alertInstanceList)) { - AlertSendResponse.AlertSendResponseResult alertSendResponseResult = - new 
AlertSendResponse.AlertSendResponseResult(); - String message = String.format("Alert GroupId %s send error : not found alert instance", alertGroupId); - alertSendResponseResult.setSuccess(false); - alertSendResponseResult.setMessage(message); - sendResponseResults.add(alertSendResponseResult); - log.error("Alert GroupId {} send error : not found alert instance", alertGroupId); - return new AlertSendResponse(false, sendResponseResults); - } - - for (AlertPluginInstance instance : alertInstanceList) { - AlertResult alertResult = this.alertResultHandler(instance, alertData); - if (alertResult != null) { - AlertSendResponse.AlertSendResponseResult alertSendResponseResult = - new AlertSendResponse.AlertSendResponseResult( - Boolean.parseBoolean(alertResult.getStatus()), - alertResult.getMessage()); - sendResponseStatus = sendResponseStatus && alertSendResponseResult.isSuccess(); - sendResponseResults.add(alertSendResponseResult); - } - } +@Service +public final class AlertBootstrapService implements AutoCloseable { - return new AlertSendResponse(sendResponseStatus, sendResponseResults); - } + private final AlertRpcServer alertRpcServer; - /** - * alert result handler - * - * @param instance instance - * @param alertData alertData - * @return AlertResult - */ - private @Nullable AlertResult alertResultHandler(AlertPluginInstance instance, AlertData alertData) { - String pluginInstanceName = instance.getInstanceName(); - int pluginDefineId = instance.getPluginDefineId(); - Optional alertChannelOptional = alertPluginManager.getAlertChannel(instance.getPluginDefineId()); - if (!alertChannelOptional.isPresent()) { - String message = String.format("Alert Plugin %s send error: the channel doesn't exist, pluginDefineId: %s", - pluginInstanceName, - pluginDefineId); - log.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginDefineId); - return new AlertResult("false", message); - } - AlertChannel alertChannel = alertChannelOptional.get(); + private 
final AlertRegistryClient alertRegistryClient; - Map paramsMap = JSONUtils.toMap(instance.getPluginInstanceParams()); - String instanceWarnType = WarningType.ALL.getDescp(); + private final AlertPluginManager alertPluginManager; - if (MapUtils.isNotEmpty(paramsMap)) { - instanceWarnType = paramsMap.getOrDefault(AlertConstants.NAME_WARNING_TYPE, WarningType.ALL.getDescp()); - } + private final AlertHAServer alertHAServer; - WarningType warningType = WarningType.of(instanceWarnType); + private final AlertEventFetcher alertEventFetcher; - if (warningType == null) { - String message = String.format("Alert Plugin %s send error : plugin warnType is null", pluginInstanceName); - log.error("Alert Plugin {} send error : plugin warnType is null", pluginInstanceName); - return new AlertResult("false", message); - } + private final AlertEventLoop alertEventLoop; - boolean sendWarning = false; - switch (warningType) { - case ALL: - sendWarning = true; - break; - case SUCCESS: - if (alertData.getWarnType() == WarningType.SUCCESS.getCode()) { - sendWarning = true; - } - break; - case FAILURE: - if (alertData.getWarnType() == WarningType.FAILURE.getCode()) { - sendWarning = true; - } - break; - default: - } + private final ListenerEventLoop listenerEventLoop; - if (!sendWarning) { - String message = String.format( - "Alert Plugin %s send ignore warning type not match: plugin warning type is %s, alert data warning type is %s", - pluginInstanceName, warningType.getCode(), alertData.getWarnType()); - log.info( - "Alert Plugin {} send ignore warning type not match: plugin warning type is {}, alert data warning type is {}", - pluginInstanceName, warningType.getCode(), alertData.getWarnType()); - return new AlertResult("false", message); - } + private final ListenerEventFetcher listenerEventFetcher; - AlertInfo alertInfo = AlertInfo.builder() - .alertData(alertData) - .alertParams(paramsMap) - .alertPluginInstanceId(instance.getId()) - .build(); - int waitTimeout = 
alertConfig.getWaitTimeout(); - try { - AlertResult alertResult; - if (waitTimeout <= 0) { - if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { - alertResult = alertChannel.closeAlert(alertInfo); - } else { - alertResult = alertChannel.process(alertInfo); - } - } else { - CompletableFuture future; - if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { - future = CompletableFuture.supplyAsync(() -> alertChannel.closeAlert(alertInfo)); - } else { - future = CompletableFuture.supplyAsync(() -> alertChannel.process(alertInfo)); - } - alertResult = future.get(waitTimeout, TimeUnit.MILLISECONDS); - } - if (alertResult == null) { - throw new RuntimeException("Alert result cannot be null"); - } - return alertResult; - } catch (InterruptedException e) { - log.error("send alert error alert data id :{},", alertData.getId(), e); - Thread.currentThread().interrupt(); - return new AlertResult("false", e.getMessage()); - } catch (Exception e) { - log.error("send alert error alert data id :{},", alertData.getId(), e); - return new AlertResult("false", e.getMessage()); - } + public AlertBootstrapService(AlertRpcServer alertRpcServer, + AlertRegistryClient alertRegistryClient, + AlertPluginManager alertPluginManager, + AlertHAServer alertHAServer, + AlertEventFetcher alertEventFetcher, + AlertEventLoop alertEventLoop, + ListenerEventLoop listenerEventLoop, + ListenerEventFetcher listenerEventFetcher) { + this.alertRpcServer = alertRpcServer; + this.alertRegistryClient = alertRegistryClient; + this.alertPluginManager = alertPluginManager; + this.alertHAServer = alertHAServer; + this.alertEventFetcher = alertEventFetcher; + this.alertEventLoop = alertEventLoop; + this.listenerEventLoop = listenerEventLoop; + this.listenerEventFetcher = listenerEventFetcher; } - public AlertSendResponse syncTestSend(int pluginDefineId, String pluginInstanceParams) { - - boolean sendResponseStatus = true; - List sendResponseResults = new ArrayList<>(); - - Optional 
alertChannelOptional = alertPluginManager.getAlertChannel(pluginDefineId); - if (!alertChannelOptional.isPresent()) { - String message = String.format("Test send alert error: the channel doesn't exist, pluginDefineId: %s", - pluginDefineId); - AlertSendResponse.AlertSendResponseResult alertSendResponseResult = - new AlertSendResponse.AlertSendResponseResult(); - alertSendResponseResult.setSuccess(false); - alertSendResponseResult.setMessage(message); - sendResponseResults.add(alertSendResponseResult); - log.error("Test send alert error : not found plugin {}", pluginDefineId); - return new AlertSendResponse(false, sendResponseResults); - } - AlertChannel alertChannel = alertChannelOptional.get(); - - Map paramsMap = PluginParamsTransfer.getPluginParamsMap(pluginInstanceParams); + public void start() { + log.info("AlertBootstrapService starting..."); + alertPluginManager.start(); + alertRpcServer.start(); + alertRegistryClient.start(); + alertHAServer.start(); - AlertData alertData = AlertData.builder() - .title(AlertConstants.TEST_TITLE) - .content(AlertConstants.TEST_CONTENT) - .warnType(WarningType.ALL.getCode()) - .build(); + listenerEventFetcher.start(); + alertEventFetcher.start(); - AlertInfo alertInfo = AlertInfo.builder() - .alertData(alertData) - .alertParams(paramsMap) - .build(); - - try { - AlertResult alertResult = alertChannel.process(alertInfo); - if (alertResult != null) { - AlertSendResponse.AlertSendResponseResult alertSendResponseResult = - new AlertSendResponse.AlertSendResponseResult( - Boolean.parseBoolean(alertResult.getStatus()), - alertResult.getMessage()); - sendResponseStatus = alertSendResponseResult.isSuccess(); - sendResponseResults.add(alertSendResponseResult); - } - } catch (Exception e) { - log.error("Test send alert error", e); - AlertSendResponse.AlertSendResponseResult alertSendResponseResult = - new AlertSendResponse.AlertSendResponseResult(); - alertSendResponseResult.setSuccess(false); - 
alertSendResponseResult.setMessage(e.getMessage()); - sendResponseResults.add(alertSendResponseResult); - return new AlertSendResponse(false, sendResponseResults); - } - - return new AlertSendResponse(sendResponseStatus, sendResponseResults); + listenerEventLoop.start(); + alertEventLoop.start(); + log.info("AlertBootstrapService started..."); } @Override public void close() { - log.info("Closed AlertBootstrapService..."); + log.info("AlertBootstrapService stopping..."); + try ( + AlertRpcServer closedAlertRpcServer = alertRpcServer; + AlertRegistryClient closedAlertRegistryClient = alertRegistryClient) { + // close resource + listenerEventFetcher.shutdown(); + alertEventFetcher.shutdown(); + + listenerEventLoop.shutdown(); + alertEventLoop.shutdown(); + alertHAServer.shutdown(); + } + log.info("AlertBootstrapService stopped..."); } - } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventFetcher.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventFetcher.java new file mode 100644 index 000000000000..11a668ae1ded --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventFetcher.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.entity.Alert; + +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class AlertEventFetcher extends AbstractEventFetcher { + + private final AlertDao alertDao; + + public AlertEventFetcher(AlertHAServer alertHAServer, + AlertDao alertDao, + AlertEventPendingQueue alertEventPendingQueue) { + super("AlertEventFetcher", alertHAServer, alertEventPendingQueue); + this.alertDao = alertDao; + } + + @Override + public List fetchPendingEvent(int eventOffset) { + return alertDao.listPendingAlerts(eventOffset); + } + + @Override + protected int getEventOffset(Alert event) { + return event.getId(); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventLoop.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventLoop.java new file mode 100644 index 000000000000..e975f1ad51aa --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventLoop.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.metrics.AlertServerMetrics; +import org.apache.dolphinscheduler.dao.entity.Alert; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class AlertEventLoop extends AbstractEventLoop { + + private final AlertSender alertSender; + + public AlertEventLoop(AlertEventPendingQueue alertEventPendingQueue, + AlertSenderThreadPoolFactory alertSenderThreadPoolFactory, + AlertSender alertSender) { + super("AlertEventLoop", alertSenderThreadPoolFactory.getThreadPool(), alertEventPendingQueue); + this.alertSender = alertSender; + AlertServerMetrics.registerPendingAlertGauge(this::getHandlingEventCount); + } + + @Override + public void handleEvent(Alert event) { + alertSender.sendEvent(event); + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueue.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueue.java new file mode 100644 index 000000000000..17fe7ccd0b73 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueue.java @@ -0,0 +1,33 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.alert.metrics.AlertServerMetrics; +import org.apache.dolphinscheduler.dao.entity.Alert; + +import org.springframework.stereotype.Component; + +@Component +public class AlertEventPendingQueue extends AbstractEventPendingQueue { + + public AlertEventPendingQueue(AlertConfig alertConfig) { + super(alertConfig.getSenderParallelism() * 3 + 1); + AlertServerMetrics.registerPendingAlertGauge(this::size); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertHAServer.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertHAServer.java new file mode 100644 index 000000000000..998bc655c4a9 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertHAServer.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.registry.api.Registry; +import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; +import org.apache.dolphinscheduler.registry.api.ha.AbstractHAServer; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class AlertHAServer extends AbstractHAServer { + + public AlertHAServer(Registry registry) { + super(registry, RegistryNodeType.ALERT_LOCK.getRegistryPath()); + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSender.java new file mode 100644 index 000000000000..9c9cd034bdb6 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSender.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; +import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class AlertSender extends AbstractEventSender { + + private final AlertDao alertDao; + + public AlertSender(AlertDao alertDao, + AlertPluginManager alertPluginManager, + AlertConfig alertConfig) { + super(alertPluginManager, alertConfig.getWaitTimeout()); + this.alertDao = alertDao; + } + + /** + * sync send alert handler + * + * @param alertGroupId alertGroupId + * @param title title + * @param content content + * @return AlertSendResponseCommand + */ + public AlertSendResponse syncHandler(int alertGroupId, String title, String content) { + List 
alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); + AlertData alertData = AlertData.builder() + .content(content) + .title(title) + .build(); + + boolean sendResponseStatus = true; + List sendResponseResults = new ArrayList<>(); + + if (CollectionUtils.isEmpty(alertInstanceList)) { + AlertSendResponse.AlertSendResponseResult alertSendResponseResult = + new AlertSendResponse.AlertSendResponseResult(); + String message = String.format("Alert GroupId %s send error : not found alert instance", alertGroupId); + alertSendResponseResult.setSuccess(false); + alertSendResponseResult.setMessage(message); + sendResponseResults.add(alertSendResponseResult); + log.error("Alert GroupId {} send error : not found alert instance", alertGroupId); + return new AlertSendResponse(false, sendResponseResults); + } + + for (AlertPluginInstance instance : alertInstanceList) { + AlertResult alertResult = doSendEvent(instance, alertData); + if (alertResult != null) { + AlertSendResponse.AlertSendResponseResult alertSendResponseResult = + new AlertSendResponse.AlertSendResponseResult( + alertResult.isSuccess(), + alertResult.getMessage()); + sendResponseStatus = sendResponseStatus && alertSendResponseResult.isSuccess(); + sendResponseResults.add(alertSendResponseResult); + } + } + + return new AlertSendResponse(sendResponseStatus, sendResponseResults); + } + + @Override + public List getAlertPluginInstanceList(Alert event) { + return alertDao.listInstanceByAlertGroupId(event.getAlertGroupId()); + } + + @Override + public AlertData getAlertData(Alert event) { + return AlertData.builder() + .id(event.getId()) + .content(event.getContent()) + .log(event.getLog()) + .title(event.getTitle()) + .alertType(event.getAlertType().getCode()) + .build(); + } + + @Override + public Integer getEventId(Alert event) { + return event.getId(); + } + + @Override + public void onError(Alert event, String log) { + alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, log, event.getId()); + } + 
+ @Override + public void onPartialSuccess(Alert event, String log) { + alertDao.updateAlert(AlertStatus.EXECUTION_PARTIAL_SUCCESS, log, event.getId()); + } + + @Override + public void onSuccess(Alert event, String log) { + alertDao.updateAlert(AlertStatus.EXECUTION_SUCCESS, log, event.getId()); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactory.java new file mode 100644 index 000000000000..fd8c731b1721 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactory.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; + +import java.util.concurrent.ThreadPoolExecutor; + +import org.springframework.stereotype.Component; + +@Component +public class AlertSenderThreadPoolFactory { + + private final ThreadPoolExecutor threadPool; + + public AlertSenderThreadPoolFactory(AlertConfig alertConfig) { + this.threadPool = ThreadUtils.newDaemonFixedThreadExecutor("AlertSenderThread", + alertConfig.getSenderParallelism()); + } + + public ThreadPoolExecutor getThreadPool() { + return threadPool; + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventFetcher.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventFetcher.java new file mode 100644 index 000000000000..089fb4edc941 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventFetcher.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import java.util.List; + +/** + * The interface responsible for fetching events. + * + * @param the type of event + */ +public interface EventFetcher { + + void start(); + + List fetchPendingEvent(int eventOffset); + + void shutdown(); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventLoop.java similarity index 60% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventLoop.java index 2a5bc6a417df..04219f99ae83 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventLoop.java @@ -14,31 +14,34 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.dao.entity.UDFUser; - -import org.apache.ibatis.annotations.Param; - -import com.baomidou.mybatisplus.core.mapper.BaseMapper; +package org.apache.dolphinscheduler.alert.service; /** - * udf user realtion mapper interface + * The interface responsible for consuming event from upstream, e.g {@link EventPendingQueue}. + * + * @param the type of event */ -public interface UDFUserMapper extends BaseMapper { +public interface EventLoop { + + /** + * Start the event loop, once the event loop is started, it will keep consuming event from upstream. + */ + void start(); + + /** + * Handle the given event. 
+ */ + void handleEvent(T event); /** - * delete udf user realtion by userId - * @param userId userId - * @return delete result + * Get the count of handling event. */ - int deleteByUserId(@Param("userId") int userId); + int getHandlingEventCount(); /** - * delete udf user realtion by function id - * @param udfFuncId udfFuncId - * @return delete result + * Shutdown the event loop, once the event loop is shutdown, it will stop consuming event from upstream. */ - int deleteByUdfFuncId(@Param("udfFuncId") int udfFuncId); + void shutdown(); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventPendingQueue.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventPendingQueue.java new file mode 100644 index 000000000000..c8538138bc92 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventPendingQueue.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +/** + * The interface responsible for managing pending events. 
+ * + * @param the type of event + */ +public interface EventPendingQueue { + + void put(T alert) throws InterruptedException; + + T take() throws InterruptedException; + + int size(); + + int capacity(); +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventSender.java new file mode 100644 index 000000000000..04bc85e573e6 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/EventSender.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; +import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; + +public interface EventSender { + + void sendEvent(T event); + + AlertResult doSendEvent(AlertPluginInstance instance, AlertData alertData); + + AlertSendResponse syncTestSend(int pluginDefineId, String pluginInstanceParams); + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventFetcher.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventFetcher.java new file mode 100644 index 000000000000..57549d7e975b --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventFetcher.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; +import org.apache.dolphinscheduler.dao.repository.ListenerEventDao; + +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class ListenerEventFetcher extends AbstractEventFetcher { + + private final ListenerEventDao listenerEventDao; + + protected ListenerEventFetcher(AlertHAServer alertHAServer, + ListenerEventDao listenerEventDao, + ListenerEventPendingQueue listenerEventPendingQueue) { + super("ListenerEventFetcher", alertHAServer, listenerEventPendingQueue); + this.listenerEventDao = listenerEventDao; + } + + @Override + protected int getEventOffset(ListenerEvent event) { + return event.getId(); + } + + @Override + public List fetchPendingEvent(int eventOffset) { + return listenerEventDao.listingPendingEvents(eventOffset, FETCH_SIZE); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventLoop.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventLoop.java new file mode 100644 index 000000000000..f1c00967f996 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventLoop.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; + +import org.springframework.stereotype.Component; + +@Component +public class ListenerEventLoop extends AbstractEventLoop { + + private final ListenerEventSender listenerEventSender; + + protected ListenerEventLoop(AlertSenderThreadPoolFactory alertSenderThreadPoolFactory, + ListenerEventSender listenerEventSender, + ListenerEventPendingQueue listenerEventPendingQueue) { + super("ListenerEventLoop", alertSenderThreadPoolFactory.getThreadPool(), listenerEventPendingQueue); + this.listenerEventSender = listenerEventSender; + } + + @Override + public void handleEvent(ListenerEvent event) { + listenerEventSender.sendEvent(event); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPendingQueue.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPendingQueue.java new file mode 100644 index 000000000000..47d0c77dff7e --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPendingQueue.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; + +import org.springframework.stereotype.Component; + +@Component +public class ListenerEventPendingQueue extends AbstractEventPendingQueue { + + public ListenerEventPendingQueue(AlertConfig alertConfig) { + super(alertConfig.getSenderParallelism() * 3 + 1); + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPostService.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPostService.java deleted file mode 100644 index b57562c71101..000000000000 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventPostService.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.alert.service; - -import org.apache.dolphinscheduler.alert.api.AlertChannel; -import org.apache.dolphinscheduler.alert.api.AlertData; -import org.apache.dolphinscheduler.alert.api.AlertInfo; -import org.apache.dolphinscheduler.alert.api.AlertResult; -import org.apache.dolphinscheduler.alert.config.AlertConfig; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.AlertStatus; -import org.apache.dolphinscheduler.common.enums.AlertType; -import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.common.lifecycle.ServerLifeCycleManager; -import org.apache.dolphinscheduler.common.thread.BaseDaemonThread; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; -import org.apache.dolphinscheduler.dao.entity.AlertSendStatus; -import org.apache.dolphinscheduler.dao.entity.ListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.AbstractListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionCreatedListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionDeletedListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionUpdatedListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ProcessEndListenerEvent; -import 
org.apache.dolphinscheduler.dao.entity.event.ProcessFailListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ProcessStartListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.ServerDownListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.TaskEndListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.TaskFailListenerEvent; -import org.apache.dolphinscheduler.dao.entity.event.TaskStartListenerEvent; -import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.ListenerEventMapper; - -import org.apache.commons.collections4.CollectionUtils; -import org.apache.curator.shaded.com.google.common.collect.Lists; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; - -import javax.annotation.Nullable; - -import lombok.extern.slf4j.Slf4j; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Service; - -@Service -@Slf4j -public final class ListenerEventPostService extends BaseDaemonThread implements AutoCloseable { - - @Value("${alert.query_alert_threshold:100}") - private Integer QUERY_ALERT_THRESHOLD; - @Autowired - private ListenerEventMapper listenerEventMapper; - @Autowired - private AlertPluginInstanceMapper alertPluginInstanceMapper; - @Autowired - private AlertPluginManager alertPluginManager; - @Autowired - private AlertConfig alertConfig; - - public ListenerEventPostService() { - super("ListenerEventPostService"); - } - - @Override - public void run() { - log.info("listener event post thread started"); - while (!ServerLifeCycleManager.isStopped()) { - try { - List listenerEvents = listenerEventMapper - .listingListenerEventByStatus(AlertStatus.WAIT_EXECUTION, QUERY_ALERT_THRESHOLD); - 
if (CollectionUtils.isEmpty(listenerEvents)) { - log.debug("There is no waiting listener events"); - continue; - } - this.send(listenerEvents); - } catch (Exception e) { - log.error("listener event post thread meet an exception", e); - } finally { - ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS * 5L); - } - } - log.info("listener event post thread stopped"); - } - - public void send(List listenerEvents) { - for (ListenerEvent listenerEvent : listenerEvents) { - int eventId = listenerEvent.getId(); - List globalAlertInstanceList = - alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList(); - if (CollectionUtils.isEmpty(globalAlertInstanceList)) { - log.error("post listener event fail,no bind global plugin instance."); - listenerEventMapper.updateListenerEvent(eventId, AlertStatus.EXECUTION_FAILURE, - "no bind plugin instance", new Date()); - continue; - } - AbstractListenerEvent event = generateEventFromContent(listenerEvent); - if (event == null) { - log.error("parse listener event to abstract listener event fail.ed {}", listenerEvent.getContent()); - listenerEventMapper.updateListenerEvent(eventId, AlertStatus.EXECUTION_FAILURE, - "parse listener event to abstract listener event failed", new Date()); - continue; - } - List events = Lists.newArrayList(event); - AlertData alertData = AlertData.builder() - .id(eventId) - .content(JSONUtils.toJsonString(events)) - .log(listenerEvent.getLog()) - .title(event.getTitle()) - .warnType(WarningType.GLOBAL.getCode()) - .alertType(event.getEventType().getCode()) - .build(); - - int sendSuccessCount = 0; - List failedPostResults = new ArrayList<>(); - for (AlertPluginInstance instance : globalAlertInstanceList) { - AlertResult alertResult = this.alertResultHandler(instance, alertData); - if (alertResult != null) { - AlertStatus sendStatus = Boolean.parseBoolean(alertResult.getStatus()) - ? 
AlertStatus.EXECUTION_SUCCESS - : AlertStatus.EXECUTION_FAILURE; - if (AlertStatus.EXECUTION_SUCCESS.equals(sendStatus)) { - sendSuccessCount++; - } else { - AlertSendStatus alertSendStatus = AlertSendStatus.builder() - .alertId(eventId) - .alertPluginInstanceId(instance.getId()) - .sendStatus(sendStatus) - .log(JSONUtils.toJsonString(alertResult)) - .createTime(new Date()) - .build(); - failedPostResults.add(alertSendStatus); - } - } - } - if (sendSuccessCount == globalAlertInstanceList.size()) { - listenerEventMapper.deleteById(eventId); - } else { - AlertStatus alertStatus = - sendSuccessCount == 0 ? AlertStatus.EXECUTION_FAILURE : AlertStatus.EXECUTION_PARTIAL_SUCCESS; - listenerEventMapper.updateListenerEvent(eventId, alertStatus, JSONUtils.toJsonString(failedPostResults), - new Date()); - } - } - } - - /** - * alert result handler - * - * @param instance instance - * @param alertData alertData - * @return AlertResult - */ - private @Nullable AlertResult alertResultHandler(AlertPluginInstance instance, AlertData alertData) { - String pluginInstanceName = instance.getInstanceName(); - int pluginDefineId = instance.getPluginDefineId(); - Optional alertChannelOptional = alertPluginManager.getAlertChannel(instance.getPluginDefineId()); - if (!alertChannelOptional.isPresent()) { - String message = - String.format("Global Alert Plugin %s send error: the channel doesn't exist, pluginDefineId: %s", - pluginInstanceName, - pluginDefineId); - log.error("Global Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginDefineId); - return new AlertResult("false", message); - } - AlertChannel alertChannel = alertChannelOptional.get(); - - Map paramsMap = JSONUtils.toMap(instance.getPluginInstanceParams()); - - AlertInfo alertInfo = AlertInfo.builder() - .alertData(alertData) - .alertParams(paramsMap) - .alertPluginInstanceId(instance.getId()) - .build(); - int waitTimeout = alertConfig.getWaitTimeout(); - try { - AlertResult alertResult; - if 
(waitTimeout <= 0) { - if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { - alertResult = alertChannel.closeAlert(alertInfo); - } else { - alertResult = alertChannel.process(alertInfo); - } - } else { - CompletableFuture future; - if (alertData.getAlertType() == AlertType.CLOSE_ALERT.getCode()) { - future = CompletableFuture.supplyAsync(() -> alertChannel.closeAlert(alertInfo)); - } else { - future = CompletableFuture.supplyAsync(() -> alertChannel.process(alertInfo)); - } - alertResult = future.get(waitTimeout, TimeUnit.MILLISECONDS); - } - if (alertResult == null) { - throw new RuntimeException("Alert result cannot be null"); - } - return alertResult; - } catch (InterruptedException e) { - log.error("post listener event error alert data id :{},", alertData.getId(), e); - Thread.currentThread().interrupt(); - return new AlertResult("false", e.getMessage()); - } catch (Exception e) { - log.error("post listener event error alert data id :{},", alertData.getId(), e); - return new AlertResult("false", e.getMessage()); - } - } - - private AbstractListenerEvent generateEventFromContent(ListenerEvent listenerEvent) { - String content = listenerEvent.getContent(); - switch (listenerEvent.getEventType()) { - case SERVER_DOWN: - return JSONUtils.parseObject(content, ServerDownListenerEvent.class); - case PROCESS_DEFINITION_CREATED: - return JSONUtils.parseObject(content, ProcessDefinitionCreatedListenerEvent.class); - case PROCESS_DEFINITION_UPDATED: - return JSONUtils.parseObject(content, ProcessDefinitionUpdatedListenerEvent.class); - case PROCESS_DEFINITION_DELETED: - return JSONUtils.parseObject(content, ProcessDefinitionDeletedListenerEvent.class); - case PROCESS_START: - return JSONUtils.parseObject(content, ProcessStartListenerEvent.class); - case PROCESS_END: - return JSONUtils.parseObject(content, ProcessEndListenerEvent.class); - case PROCESS_FAIL: - return JSONUtils.parseObject(content, ProcessFailListenerEvent.class); - case TASK_START: - return 
JSONUtils.parseObject(content, TaskStartListenerEvent.class); - case TASK_END: - return JSONUtils.parseObject(content, TaskEndListenerEvent.class); - case TASK_FAIL: - return JSONUtils.parseObject(content, TaskFailListenerEvent.class); - default: - return null; - } - } - @Override - public void close() { - log.info("Closed ListenerEventPostService..."); - } -} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventSender.java new file mode 100644 index 000000000000..7d06500edd2c --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/service/ListenerEventSender.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.AbstractListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionCreatedListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionDeletedListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessDefinitionUpdatedListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessEndListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessFailListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ProcessStartListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.ServerDownListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.TaskEndListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.TaskFailListenerEvent; +import org.apache.dolphinscheduler.dao.entity.event.TaskStartListenerEvent; +import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; +import org.apache.dolphinscheduler.dao.repository.ListenerEventDao; + +import org.apache.curator.shaded.com.google.common.collect.Lists; + +import java.util.Date; +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Component; + +@Slf4j +@Component +public class ListenerEventSender extends AbstractEventSender { + + private final ListenerEventDao listenerEventDao; + + private final AlertPluginInstanceMapper alertPluginInstanceMapper; + + public 
ListenerEventSender(ListenerEventDao listenerEventDao, + AlertPluginInstanceMapper alertPluginInstanceMapper, + AlertPluginManager alertPluginManager, + AlertConfig alertConfig) { + super(alertPluginManager, alertConfig.getWaitTimeout()); + this.listenerEventDao = listenerEventDao; + this.alertPluginInstanceMapper = alertPluginInstanceMapper; + } + + private AbstractListenerEvent generateEventFromContent(ListenerEvent listenerEvent) { + String content = listenerEvent.getContent(); + AbstractListenerEvent event = null; + switch (listenerEvent.getEventType()) { + case SERVER_DOWN: + event = JSONUtils.parseObject(content, ServerDownListenerEvent.class); + break; + case PROCESS_DEFINITION_CREATED: + event = JSONUtils.parseObject(content, ProcessDefinitionCreatedListenerEvent.class); + break; + case PROCESS_DEFINITION_UPDATED: + event = JSONUtils.parseObject(content, ProcessDefinitionUpdatedListenerEvent.class); + break; + case PROCESS_DEFINITION_DELETED: + event = JSONUtils.parseObject(content, ProcessDefinitionDeletedListenerEvent.class); + break; + case PROCESS_START: + event = JSONUtils.parseObject(content, ProcessStartListenerEvent.class); + break; + case PROCESS_END: + event = JSONUtils.parseObject(content, ProcessEndListenerEvent.class); + break; + case PROCESS_FAIL: + event = JSONUtils.parseObject(content, ProcessFailListenerEvent.class); + break; + case TASK_START: + event = JSONUtils.parseObject(content, TaskStartListenerEvent.class); + break; + case TASK_END: + event = JSONUtils.parseObject(content, TaskEndListenerEvent.class); + break; + case TASK_FAIL: + event = JSONUtils.parseObject(content, TaskFailListenerEvent.class); + break; + default: + throw new IllegalArgumentException("Unsupported event type: " + listenerEvent.getEventType()); + } + if (event == null) { + throw new IllegalArgumentException("Failed to parse event from content: " + content); + } + return event; + } + + @Override + public List getAlertPluginInstanceList(ListenerEvent event) { + 
return alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList();
+    }
+
+    @Override
+    public AlertData getAlertData(ListenerEvent listenerEvent) {
+        AbstractListenerEvent event = generateEventFromContent(listenerEvent);
+        return AlertData.builder()
+                .id(listenerEvent.getId())
+                .content(JSONUtils.toJsonString(Lists.newArrayList(event)))
+                .log(listenerEvent.getLog())
+                .title(event.getTitle())
+                .alertType(event.getEventType().getCode())
+                .build();
+    }
+
+    @Override
+    public Integer getEventId(ListenerEvent event) {
+        return event.getId();
+    }
+
+    @Override
+    public void onError(ListenerEvent event, String log) {
+        listenerEventDao.updateListenerEvent(event.getId(), AlertStatus.EXECUTION_FAILURE, log, new Date());
+    }
+
+    @Override
+    public void onPartialSuccess(ListenerEvent event, String log) {
+        listenerEventDao.updateListenerEvent(event.getId(), AlertStatus.EXECUTION_PARTIAL_SUCCESS, log, new Date());
+    }
+
+    @Override
+    public void onSuccess(ListenerEvent event, String log) {
+        listenerEventDao.updateListenerEvent(event.getId(), AlertStatus.EXECUTION_SUCCESS, log, new Date());
+    }
+}
diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml
index 0b28d6c8b053..927cbc3c2ce6 100644
--- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml
+++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/application.yaml
@@ -30,15 +30,7 @@ spring:
       password: root
     hikari:
       connection-test-query: select 1
-      minimum-idle: 5
-      auto-commit: true
-      validation-timeout: 3000
       pool-name: DolphinScheduler
-      maximum-pool-size: 50
-      connection-timeout: 30000
-      idle-timeout: 600000
-      leak-detection-threshold: 0
-      initialization-fail-timeout: 1

 # Mybatis-plus configuration, you don't need to change it
 mybatis-plus:
@@ -81,7 +73,8 @@ alert:
   # Define value is (0 = infinite), and alert server
would be waiting alert result. wait-timeout: 0 max-heartbeat-interval: 60s - query_alert_threshold: 100 + # The maximum number of alerts that can be processed in parallel + sender-parallelism: 100 registry: type: zookeeper @@ -89,12 +82,12 @@ registry: namespace: dolphinscheduler connect-string: localhost:2181 retry-policy: - base-sleep-time: 60ms - max-sleep: 300ms + base-sleep-time: 1s + max-sleep: 3s max-retries: 5 - session-timeout: 30s - connection-timeout: 9s - block-until-connected: 600ms + session-timeout: 60s + connection-timeout: 15s + block-until-connected: 15s digest: ~ metrics: diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/config/AlertConfigTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/config/AlertConfigTest.java new file mode 100644 index 000000000000..1a72f0a5c93d --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/config/AlertConfigTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.config; + +import static com.google.common.truth.Truth.assertThat; + +import java.time.Duration; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; + +@AutoConfigureMockMvc +@SpringBootTest(classes = AlertConfig.class) +class AlertConfigTest { + + @Autowired + private AlertConfig alertConfig; + + @Test + void testValidate() { + assertThat(alertConfig.getWaitTimeout()).isEqualTo(10); + assertThat(alertConfig.getMaxHeartbeatInterval()).isEqualTo(Duration.ofSeconds(59)); + assertThat(alertConfig.getSenderParallelism()).isEqualTo(101); + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServerTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServerTest.java new file mode 100644 index 000000000000..75f16848fdf0 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/rpc/AlertRpcServerTest.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.alert.rpc; + +import org.apache.dolphinscheduler.alert.config.AlertConfig; + +import org.junit.jupiter.api.Test; + +class AlertRpcServerTest { + + private final AlertRpcServer alertRpcServer = new AlertRpcServer(new AlertConfig()); + + @Test + void testStart() { + alertRpcServer.start(); + } + + @Test + void testClose() { + alertRpcServer.close(); + } + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertBootstrapServiceTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java similarity index 73% rename from dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertBootstrapServiceTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java index eafba16585fe..400afd34dcbf 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertBootstrapServiceTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java @@ -24,15 +24,13 @@ import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.alert.config.AlertConfig; import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.alert.service.AlertBootstrapService; +import org.apache.dolphinscheduler.alert.service.AlertSender; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.PluginDao; import 
org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; -import org.apache.dolphinscheduler.dao.entity.ListenerEvent; -import org.apache.dolphinscheduler.dao.entity.PluginDefine; import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; @@ -42,19 +40,20 @@ import java.util.Optional; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; -import org.mockito.MockitoAnnotations; +import org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class AlertBootstrapServiceTest { +@ExtendWith(MockitoExtension.class) +class AlertSenderTest { - private static final Logger logger = LoggerFactory.getLogger(AlertBootstrapServiceTest.class); + private static final Logger logger = LoggerFactory.getLogger(AlertSenderTest.class); @Mock private AlertDao alertDao; @@ -66,7 +65,7 @@ public class AlertBootstrapServiceTest { private AlertConfig alertConfig; @InjectMocks - private AlertBootstrapService alertBootstrapService; + private AlertSender alertSender; private static final String PLUGIN_INSTANCE_PARAMS = "{\"User\":\"xx\",\"receivers\":\"xx\",\"sender\":\"xx\",\"smtpSslTrust\":\"*\",\"enableSmtpAuth\":\"true\",\"receiverCcs\":null,\"showType\":\"table\",\"starttlsEnable\":\"false\",\"serverPort\":\"25\",\"serverHost\":\"xx\",\"Password\":\"xx\",\"sslEnable\":\"false\"}"; @@ -74,25 +73,17 @@ public class AlertBootstrapServiceTest { private static final String PLUGIN_INSTANCE_NAME = "alert-instance-mail"; private static final String TITLE = "alert mail test TITLE"; private static final String CONTENT = "alert mail test CONTENT"; - private static final List 
EVENTS = new ArrayList<>(); private static final int PLUGIN_DEFINE_ID = 1; private static final int ALERT_GROUP_ID = 1; - @BeforeEach - public void before() { - MockitoAnnotations.initMocks(this); - } - @Test - public void testSyncHandler() { + void testSyncHandler() { // 1.alert instance does not exist when(alertDao.listInstanceByAlertGroupId(ALERT_GROUP_ID)).thenReturn(null); - when(alertConfig.getWaitTimeout()).thenReturn(0); - AlertSendResponse alertSendResponse = - alertBootstrapService.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT, WarningType.ALL.getCode()); + AlertSendResponse alertSendResponse = alertSender.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT); Assertions.assertFalse(alertSendResponse.isSuccess()); alertSendResponse.getResResults().forEach(result -> logger .info("alert send response result, status:{}, message:{}", result.isSuccess(), result.getMessage())); @@ -108,12 +99,7 @@ public void testSyncHandler() { alertInstanceList.add(alertPluginInstance); when(alertDao.listInstanceByAlertGroupId(1)).thenReturn(alertInstanceList); - String pluginName = "alert-plugin-mail"; - PluginDefine pluginDefine = new PluginDefine(pluginName, "1", null); - when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); - - alertSendResponse = - alertBootstrapService.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT, WarningType.ALL.getCode()); + alertSendResponse = alertSender.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT); Assertions.assertFalse(alertSendResponse.isSuccess()); alertSendResponse.getResResults().forEach(result -> logger .info("alert send response result, status:{}, message:{}", result.isSuccess(), result.getMessage())); @@ -122,37 +108,32 @@ public void testSyncHandler() { AlertChannel alertChannelMock = mock(AlertChannel.class); when(alertChannelMock.process(Mockito.any())).thenReturn(null); when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - when(alertConfig.getWaitTimeout()).thenReturn(0); - alertSendResponse = 
- alertBootstrapService.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT, WarningType.ALL.getCode()); + alertSendResponse = alertSender.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT); Assertions.assertFalse(alertSendResponse.isSuccess()); alertSendResponse.getResResults().forEach(result -> logger .info("alert send response result, status:{}, message:{}", result.isSuccess(), result.getMessage())); // 4.abnormal information inside the alert plug-in code AlertResult alertResult = new AlertResult(); - alertResult.setStatus(String.valueOf(false)); + alertResult.setSuccess(false); alertResult.setMessage("Abnormal information inside the alert plug-in code"); when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - alertSendResponse = - alertBootstrapService.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT, WarningType.ALL.getCode()); + alertSendResponse = alertSender.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT); Assertions.assertFalse(alertSendResponse.isSuccess()); alertSendResponse.getResResults().forEach(result -> logger .info("alert send response result, status:{}, message:{}", result.isSuccess(), result.getMessage())); // 5.alert plugin send success alertResult = new AlertResult(); - alertResult.setStatus(String.valueOf(true)); + alertResult.setSuccess(true); alertResult.setMessage(String.format("Alert Plugin %s send success", pluginInstanceName)); when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - when(alertConfig.getWaitTimeout()).thenReturn(5000); - alertSendResponse = - alertBootstrapService.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT, WarningType.ALL.getCode()); + alertSendResponse = alertSender.syncHandler(ALERT_GROUP_ID, TITLE, CONTENT); Assertions.assertTrue(alertSendResponse.isSuccess()); alertSendResponse.getResResults().forEach(result -> logger .info("alert send 
response result, status:{}, message:{}", result.isSuccess(), result.getMessage())); @@ -160,17 +141,13 @@ public void testSyncHandler() { } @Test - public void testRun() { - List alertList = new ArrayList<>(); + void testRun() { Alert alert = new Alert(); alert.setId(1); alert.setAlertGroupId(ALERT_GROUP_ID); alert.setTitle(TITLE); alert.setContent(CONTENT); alert.setWarningType(WarningType.FAILURE); - alertList.add(alert); - - // alertSenderService = new AlertSenderService(); int pluginDefineId = 1; String pluginInstanceParams = "alert-instance-mail-params"; @@ -181,25 +158,18 @@ public void testRun() { alertInstanceList.add(alertPluginInstance); when(alertDao.listInstanceByAlertGroupId(ALERT_GROUP_ID)).thenReturn(alertInstanceList); - String pluginName = "alert-plugin-mail"; - PluginDefine pluginDefine = new PluginDefine(pluginName, "1", null); - when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); - AlertResult alertResult = new AlertResult(); - alertResult.setStatus(String.valueOf(true)); + alertResult.setSuccess(true); alertResult.setMessage(String.format("Alert Plugin %s send success", pluginInstanceName)); - AlertChannel alertChannelMock = mock(AlertChannel.class); - when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); - when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - Assertions.assertTrue(Boolean.parseBoolean(alertResult.getStatus())); + Assertions.assertTrue(alertResult.isSuccess()); when(alertDao.listInstanceByAlertGroupId(1)).thenReturn(new ArrayList<>()); - alertBootstrapService.send(alertList); + alertSender.sendEvent(alert); } @Test - public void testSendAlert() { + void testSendAlert() { AlertResult sendResult = new AlertResult(); - sendResult.setStatus(String.valueOf(true)); + sendResult.setSuccess(true); sendResult.setMessage(String.format("Alert Plugin %s send success", PLUGIN_INSTANCE_NAME)); AlertChannel alertChannelMock = mock(AlertChannel.class); 
when(alertChannelMock.process(Mockito.any())).thenReturn(sendResult); @@ -209,6 +179,6 @@ public void testSendAlert() { Mockito.mockStatic(PluginParamsTransfer.class); pluginParamsTransferMockedStatic.when(() -> PluginParamsTransfer.getPluginParamsMap(PLUGIN_INSTANCE_PARAMS)) .thenReturn(paramsMap); - alertBootstrapService.syncTestSend(PLUGIN_DEFINE_ID, PLUGIN_INSTANCE_PARAMS); + alertSender.syncTestSend(PLUGIN_DEFINE_ID, PLUGIN_INSTANCE_PARAMS); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventPostServiceTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventSenderTest.java similarity index 82% rename from dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventPostServiceTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventSenderTest.java index 33917267f0e6..0304be022c22 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventPostServiceTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/ListenerEventSenderTest.java @@ -24,7 +24,7 @@ import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.alert.config.AlertConfig; import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.alert.service.ListenerEventPostService; +import org.apache.dolphinscheduler.alert.service.ListenerEventSender; import org.apache.dolphinscheduler.common.enums.AlertPluginInstanceType; import org.apache.dolphinscheduler.common.enums.AlertStatus; import org.apache.dolphinscheduler.common.enums.ListenerEventType; @@ -33,7 +33,7 @@ import 
org.apache.dolphinscheduler.dao.entity.ListenerEvent; import org.apache.dolphinscheduler.dao.entity.event.ServerDownListenerEvent; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.ListenerEventMapper; +import org.apache.dolphinscheduler.dao.repository.ListenerEventDao; import org.apache.commons.codec.digest.DigestUtils; @@ -43,39 +43,32 @@ import java.util.Optional; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.MockitoAnnotations; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.mockito.junit.jupiter.MockitoExtension; -public class ListenerEventPostServiceTest { - - private static final Logger logger = LoggerFactory.getLogger(ListenerEventPostServiceTest.class); +@ExtendWith(MockitoExtension.class) +class ListenerEventSenderTest { @Mock - private ListenerEventMapper listenerEventMapper; + private ListenerEventDao listenerEventDao; + @Mock private AlertPluginInstanceMapper alertPluginInstanceMapper; @Mock private AlertPluginManager alertPluginManager; + @Mock private AlertConfig alertConfig; @InjectMocks - private ListenerEventPostService listenerEventPostService; - - @BeforeEach - public void before() { - MockitoAnnotations.initMocks(this); - } + private ListenerEventSender listenerEventSender; @Test - public void testSendServerDownEventSuccess() { - List events = new ArrayList<>(); + void testSendServerDownEventSuccess() { ServerDownListenerEvent serverDownListenerEvent = new ServerDownListenerEvent(); serverDownListenerEvent.setEventTime(new Date()); serverDownListenerEvent.setType("WORKER"); @@ -88,7 +81,6 @@ public void testSendServerDownEventSuccess() { successEvent.setEventType(ListenerEventType.SERVER_DOWN); successEvent.setCreateTime(new 
Date()); successEvent.setUpdateTime(new Date()); - events.add(successEvent); int pluginDefineId = 1; String pluginInstanceParams = @@ -103,19 +95,17 @@ public void testSendServerDownEventSuccess() { when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()).thenReturn(alertInstanceList); AlertResult sendResult = new AlertResult(); - sendResult.setStatus(String.valueOf(true)); + sendResult.setSuccess(true); sendResult.setMessage(String.format("Alert Plugin %s send success", pluginInstanceName)); AlertChannel alertChannelMock = mock(AlertChannel.class); when(alertChannelMock.process(Mockito.any())).thenReturn(sendResult); when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - Assertions.assertTrue(Boolean.parseBoolean(sendResult.getStatus())); - when(listenerEventMapper.deleteById(1)).thenReturn(1); - listenerEventPostService.send(events); + Assertions.assertTrue(sendResult.isSuccess()); + listenerEventSender.sendEvent(successEvent); } @Test - public void testSendServerDownEventFailed() { - List events = new ArrayList<>(); + void testSendServerDownEventFailed() { ServerDownListenerEvent serverDownListenerEvent = new ServerDownListenerEvent(); serverDownListenerEvent.setEventTime(new Date()); serverDownListenerEvent.setType("WORKER"); @@ -128,7 +118,6 @@ public void testSendServerDownEventFailed() { successEvent.setEventType(ListenerEventType.SERVER_DOWN); successEvent.setCreateTime(new Date()); successEvent.setUpdateTime(new Date()); - events.add(successEvent); int pluginDefineId = 1; String pluginInstanceParams = @@ -143,12 +132,12 @@ public void testSendServerDownEventFailed() { when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()).thenReturn(alertInstanceList); AlertResult sendResult = new AlertResult(); - sendResult.setStatus(String.valueOf(false)); + sendResult.setSuccess(false); sendResult.setMessage(String.format("Alert Plugin %s send failed", pluginInstanceName)); AlertChannel alertChannelMock = 
mock(AlertChannel.class); when(alertChannelMock.process(Mockito.any())).thenReturn(sendResult); when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); - Assertions.assertFalse(Boolean.parseBoolean(sendResult.getStatus())); - listenerEventPostService.send(events); + Assertions.assertFalse(sendResult.isSuccess()); + listenerEventSender.sendEvent(successEvent); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueueTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueueTest.java new file mode 100644 index 000000000000..a643e50e760c --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertEventPendingQueueTest.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.service; + +import static com.google.common.truth.Truth.assertThat; +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertThrowsExactly; + +import org.apache.dolphinscheduler.alert.config.AlertConfig; +import org.apache.dolphinscheduler.dao.entity.Alert; + +import java.time.Duration; +import java.util.concurrent.CompletableFuture; + +import lombok.SneakyThrows; + +import org.awaitility.core.ConditionTimeoutException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class AlertEventPendingQueueTest { + + private AlertEventPendingQueue alertEventPendingQueue; + + private static final int QUEUE_SIZE = 10; + + @BeforeEach + public void before() { + AlertConfig alertConfig = new AlertConfig(); + alertConfig.setSenderParallelism(QUEUE_SIZE); + this.alertEventPendingQueue = new AlertEventPendingQueue(alertConfig); + } + + @SneakyThrows + @Test + void put() { + for (int i = 0; i < alertEventPendingQueue.capacity(); i++) { + alertEventPendingQueue.put(new Alert()); + } + + CompletableFuture completableFuture = CompletableFuture.runAsync(() -> { + try { + alertEventPendingQueue.put(new Alert()); + System.out.println(alertEventPendingQueue.size()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + assertThrowsExactly(ConditionTimeoutException.class, + () -> await() + .timeout(Duration.ofSeconds(2)) + .until(completableFuture::isDone)); + + } + + @Test + void take() { + CompletableFuture completableFuture = CompletableFuture.runAsync(() -> { + try { + alertEventPendingQueue.take(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + assertThrowsExactly(ConditionTimeoutException.class, + () -> await() + .timeout(Duration.ofSeconds(2)) + .until(completableFuture::isDone)); + } + + @SneakyThrows + @Test + void size() { + for (int i = 0; i < alertEventPendingQueue.capacity(); i++) { + 
alertEventPendingQueue.put(new Alert()); + assertThat(alertEventPendingQueue.size()).isEqualTo(i + 1); + } + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactoryTest.java similarity index 53% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactoryTest.java index fe8e5303f532..50f44fb43f31 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/service/AlertSenderThreadPoolFactoryTest.java @@ -15,36 +15,30 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.audit; +package org.apache.dolphinscheduler.alert.service; -import org.apache.dolphinscheduler.common.enums.AuditOperationType; -import org.apache.dolphinscheduler.common.enums.AuditResourceType; -import org.apache.dolphinscheduler.dao.entity.AuditLog; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.AuditLogMapper; +import static com.google.common.truth.Truth.assertThat; -import java.util.Date; +import org.apache.dolphinscheduler.alert.config.AlertConfig; + +import java.util.concurrent.ThreadPoolExecutor; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -public class AuditSubscriberTest { +class AlertSenderThreadPoolFactoryTest { - @Mock - private AuditLogMapper logMapper; + private final AlertConfig alertConfig = new AlertConfig(); - @InjectMocks - private AuditSubscriberImpl auditSubscriber; + private final AlertSenderThreadPoolFactory alertSenderThreadPoolFactory = + new AlertSenderThreadPoolFactory(alertConfig); @Test - public void testExecute() { - Mockito.when(logMapper.insert(Mockito.any(AuditLog.class))).thenReturn(1); - auditSubscriber.execute( - new AuditMessage(new User(), new Date(), AuditResourceType.USER_MODULE, AuditOperationType.CREATE, 1)); + void getThreadPool() { + ThreadPoolExecutor threadPool = alertSenderThreadPoolFactory.getThreadPool(); + assertThat(threadPool.getCorePoolSize()).isEqualTo(alertConfig.getSenderParallelism()); + assertThat(threadPool.getMaximumPoolSize()).isEqualTo(alertConfig.getSenderParallelism()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/application.yaml b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/application.yaml new file mode 100644 index 
000000000000..d16d05a678ac --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/application.yaml @@ -0,0 +1,107 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +spring: + profiles: + active: postgresql + jackson: + time-zone: UTC + date-format: "yyyy-MM-dd HH:mm:ss" + banner: + charset: UTF-8 + datasource: + driver-class-name: org.postgresql.Driver + url: jdbc:postgresql://127.0.0.1:5432/dolphinscheduler + username: root + password: root + hikari: + connection-test-query: select 1 + pool-name: DolphinScheduler + +# Mybatis-plus configuration, you don't need to change it +mybatis-plus: + mapper-locations: classpath:org/apache/dolphinscheduler/dao/mapper/*Mapper.xml + type-aliases-package: org.apache.dolphinscheduler.dao.entity + configuration: + cache-enabled: false + call-setters-on-nulls: true + map-underscore-to-camel-case: true + jdbc-type-for-null: NULL + global-config: + db-config: + id-type: auto + banner: false + +server: + port: 50053 + +management: + endpoints: + web: + exposure: + include: health,metrics,prometheus + endpoint: + health: + enabled: true + show-details: always + health: + db: + enabled: true + defaults: + enabled: false + metrics: + tags: + application: 
${spring.application.name} + +alert: + port: 50052 + # Mark each alert of alert server if late after x milliseconds as failed. + # Define value is (0 = infinite), and alert server would be waiting alert result. + wait-timeout: 10 + max-heartbeat-interval: 59s + # The maximum number of alerts that can be processed in parallel + sender-parallelism: 101 + +registry: + type: zookeeper + zookeeper: + namespace: dolphinscheduler + connect-string: localhost:2181 + retry-policy: + base-sleep-time: 60ms + max-sleep: 300ms + max-retries: 5 + session-timeout: 30s + connection-timeout: 9s + block-until-connected: 600ms + digest: ~ + +metrics: + enabled: true + +# Override by profile + +--- +spring: + config: + activate: + on-profile: mysql + datasource: + driver-class-name: com.mysql.cj.jdbc.Driver + url: jdbc:mysql://127.0.0.1:3306/dolphinscheduler + username: root + password: root diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/logback.xml b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java index 3b90131af9b6..6a563efb481b 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ExecutorAPITest.java @@ -31,17 +31,8 @@ import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; 
import org.apache.dolphinscheduler.common.enums.WarningType; -import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; - -import lombok.extern.slf4j.Slf4j; - import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; @@ -51,6 +42,14 @@ import java.util.LinkedHashMap; import java.util.List; +import lombok.extern.slf4j.Slf4j; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; + //TODO: Some test cases rely on ProcessInstance APIs. Should complete remaining cases after ProcessInstance related API tests done. @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j @@ -80,7 +79,8 @@ public class ExecutorAPITest { public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); executorPage = new ExecutorPage(sessionId); processDefinitionPage = new ProcessDefinitionPage(sessionId); projectPage = new ProjectPage(sessionId); @@ -103,24 +103,30 @@ public void testStartProcessInstance() { HttpResponse createProjectResponse = projectPage.createProject(loginUser, "project-test"); HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); Assertions.assertTrue(queryAllProjectListResponse.getBody().getSuccess()); - projectCode = (long) ((LinkedHashMap) ((List) 
queryAllProjectListResponse.getBody().getData()).get(0)).get("code"); + projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse + .getBody().getData()).get(0)).get("code"); // upload test workflow definition json ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); + .importProcessDefinition(loginUser, projectCode, file); String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity()); Assertions.assertTrue(data.contains("\"success\":true")); // get workflow definition code - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); + HttpResponse queryAllProcessDefinitionByProjectCodeResponse = + processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); - processDefinitionCode = (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse.getBody().getData()).get(0)).get("processDefinition")).get("code"); + Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString() + .contains("hello world")); + processDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("processDefinition")).get("code"); // release test workflow - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, ReleaseState.ONLINE); + HttpResponse 
releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, + projectCode, processDefinitionCode, ReleaseState.ONLINE); Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); // trigger workflow instance @@ -128,11 +134,12 @@ public void testStartProcessInstance() { Date date = new Date(); String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date)); log.info("use current time {} as scheduleTime", scheduleTime); - HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); + HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, + processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); Assertions.assertTrue(startProcessInstanceResponse.getBody().getSuccess()); triggerCode = (long) startProcessInstanceResponse.getBody().getData(); - } catch (Exception e) { + } catch (Exception e) { log.error("failed", e); Assertions.fail(); } @@ -141,7 +148,8 @@ public void testStartProcessInstance() { @Test @Order(2) public void testStartCheckProcessDefinition() { - HttpResponse testStartCheckProcessDefinitionResponse = executorPage.startCheckProcessDefinition(loginUser, projectCode, processDefinitionCode); + HttpResponse testStartCheckProcessDefinitionResponse = + executorPage.startCheckProcessDefinition(loginUser, projectCode, processDefinitionCode); Assertions.assertTrue(testStartCheckProcessDefinitionResponse.getBody().getSuccess()); } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessDefinitionAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessDefinitionAPITest.java index 4c4507e78591..a5870ceca29d 100644 --- 
a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessDefinitionAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessDefinitionAPITest.java @@ -30,21 +30,21 @@ import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.User; -import java.io.File;; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.util.EntityUtils; + +import java.io.File; import java.util.LinkedHashMap; import java.util.List; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -import lombok.extern.slf4j.Slf4j; - -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.util.EntityUtils; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j public class ProcessDefinitionAPITest { @@ -67,12 +67,12 @@ public class ProcessDefinitionAPITest { private static String processDefinitionName; - @BeforeAll public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); processDefinitionPage = new ProcessDefinitionPage(sessionId); projectPage = new ProjectPage(sessionId); loginUser = new User(); @@ -93,14 +93,15 @@ public void testImportProcessDefinition() { HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); Assertions.assertTrue(queryAllProjectListResponse.getBody().getSuccess()); - projectCode = (long) 
((LinkedHashMap) ((List) queryAllProjectListResponse.getBody().getData()).get(0)).get("code"); + projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse + .getBody().getData()).get(0)).get("code"); ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); + .importProcessDefinition(loginUser, projectCode, file); String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity()); Assertions.assertTrue(data.contains("\"success\":true")); - } catch (Exception e) { + } catch (Exception e) { log.error("failed", e); Assertions.fail(); } @@ -109,72 +110,92 @@ public void testImportProcessDefinition() { @Test @Order(2) public void testQueryAllProcessDefinitionByProjectCode() { - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); + HttpResponse queryAllProcessDefinitionByProjectCodeResponse = + processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); - processDefinitionCode = (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse.getBody().getData()).get(0)).get("processDefinition")).get("code"); - processDefinitionName = (String) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse.getBody().getData()).get(0)).get("processDefinition")).get("name"); + Assertions.assertTrue( + queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); + processDefinitionCode = + (long) 
((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("processDefinition")).get("code"); + processDefinitionName = + (String) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("processDefinition")).get("name"); } @Test @Order(3) public void testQueryProcessDefinitionByCode() { - HttpResponse queryProcessDefinitionByCodeResponse = processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); + HttpResponse queryProcessDefinitionByCodeResponse = + processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess()); - Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("hello world")); + Assertions.assertTrue( + queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(4) public void testgetProcessListByProjectCode() { - HttpResponse getProcessListByProjectCodeResponse = processDefinitionPage.getProcessListByProjectCode(loginUser, projectCode); + HttpResponse getProcessListByProjectCodeResponse = + processDefinitionPage.getProcessListByProjectCode(loginUser, projectCode); Assertions.assertTrue(getProcessListByProjectCodeResponse.getBody().getSuccess()); - Assertions.assertTrue(getProcessListByProjectCodeResponse.getBody().getData().toString().contains("test_import")); + Assertions + .assertTrue(getProcessListByProjectCodeResponse.getBody().getData().toString().contains("test_import")); } @Test @Order(5) public void testQueryProcessDefinitionByName() { - HttpResponse queryProcessDefinitionByNameResponse = processDefinitionPage.queryProcessDefinitionByName(loginUser, projectCode, processDefinitionName); + HttpResponse queryProcessDefinitionByNameResponse = + 
processDefinitionPage.queryProcessDefinitionByName(loginUser, projectCode, processDefinitionName); Assertions.assertTrue(queryProcessDefinitionByNameResponse.getBody().getSuccess()); - Assertions.assertTrue(queryProcessDefinitionByNameResponse.getBody().getData().toString().contains("hello world")); + Assertions.assertTrue( + queryProcessDefinitionByNameResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(6) public void testQueryProcessDefinitionList() { - HttpResponse queryProcessDefinitionListResponse = processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); + HttpResponse queryProcessDefinitionListResponse = + processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess()); - Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); + Assertions + .assertTrue(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); } @Test @Order(7) public void testReleaseProcessDefinition() { - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, ReleaseState.ONLINE); + HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, + projectCode, processDefinitionCode, ReleaseState.ONLINE); Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); - HttpResponse queryProcessDefinitionByCodeResponse = processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); + HttpResponse queryProcessDefinitionByCodeResponse = + processDefinitionPage.queryProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getSuccess()); - 
Assertions.assertTrue(queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE")); + Assertions.assertTrue( + queryProcessDefinitionByCodeResponse.getBody().getData().toString().contains("releaseState=ONLINE")); } @Test @Order(8) public void testDeleteProcessDefinitionByCode() { - HttpResponse deleteProcessDefinitionByCodeResponse = processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); + HttpResponse deleteProcessDefinitionByCodeResponse = + processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); Assertions.assertFalse(deleteProcessDefinitionByCodeResponse.getBody().getSuccess()); - HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, ReleaseState.OFFLINE); + HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, + projectCode, processDefinitionCode, ReleaseState.OFFLINE); Assertions.assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); - deleteProcessDefinitionByCodeResponse = processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); + deleteProcessDefinitionByCodeResponse = + processDefinitionPage.deleteProcessDefinitionByCode(loginUser, projectCode, processDefinitionCode); Assertions.assertTrue(deleteProcessDefinitionByCodeResponse.getBody().getSuccess()); - HttpResponse queryProcessDefinitionListResponse = processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); + HttpResponse queryProcessDefinitionListResponse = + processDefinitionPage.queryProcessDefinitionList(loginUser, projectCode); Assertions.assertTrue(queryProcessDefinitionListResponse.getBody().getSuccess()); - Assertions.assertFalse(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); + Assertions + 
.assertFalse(queryProcessDefinitionListResponse.getBody().getData().toString().contains("hello world")); } } - - diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessInstanceAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessInstanceAPITest.java index 0c939bb0fb28..1ddc2f8275b9 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessInstanceAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProcessInstanceAPITest.java @@ -48,6 +48,8 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; @@ -55,8 +57,6 @@ import org.junit.jupiter.api.Test; import org.testcontainers.shaded.org.awaitility.Awaitility; -import lombok.extern.slf4j.Slf4j; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j public class ProcessInstanceAPITest { @@ -89,7 +89,8 @@ public class ProcessInstanceAPITest { public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); processInstancePage = new ProcessInstancePage(sessionId); executorPage = new ExecutorPage(sessionId); processDefinitionPage = new ProcessDefinitionPage(sessionId); @@ -113,24 +114,30 @@ public void testQueryProcessInstancesByTriggerCode() { HttpResponse createProjectResponse = projectPage.createProject(loginUser, 
"project-test"); HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); assertTrue(queryAllProjectListResponse.getBody().getSuccess()); - projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse.getBody().getData()).get(0)).get("code"); + projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse + .getBody().getData()).get(0)).get("code"); // upload test workflow definition json ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); CloseableHttpResponse importProcessDefinitionResponse = processDefinitionPage - .importProcessDefinition(loginUser, projectCode, file); + .importProcessDefinition(loginUser, projectCode, file); String data = EntityUtils.toString(importProcessDefinitionResponse.getEntity()); assertTrue(data.contains("\"success\":true")); // get workflow definition code - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); + HttpResponse queryAllProcessDefinitionByProjectCodeResponse = + processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString().contains("hello world")); - processDefinitionCode = (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse.getBody().getData()).get(0)).get("processDefinition")).get("code"); + assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getData().toString() + .contains("hello world")); + processDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("processDefinition")).get("code"); // release test workflow - HttpResponse 
releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, ReleaseState.ONLINE); + HttpResponse releaseProcessDefinitionResponse = processDefinitionPage.releaseProcessDefinition(loginUser, + projectCode, processDefinitionCode, ReleaseState.ONLINE); assertTrue(releaseProcessDefinitionResponse.getBody().getSuccess()); // trigger workflow instance @@ -138,23 +145,27 @@ public void testQueryProcessInstancesByTriggerCode() { Date date = new Date(); String scheduleTime = String.format("%s,%s", formatter.format(date), formatter.format(date)); log.info("use current time {} as scheduleTime", scheduleTime); - HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); + HttpResponse startProcessInstanceResponse = executorPage.startProcessInstance(loginUser, projectCode, + processDefinitionCode, scheduleTime, FailureStrategy.END, WarningType.NONE); assertTrue(startProcessInstanceResponse.getBody().getSuccess()); // make sure process instance has completed and successfully persisted into db Awaitility.await() - .atMost(30, TimeUnit.SECONDS) - .untilAsserted(() -> { - // query workflow instance by trigger code - triggerCode = (long) startProcessInstanceResponse.getBody().getData(); - HttpResponse queryProcessInstancesByTriggerCodeResponse = processInstancePage.queryProcessInstancesByTriggerCode(loginUser, projectCode, triggerCode); - assertTrue(queryProcessInstancesByTriggerCodeResponse.getBody().getSuccess()); - List> body = (List>) queryProcessInstancesByTriggerCodeResponse.getBody().getData(); - assertTrue(CollectionUtils.isNotEmpty(body)); - assertEquals("SUCCESS", body.get(0).get("state")); - processInstanceId = (int) body.get(0).get("id"); - }); - } catch (Exception e) { + .atMost(30, TimeUnit.SECONDS) + .untilAsserted(() -> { + // query workflow instance by trigger code + triggerCode = 
(long) startProcessInstanceResponse.getBody().getData(); + HttpResponse queryProcessInstancesByTriggerCodeResponse = processInstancePage + .queryProcessInstancesByTriggerCode(loginUser, projectCode, triggerCode); + assertTrue(queryProcessInstancesByTriggerCodeResponse.getBody().getSuccess()); + List> body = + (List>) queryProcessInstancesByTriggerCodeResponse + .getBody().getData(); + assertTrue(CollectionUtils.isNotEmpty(body)); + assertEquals("SUCCESS", body.get(0).get("state")); + processInstanceId = (int) body.get(0).get("id"); + }); + } catch (Exception e) { log.error("failed", e); Assertions.fail(); } @@ -163,7 +174,8 @@ public void testQueryProcessInstancesByTriggerCode() { @Test @Order(2) public void testQueryProcessInstanceList() { - HttpResponse queryProcessInstanceListResponse = processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10); + HttpResponse queryProcessInstanceListResponse = + processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10); assertTrue(queryProcessInstanceListResponse.getBody().getSuccess()); assertTrue(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import")); } @@ -171,7 +183,8 @@ public void testQueryProcessInstanceList() { @Test @Order(3) public void testQueryTaskListByProcessId() { - HttpResponse queryTaskListByProcessIdResponse = processInstancePage.queryTaskListByProcessId(loginUser, projectCode, processInstanceId); + HttpResponse queryTaskListByProcessIdResponse = + processInstancePage.queryTaskListByProcessId(loginUser, projectCode, processInstanceId); assertTrue(queryTaskListByProcessIdResponse.getBody().getSuccess()); assertTrue(queryTaskListByProcessIdResponse.getBody().getData().toString().contains("test_import")); } @@ -179,7 +192,8 @@ public void testQueryTaskListByProcessId() { @Test @Order(4) public void testQueryProcessInstanceById() { - HttpResponse queryProcessInstanceByIdResponse = processInstancePage.queryProcessInstanceById(loginUser, 
projectCode, processInstanceId); + HttpResponse queryProcessInstanceByIdResponse = + processInstancePage.queryProcessInstanceById(loginUser, projectCode, processInstanceId); assertTrue(queryProcessInstanceByIdResponse.getBody().getSuccess()); assertTrue(queryProcessInstanceByIdResponse.getBody().getData().toString().contains("test_import")); } @@ -187,10 +201,12 @@ public void testQueryProcessInstanceById() { @Test @Order(5) public void testDeleteProcessInstanceById() { - HttpResponse deleteProcessInstanceByIdResponse = processInstancePage.deleteProcessInstanceById(loginUser, projectCode, processInstanceId); + HttpResponse deleteProcessInstanceByIdResponse = + processInstancePage.deleteProcessInstanceById(loginUser, projectCode, processInstanceId); assertTrue(deleteProcessInstanceByIdResponse.getBody().getSuccess()); - HttpResponse queryProcessInstanceListResponse = processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10); + HttpResponse queryProcessInstanceListResponse = + processInstancePage.queryProcessInstanceList(loginUser, projectCode, 1, 10); assertTrue(queryProcessInstanceListResponse.getBody().getSuccess()); Assertions.assertFalse(queryProcessInstanceListResponse.getBody().getData().toString().contains("test_import")); } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java index 40caa45a3182..cf5621f06c37 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/ProjectAPITest.java @@ -29,17 +29,17 @@ import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; 
+import java.util.LinkedHashMap; +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -import lombok.extern.slf4j.Slf4j; - -import java.util.LinkedHashMap; -import java.util.List; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j // TODO: Add more detailed permission control related cases after userPage test cases completed @@ -59,7 +59,8 @@ public class ProjectAPITest { public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); projectPage = new ProjectPage(sessionId); loginUser = new User(); loginUser.setUserName("admin"); @@ -98,7 +99,8 @@ public void testUpdateProject() { List projects = (List) queryAllProjectListResponse.getBody().getData(); Long code = (Long) projects.get(0).get("code"); - HttpResponse updateProjectResponse = projectPage.updateProject(loginUser, code,"project-new", loginUser.getUserName()); + HttpResponse updateProjectResponse = + projectPage.updateProject(loginUser, code, "project-new", loginUser.getUserName()); Assertions.assertTrue(updateProjectResponse.getBody().getSuccess()); queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); @@ -133,17 +135,21 @@ public void testQueryProjectListPaging() { @Test @Order(6) public void testQueryProjectWithAuthorizedLevelListPaging() { - HttpResponse queryProjectWithAuthorizedLevelListPagingResponse = projectPage.queryProjectWithAuthorizedLevelListPaging(loginUser, loginUser.getId(),1, 1); + HttpResponse 
queryProjectWithAuthorizedLevelListPagingResponse = + projectPage.queryProjectWithAuthorizedLevelListPaging(loginUser, loginUser.getId(), 1, 1); Assertions.assertTrue(queryProjectWithAuthorizedLevelListPagingResponse.getBody().getSuccess()); - Assertions.assertTrue(queryProjectWithAuthorizedLevelListPagingResponse.getBody().getData().toString().contains("project-new")); + Assertions.assertTrue(queryProjectWithAuthorizedLevelListPagingResponse.getBody().getData().toString() + .contains("project-new")); } @Test @Order(7) public void testQueryUnauthorizedProject() { - HttpResponse queryUnauthorizedProjectResponse = projectPage.queryUnauthorizedProject(loginUser, loginUser.getId()); + HttpResponse queryUnauthorizedProjectResponse = + projectPage.queryUnauthorizedProject(loginUser, loginUser.getId()); Assertions.assertTrue(queryUnauthorizedProjectResponse.getBody().getSuccess()); - // project-new was created by instead of authorized to this user, therefore, it should be in the unauthorized list + // project-new was created by instead of authorized to this user, therefore, it should be in the unauthorized + // list Assertions.assertTrue(queryUnauthorizedProjectResponse.getBody().getData().toString().contains("project-new")); } @@ -152,17 +158,21 @@ public void testQueryUnauthorizedProject() { public void testQueryAuthorizedProject() { HttpResponse queryAuthorizedProjectResponse = projectPage.queryAuthorizedProject(loginUser, loginUser.getId()); Assertions.assertTrue(queryAuthorizedProjectResponse.getBody().getSuccess()); - // project-new was created by instead of authorized to this user, therefore, it should not be in the authorized list + // project-new was created by instead of authorized to this user, therefore, it should not be in the authorized + // list Assertions.assertFalse(queryAuthorizedProjectResponse.getBody().getData().toString().contains("project-new")); } @Test @Order(9) public void testQueryProjectWithAuthorizedLevel() { - HttpResponse 
queryProjectWithAuthorizedLevelResponse = projectPage.queryProjectWithAuthorizedLevel(loginUser, loginUser.getId()); + HttpResponse queryProjectWithAuthorizedLevelResponse = + projectPage.queryProjectWithAuthorizedLevel(loginUser, loginUser.getId()); Assertions.assertTrue(queryProjectWithAuthorizedLevelResponse.getBody().getSuccess()); - // queryProjectWithAuthorizedLevel api returns a joint-set of projects both created by and authorized to the user - Assertions.assertTrue(queryProjectWithAuthorizedLevelResponse.getBody().getData().toString().contains("project-new")); + // queryProjectWithAuthorizedLevel api returns a joint-set of projects both created by and authorized to the + // user + Assertions.assertTrue( + queryProjectWithAuthorizedLevelResponse.getBody().getData().toString().contains("project-new")); } @Test @@ -181,10 +191,13 @@ public void testQueryAuthorizedUser() { @Test @Order(11) public void testQueryProjectCreatedAndAuthorizedByUser() { - HttpResponse queryProjectCreatedAndAuthorizedByUserResponse = projectPage.queryProjectCreatedAndAuthorizedByUser(loginUser); + HttpResponse queryProjectCreatedAndAuthorizedByUserResponse = + projectPage.queryProjectCreatedAndAuthorizedByUser(loginUser); Assertions.assertTrue(queryProjectCreatedAndAuthorizedByUserResponse.getBody().getSuccess()); - // queryProjectCreatedAndAuthorizedByUser api returns a joint-set of projects both created by and authorized to the user - Assertions.assertTrue(queryProjectCreatedAndAuthorizedByUserResponse.getBody().getData().toString().contains("project-new")); + // queryProjectCreatedAndAuthorizedByUser api returns a joint-set of projects both created by and authorized to + // the user + Assertions.assertTrue( + queryProjectCreatedAndAuthorizedByUserResponse.getBody().getData().toString().contains("project-new")); } @Test @@ -192,7 +205,8 @@ public void testQueryProjectCreatedAndAuthorizedByUser() { public void testQueryAllProjectListForDependent() { HttpResponse 
queryAllProjectListForDependentResponse = projectPage.queryAllProjectListForDependent(loginUser); Assertions.assertTrue(queryAllProjectListForDependentResponse.getBody().getSuccess()); - Assertions.assertTrue(queryAllProjectListForDependentResponse.getBody().getData().toString().contains("project-new")); + Assertions.assertTrue( + queryAllProjectListForDependentResponse.getBody().getData().toString().contains("project-new")); } @Test @@ -203,8 +217,7 @@ public void testDeleteProject() { Long code = (Long) projects.get(0).get("code"); HttpResponse queryAllProjectListForDependentResponse = projectPage.deleteProject(loginUser, code); Assertions.assertTrue(queryAllProjectListForDependentResponse.getBody().getSuccess()); - Assertions.assertFalse(queryAllProjectListForDependentResponse.getBody().getData().toString().contains("project-new")); + Assertions.assertFalse( + queryAllProjectListForDependentResponse.getBody().getData().toString().contains("project-new")); } } - - diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java index ac06afefc6f4..9d02acfd28bd 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/SchedulerAPITest.java @@ -35,14 +35,14 @@ import java.util.LinkedHashMap; import java.util.List; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -import lombok.extern.slf4j.Slf4j; - @DolphinScheduler(composeFiles = 
"docker/basic/docker-compose.yaml") @Slf4j public class SchedulerAPITest { @@ -67,12 +67,12 @@ public class SchedulerAPITest { private static int scheduleId; - @BeforeAll public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); projectPage = new ProjectPage(sessionId); schedulerPage = new SchedulerPage(sessionId); processDefinitionPage = new ProcessDefinitionPage(sessionId); @@ -94,17 +94,24 @@ public void testCreateSchedule() { HttpResponse queryAllProjectListResponse = projectPage.queryAllProjectList(loginUser); Assertions.assertTrue(queryAllProjectListResponse.getBody().getSuccess()); - projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse.getBody().getData()).get(0)).get("code"); + projectCode = (long) ((LinkedHashMap) ((List) queryAllProjectListResponse + .getBody().getData()).get(0)).get("code"); ClassLoader classLoader = getClass().getClassLoader(); File file = new File(classLoader.getResource("workflow-json/test.json").getFile()); processDefinitionPage.importProcessDefinition(loginUser, projectCode, file); - HttpResponse queryAllProcessDefinitionByProjectCodeResponse = processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); + HttpResponse queryAllProcessDefinitionByProjectCodeResponse = + processDefinitionPage.queryAllProcessDefinitionByProjectCode(loginUser, projectCode); Assertions.assertTrue(queryAllProcessDefinitionByProjectCodeResponse.getBody().getSuccess()); - processDefinitionCode = (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse.getBody().getData()).get(0)).get("processDefinition")).get("code"); - - 
processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, ReleaseState.ONLINE); - final String schedule = "{\"startTime\":\"2019-08-08 00:00:00\",\"endTime\":\"2100-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? *\"}" ; - HttpResponse createScheduleResponse = schedulerPage.createSchedule(loginUser, projectCode, processDefinitionCode, schedule); + processDefinitionCode = + (long) ((LinkedHashMap) ((LinkedHashMap) ((List) queryAllProcessDefinitionByProjectCodeResponse + .getBody().getData()).get(0)).get("processDefinition")).get("code"); + + processDefinitionPage.releaseProcessDefinition(loginUser, projectCode, processDefinitionCode, + ReleaseState.ONLINE); + final String schedule = + "{\"startTime\":\"2019-08-08 00:00:00\",\"endTime\":\"2100-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? *\"}"; + HttpResponse createScheduleResponse = + schedulerPage.createSchedule(loginUser, projectCode, processDefinitionCode, schedule); Assertions.assertTrue(createScheduleResponse.getBody().getSuccess()); Assertions.assertTrue(createScheduleResponse.getBody().getData().toString().contains("2019-08-08")); } @@ -115,13 +122,15 @@ public void testQueryScheduleList() { HttpResponse queryScheduleListResponse = schedulerPage.queryScheduleList(loginUser, projectCode); Assertions.assertTrue(queryScheduleListResponse.getBody().getSuccess()); Assertions.assertTrue(queryScheduleListResponse.getBody().getData().toString().contains("2019-08-08")); - scheduleId = (int) ((LinkedHashMap) ((List) queryScheduleListResponse.getBody().getData()).get(0)).get("id"); + scheduleId = (int) ((LinkedHashMap) ((List) queryScheduleListResponse.getBody() + .getData()).get(0)).get("id"); } @Test @Order(3) public void testPublishScheduleOnline() { - HttpResponse publishScheduleOnlineResponse = schedulerPage.publishScheduleOnline(loginUser, projectCode, scheduleId); + HttpResponse publishScheduleOnlineResponse = + 
schedulerPage.publishScheduleOnline(loginUser, projectCode, scheduleId); Assertions.assertTrue(publishScheduleOnlineResponse.getBody().getSuccess()); HttpResponse queryScheduleListResponse = schedulerPage.queryScheduleList(loginUser, projectCode); @@ -137,14 +146,17 @@ public void testOfflineSchedule() { HttpResponse queryScheduleListResponse = schedulerPage.queryScheduleList(loginUser, projectCode); Assertions.assertTrue(queryScheduleListResponse.getBody().getSuccess()); - Assertions.assertTrue(queryScheduleListResponse.getBody().getData().toString().contains("releaseState=OFFLINE")); + Assertions + .assertTrue(queryScheduleListResponse.getBody().getData().toString().contains("releaseState=OFFLINE")); } @Test @Order(5) public void testUpdateSchedule() { - final String schedule = "{\"startTime\":\"1996-08-08 00:00:00\",\"endTime\":\"2200-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? *\"}"; - HttpResponse updateScheduleResponse = schedulerPage.updateSchedule(loginUser, projectCode, scheduleId, schedule); + final String schedule = + "{\"startTime\":\"1996-08-08 00:00:00\",\"endTime\":\"2200-08-08 00:00:00\",\"timezoneId\":\"America/Phoenix\",\"crontab\":\"0 0 3/6 * * ? 
*\"}"; + HttpResponse updateScheduleResponse = + schedulerPage.updateSchedule(loginUser, projectCode, scheduleId, schedule); Assertions.assertTrue(updateScheduleResponse.getBody().getSuccess()); HttpResponse queryScheduleListResponse = schedulerPage.queryScheduleList(loginUser, projectCode); @@ -163,5 +175,3 @@ public void testDeleteScheduleById() { Assertions.assertFalse(queryScheduleListResponse.getBody().getData().toString().contains("1996-08-08")); } } - - diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java index 76dbd21cdc9f..2f8e6aa056a5 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/TenantAPITest.java @@ -28,17 +28,18 @@ import org.apache.dolphinscheduler.api.test.pages.security.TenantPage; import org.apache.dolphinscheduler.api.test.utils.JSONUtils; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -import lombok.extern.slf4j.Slf4j; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j public class TenantAPITest { + private static final String tenant = System.getProperty("user.name"); private static final String user = "admin"; @@ -54,7 +55,8 @@ public static void setup() { LoginPage loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(user, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + 
JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); } @AfterAll @@ -90,7 +92,9 @@ public void testGetTenantListPaging() { HttpResponse createTenantHttpResponse = tenantPage.getTenantListPaging(sessionId, 1, 10, ""); boolean result = false; - for (TenantListPagingResponseTotalList tenantListPagingResponseTotalList : JSONUtils.convertValue(createTenantHttpResponse.getBody().getData(), TenantListPagingResponseData.class).getTotalList()) { + for (TenantListPagingResponseTotalList tenantListPagingResponseTotalList : JSONUtils + .convertValue(createTenantHttpResponse.getBody().getData(), TenantListPagingResponseData.class) + .getTotalList()) { if (tenantListPagingResponseTotalList.getTenantCode().equals(tenant)) { result = true; existTenantId = tenantListPagingResponseTotalList.getId(); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java index a83dea06e1a3..d34f6bad2d3a 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/cases/WorkerGroupAPITest.java @@ -33,14 +33,14 @@ import java.util.List; import java.util.Set; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; -import lombok.extern.slf4j.Slf4j; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") @Slf4j public class WorkerGroupAPITest { @@ -59,7 +59,8 @@ public class WorkerGroupAPITest { public static void setup() { LoginPage 
loginPage = new LoginPage(); HttpResponse loginHttpResponse = loginPage.login(username, password); - sessionId = JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); + sessionId = + JSONUtils.convertValue(loginHttpResponse.getBody().getData(), LoginResponseData.class).getSessionId(); workerGroupPage = new WorkerGroupPage(sessionId); loginUser = new User(); loginUser.setId(123); @@ -75,7 +76,7 @@ public static void cleanup() { @Order(1) public void testSaveWorkerGroup() { HttpResponse saveWorkerGroupHttpResponse = workerGroupPage - .saveWorkerGroup(loginUser, 1, "test_worker_group", "10.5.0.5:1234", "test", null); + .saveWorkerGroup(loginUser, 1, "test_worker_group", "10.5.0.5:1234", "test", null); Assertions.assertTrue(saveWorkerGroupHttpResponse.getBody().getSuccess()); HttpResponse queryAllWorkerGroupsResponse = workerGroupPage.queryAllWorkerGroups(loginUser); @@ -88,9 +89,10 @@ public void testSaveWorkerGroup() { @Test @Order(2) public void testQueryAllWorkerGroupsPaging() { - HttpResponse queryAllWorkerGroupsPagingResponse = workerGroupPage.queryAllWorkerGroupsPaging(loginUser, 1, 2, null); + HttpResponse queryAllWorkerGroupsPagingResponse = + workerGroupPage.queryAllWorkerGroupsPaging(loginUser, 1, 2, null); Assertions.assertTrue(queryAllWorkerGroupsPagingResponse.getBody().getSuccess()); - String workerGroupPageInfoData = queryAllWorkerGroupsPagingResponse.getBody().getData().toString(); + String workerGroupPageInfoData = queryAllWorkerGroupsPagingResponse.getBody().getData().toString(); Assertions.assertTrue(workerGroupPageInfoData.contains("test_worker_group")); } @@ -100,7 +102,7 @@ public void testQueryAllWorkerGroups() { HttpResponse queryAllWorkerGroupsResponse = workerGroupPage.queryAllWorkerGroups(loginUser); Assertions.assertTrue(queryAllWorkerGroupsResponse.getBody().getSuccess()); - String workerGroupPageInfoData = queryAllWorkerGroupsResponse.getBody().getData().toString(); + String 
workerGroupPageInfoData = queryAllWorkerGroupsResponse.getBody().getData().toString(); Assertions.assertTrue(workerGroupPageInfoData.contains("test_worker_group")); } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseData.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseData.java index 415d711ebc3c..5577f58b3f56 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseData.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseData.java @@ -29,6 +29,7 @@ @NoArgsConstructor @Data public class TenantListPagingResponseData { + private Integer currentPage; private Integer pageSize; diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseTotalList.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseTotalList.java index e75196af8eed..25abbeb05610 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseTotalList.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/entity/TenantListPagingResponseTotalList.java @@ -29,6 +29,7 @@ @NoArgsConstructor @Data public class TenantListPagingResponseTotalList { + private Date createTime; private Date updateTime; diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/LoginPage.java 
b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/LoginPage.java index a94231329831..8b40b080c6e5 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/LoginPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/LoginPage.java @@ -26,6 +26,7 @@ import java.util.Map; public final class LoginPage { + public HttpResponse login(String username, String password) { Map params = new HashMap<>(); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/project/ProjectPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/project/ProjectPage.java index b5f0a31bfc4f..efd84c2ada94 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/project/ProjectPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/project/ProjectPage.java @@ -19,8 +19,6 @@ package org.apache.dolphinscheduler.api.test.pages.project; -import lombok.AllArgsConstructor; - import org.apache.dolphinscheduler.api.test.core.Constants; import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import org.apache.dolphinscheduler.api.test.utils.RequestClient; @@ -29,8 +27,11 @@ import java.util.HashMap; import java.util.Map; +import lombok.AllArgsConstructor; + @AllArgsConstructor public final class ProjectPage { + private String sessionId; public HttpResponse createProject(User loginUser, String projectName) { @@ -90,7 +91,8 @@ public HttpResponse queryProjectListPaging(User loginUser, Integer pageSize, Int return requestClient.get("/projects", headers, params); } - public HttpResponse 
queryProjectWithAuthorizedLevelListPaging(User loginUser, Integer userId, Integer pageSize, Integer pageNo) { + public HttpResponse queryProjectWithAuthorizedLevelListPaging(User loginUser, Integer userId, Integer pageSize, + Integer pageNo) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("userId", userId); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/TenantPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/TenantPage.java index 1d1d74de4990..6c4b4733629c 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/TenantPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/TenantPage.java @@ -27,6 +27,7 @@ import java.util.Map; public final class TenantPage { + public HttpResponse createTenant(String sessionId, String tenant, Integer queueId, String description) { Map params = new HashMap<>(); params.put("tenantCode", tenant); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/WorkerGroupPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/WorkerGroupPage.java index 9d6185fd9a02..1d34d7e76332 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/WorkerGroupPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/security/WorkerGroupPage.java @@ -19,8 +19,6 @@ package org.apache.dolphinscheduler.api.test.pages.security; -import lombok.AllArgsConstructor; - import 
org.apache.dolphinscheduler.api.test.core.Constants; import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import org.apache.dolphinscheduler.api.test.utils.RequestClient; @@ -29,13 +27,15 @@ import java.util.HashMap; import java.util.Map; +import lombok.AllArgsConstructor; @AllArgsConstructor public class WorkerGroupPage { private String sessionId; - public HttpResponse saveWorkerGroup(User loginUser, int id, String name, String addrList, String description, String otherParamsJson) { + public HttpResponse saveWorkerGroup(User loginUser, int id, String name, String addrList, String description, + String otherParamsJson) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("id", id); @@ -96,5 +96,4 @@ public HttpResponse queryWorkerAddressList(User loginUser) { return requestClient.get("/worker-groups/worker-address-list", headers, params); } - } diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java index 7ae09cafeaa4..1318aa6f3e21 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ExecutorPage.java @@ -34,14 +34,15 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @AllArgsConstructor public class ExecutorPage { private String sessionId; - public HttpResponse startProcessInstance(User loginUser, long projectCode, long processDefinitionCode, String scheduleTime, FailureStrategy failureStrategy, WarningType warningType) { + public HttpResponse startProcessInstance(User loginUser, long projectCode, long processDefinitionCode, + String 
scheduleTime, FailureStrategy failureStrategy, + WarningType warningType) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("processDefinitionCode", processDefinitionCode); @@ -93,7 +94,8 @@ public HttpResponse startCheckProcessDefinition(User loginUser, long projectCode return requestClient.post(url, headers, params); } - public HttpResponse executeTask(User loginUser, long projectCode, int processInstanceId, String startNodeList, TaskDependType taskDependType) { + public HttpResponse executeTask(User loginUser, long projectCode, int processInstanceId, String startNodeList, + TaskDependType taskDependType) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("processInstanceId", processInstanceId); @@ -107,4 +109,4 @@ public HttpResponse executeTask(User loginUser, long projectCode, int processIns return requestClient.post(url, headers, params); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java index ef72e997ae59..3f3b715c39b2 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessDefinitionPage.java @@ -25,6 +25,8 @@ import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.http.client.methods.CloseableHttpResponse; + import java.io.File; import java.util.HashMap; import java.util.Map; @@ -32,9 +34,6 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import 
org.apache.http.client.methods.CloseableHttpResponse; - - @Slf4j @AllArgsConstructor public class ProcessDefinitionPage { @@ -107,7 +106,8 @@ public HttpResponse queryProcessDefinitionList(User loginUser, long projectCode) return requestClient.get(url, headers, params); } - public HttpResponse releaseProcessDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) { + public HttpResponse releaseProcessDefinition(User loginUser, long projectCode, long code, + ReleaseState releaseState) { Map params = new HashMap<>(); params.put("loginUser", loginUser); params.put("code", code); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java index e07e1166b1c0..eba4e6303650 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/ProcessInstancePage.java @@ -19,13 +19,9 @@ package org.apache.dolphinscheduler.api.test.pages.workflow; -import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.test.core.Constants; import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import org.apache.dolphinscheduler.api.test.utils.RequestClient; -import org.apache.dolphinscheduler.common.enums.FailureStrategy; -import org.apache.dolphinscheduler.common.enums.TaskDependType; -import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.User; import java.util.HashMap; @@ -34,7 +30,6 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @AllArgsConstructor public class 
ProcessInstancePage { diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java index cfe23fd1bbc9..d6b3b9a74382 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/pages/workflow/SchedulerPage.java @@ -30,7 +30,6 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @AllArgsConstructor public class SchedulerPage { @@ -63,7 +62,6 @@ public HttpResponse queryScheduleList(User loginUser, long projectCode) { return requestClient.post(url, headers, params); } - public HttpResponse publishScheduleOnline(User loginUser, long projectCode, int scheduleId) { Map params = new HashMap<>(); params.put("loginUser", loginUser); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/JSONUtils.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/JSONUtils.java index 0d9a9b97429d..3a25c927ffb5 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/JSONUtils.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/JSONUtils.java @@ -17,12 +17,11 @@ package org.apache.dolphinscheduler.api.test.utils; -import static java.nio.charset.StandardCharsets.UTF_8; - import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT; import static 
com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; +import static java.nio.charset.StandardCharsets.UTF_8; import org.apache.dolphinscheduler.api.test.core.Constants; @@ -72,8 +71,11 @@ public class JSONUtils { * can use static singleton, inject: just make sure to reuse! */ private static final ObjectMapper objectMapper = - new ObjectMapper().configure(FAIL_ON_UNKNOWN_PROPERTIES, false).configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true).configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true) - .configure(REQUIRE_SETTERS_FOR_GETTERS, true).setTimeZone(TimeZone.getDefault()).setDateFormat(new SimpleDateFormat(Constants.YYYY_MM_DD_HH_MM_SS)); + new ObjectMapper().configure(FAIL_ON_UNKNOWN_PROPERTIES, false) + .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true) + .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true) + .configure(REQUIRE_SETTERS_FOR_GETTERS, true).setTimeZone(TimeZone.getDefault()) + .setDateFormat(new SimpleDateFormat(Constants.YYYY_MM_DD_HH_MM_SS)); private JSONUtils() { throw new UnsupportedOperationException("Construct JSONUtils"); diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/RequestClient.java b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/RequestClient.java index 2993ea8de3f4..b04511f250d8 100644 --- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/RequestClient.java +++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/java/org/apache/dolphinscheduler/api/test/utils/RequestClient.java @@ -23,6 +23,15 @@ import org.apache.dolphinscheduler.api.test.entity.HttpResponse; import 
org.apache.dolphinscheduler.api.test.entity.HttpResponseBody; +import org.apache.http.HttpEntity; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.mime.MultipartEntityBuilder; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BasicHeader; + import java.io.File; import java.io.FileInputStream; import java.util.HashMap; @@ -39,17 +48,6 @@ import okhttp3.RequestBody; import okhttp3.Response; -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.mime.MultipartEntityBuilder; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicHeader; -import org.apache.http.util.EntityUtils; - - @Slf4j public class RequestClient { @@ -70,10 +68,10 @@ public HttpResponse get(String url, Map headers, Map headers, Map headers, Map headers, Map headers, Map params, File file) { + public CloseableHttpResponse postWithFile(String url, Map headers, Map params, + File file) { try { Headers headersBuilder = Headers.of(headers); MultipartEntityBuilder builder = MultipartEntityBuilder.create(); builder.addTextBody("json", getParams(params), ContentType.MULTIPART_FORM_DATA); builder.addBinaryBody( - "file", - new FileInputStream(file), - ContentType.APPLICATION_OCTET_STREAM, - file.getName() - ); + "file", + new FileInputStream(file), + ContentType.APPLICATION_OCTET_STREAM, + file.getName()); HttpEntity multipart = builder.build(); String requestUrl = String.format("%s%s", Constants.DOLPHINSCHEDULER_API_URL, url); log.info("POST request to {}, Headers: {}, Params: {}", requestUrl, headersBuilder, params); @@ -199,7 +197,6 
@@ public CloseableHttpResponse postWithFile(String url, Map header return null; } - @SneakyThrows public HttpResponse delete(String url, Map headers, Map params) { if (headers == null) { @@ -214,10 +211,10 @@ public HttpResponse delete(String url, Map headers, Map new IllegalStateException("DolphinScheduler service is not healthy")); } } @@ -63,17 +67,18 @@ private DockerComposeContainer createDockerCompose(ExtensionContext context) final Class clazz = context.getRequiredTestClass(); final DolphinScheduler annotation = clazz.getAnnotation(DolphinScheduler.class); final List files = Stream.of(annotation.composeFiles()) - .map(it -> DolphinScheduler.class.getClassLoader().getResource(it)) - .filter(Objects::nonNull) - .map(URL::getPath) - .map(File::new) - .collect(Collectors.toList()); + .map(it -> DolphinScheduler.class.getClassLoader().getResource(it)) + .filter(Objects::nonNull) + .map(URL::getPath) + .map(File::new) + .collect(Collectors.toList()); compose = new DockerComposeContainer<>(files) - .withPull(true) - .withTailChildContainers(true) - .withLogConsumer(serviceName, outputFrame -> log.info(outputFrame.getUtf8String())) - .waitingFor(serviceName, Wait.forHealthcheck().withStartupTimeout(Duration.ofSeconds(Constants.DOCKER_COMPOSE_DEFAULT_TIMEOUT))); + .withPull(true) + .withTailChildContainers(true) + .withLogConsumer(serviceName, outputFrame -> log.info(outputFrame.getUtf8String())) + .waitingFor(serviceName, Wait.forHealthcheck() + .withStartupTimeout(Duration.ofSeconds(Constants.DOCKER_COMPOSE_DEFAULT_TIMEOUT))); return compose; } diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index 681a85318abd..40d556a17ed3 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -61,6 +61,11 @@ dolphinscheduler-meter + + org.apache.dolphinscheduler + dolphinscheduler-data-quality + + org.apache.dolphinscheduler dolphinscheduler-datasource-all diff --git 
a/dolphinscheduler-api/src/main/assembly/dolphinscheduler-api-server.xml b/dolphinscheduler-api/src/main/assembly/dolphinscheduler-api-server.xml index 77b2f54c64a2..5453f8fb154d 100644 --- a/dolphinscheduler-api/src/main/assembly/dolphinscheduler-api-server.xml +++ b/dolphinscheduler-api/src/main/assembly/dolphinscheduler-api-server.xml @@ -53,6 +53,14 @@ ${basedir}/../dolphinscheduler-common/src/main/resources **/*.properties + **/*.yaml + + conf + + + ${basedir}/../dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources + + **/*.yaml conf diff --git a/dolphinscheduler-api/src/main/bin/jvm_args_env.sh b/dolphinscheduler-api/src/main/bin/jvm_args_env.sh index c668944139ce..d953e04d2f04 100644 --- a/dolphinscheduler-api/src/main/bin/jvm_args_env.sh +++ b/dolphinscheduler-api/src/main/bin/jvm_args_env.sh @@ -24,6 +24,7 @@ -XX:+PrintGCDetails -Xloggc:gc.log +-XX:-OmitStackTraceInFastThrow -XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=dump.hprof diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java index c7e6d9778f60..6b183e369c11 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java @@ -18,17 +18,15 @@ package org.apache.dolphinscheduler.api; import org.apache.dolphinscheduler.api.metrics.ApiServerMetrics; -import org.apache.dolphinscheduler.common.enums.PluginType; +import org.apache.dolphinscheduler.common.CommonConfiguration; import org.apache.dolphinscheduler.common.thread.DefaultUncaughtExceptionHandler; +import org.apache.dolphinscheduler.dao.DaoConfiguration; import org.apache.dolphinscheduler.dao.PluginDao; -import org.apache.dolphinscheduler.dao.entity.PluginDefine; -import 
org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceProcessorProvider; +import org.apache.dolphinscheduler.plugin.storage.api.StorageConfiguration; import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; -import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.List; -import java.util.Map; +import org.apache.dolphinscheduler.registry.api.RegistryConfiguration; +import org.apache.dolphinscheduler.service.ServiceConfiguration; import lombok.extern.slf4j.Slf4j; @@ -37,18 +35,19 @@ import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.context.event.ApplicationReadyEvent; import org.springframework.boot.web.servlet.ServletComponentScan; -import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Import; import org.springframework.context.event.EventListener; +@Slf4j +@Import({DaoConfiguration.class, + CommonConfiguration.class, + ServiceConfiguration.class, + StorageConfiguration.class, + RegistryConfiguration.class}) @ServletComponentScan @SpringBootApplication -@ComponentScan("org.apache.dolphinscheduler") -@Slf4j public class ApiApplicationServer { - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private PluginDao pluginDao; @@ -61,16 +60,7 @@ public static void main(String[] args) { @EventListener public void run(ApplicationReadyEvent readyEvent) { log.info("Received spring application context ready event will load taskPlugin and write to DB"); - // install task plugin - taskPluginManager.loadPlugin(); - for (Map.Entry entry : taskPluginManager.getTaskChannelFactoryMap().entrySet()) { - String taskPluginName = entry.getKey(); - TaskChannelFactory taskChannelFactory = entry.getValue(); - List params = taskChannelFactory.getParams(); - String paramsJson = 
PluginParamsTransfer.transferParamsToJson(params); - - PluginDefine pluginDefine = new PluginDefine(taskPluginName, PluginType.TASK.getDesc(), paramsJson); - pluginDao.addOrUpdatePluginDefine(pluginDefine); - } + DataSourceProcessorProvider.initialize(); + TaskPluginManager.loadTaskPlugin(); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditMessage.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditMessage.java deleted file mode 100644 index 0d936f0ad5e1..000000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditMessage.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.api.audit; - -import org.apache.dolphinscheduler.common.enums.AuditOperationType; -import org.apache.dolphinscheduler.common.enums.AuditResourceType; -import org.apache.dolphinscheduler.dao.entity.User; - -import java.util.Date; - -public class AuditMessage { - - private User user; - - private Date auditDate; - - private AuditResourceType resourceType; - - private AuditOperationType operation; - - private Integer resourceId; - - public AuditMessage(User user, Date auditDate, AuditResourceType resourceType, AuditOperationType operation, - Integer resourceId) { - this.user = user; - this.auditDate = auditDate; - this.resourceType = resourceType; - this.operation = operation; - this.resourceId = resourceId; - } - - public User getUser() { - return user; - } - - public void setUser(User user) { - this.user = user; - } - - public Date getAuditDate() { - return auditDate; - } - - public void setAuditDate(Date auditDate) { - this.auditDate = auditDate; - } - - public AuditResourceType getResourceType() { - return resourceType; - } - - public void setResourceType(AuditResourceType resourceType) { - this.resourceType = resourceType; - } - - public AuditOperationType getOperation() { - return operation; - } - - public void setOperation(AuditOperationType operation) { - this.operation = operation; - } - - public Integer getResourceId() { - return resourceId; - } - - public void setResourceId(Integer resourceId) { - this.resourceId = resourceId; - } - - @Override - public String toString() { - return "AuditMessage{" - + "user=" + user - + ", Date=" + auditDate - + ", resourceType" + resourceType - + ", operation=" + operation - + ", resourceId='" + resourceId + '\''; - } -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java deleted file mode 100644 index 
34a5cd8ac0b9..000000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.audit; - -import org.apache.dolphinscheduler.api.configuration.ApiConfig; - -import java.util.List; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; - -import javax.annotation.PostConstruct; - -import lombok.extern.slf4j.Slf4j; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -@Component -@Slf4j -public class AuditPublishService { - - private final BlockingQueue auditMessageQueue = new LinkedBlockingQueue<>(); - - @Autowired - private List subscribers; - - @Autowired - private ApiConfig apiConfig; - - /** - * create a daemon thread to process the message queue - */ - @PostConstruct - private void init() { - if (apiConfig.isAuditEnable()) { - Thread thread = new Thread(this::doPublish); - thread.setDaemon(true); - thread.setName("Audit-Log-Consume-Thread"); - thread.start(); - } - } - - /** - * publish a new audit message - * - * @param message audit 
message - */ - public void publish(AuditMessage message) { - if (apiConfig.isAuditEnable() && !auditMessageQueue.offer(message)) { - log.error("Publish audit message failed, message:{}", message); - } - } - - /** - * subscribers execute the message processor method - */ - private void doPublish() { - AuditMessage message = null; - while (true) { - try { - message = auditMessageQueue.take(); - for (AuditSubscriber subscriber : subscribers) { - try { - subscriber.execute(message); - } catch (Exception e) { - log.error("Consume audit message failed, message:{}", message, e); - } - } - } catch (InterruptedException e) { - log.error("Consume audit message failed, message:{}", message, e); - Thread.currentThread().interrupt(); - break; - } - } - } - -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLog.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLog.java new file mode 100644 index 000000000000..440a67bf87ea --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLog.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit; + +import org.apache.dolphinscheduler.api.audit.enums.AuditType; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Custom annotation for logging and auditing operator actions in the system. + * This annotation can be applied to methods to indicate the type of operation, object type, + * and specific parameters to be recorded in the logs. + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface OperatorLog { + + AuditType auditType(); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLogAspect.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLogAspect.java new file mode 100644 index 000000000000..aaf35d5b6662 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorLogAspect.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit; + +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.audit.operator.AuditOperator; +import org.apache.dolphinscheduler.dao.entity.AuditLog; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; + +import java.lang.reflect.Method; +import java.util.List; +import java.util.Map; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.slf4j.Slf4j; + +import org.aspectj.lang.JoinPoint; +import org.aspectj.lang.annotation.AfterReturning; +import org.aspectj.lang.annotation.AfterThrowing; +import org.aspectj.lang.annotation.Aspect; +import org.aspectj.lang.annotation.Before; +import org.aspectj.lang.annotation.Pointcut; +import org.aspectj.lang.reflect.MethodSignature; +import org.springframework.stereotype.Component; + +import io.swagger.v3.oas.annotations.Operation; + +@Aspect +@Slf4j +@Component +public class OperatorLogAspect { + + private static final ThreadLocal auditThreadLocal = new ThreadLocal<>(); + + @Pointcut("@annotation(org.apache.dolphinscheduler.api.audit.OperatorLog)") + public void logPointCut() { + } + + @Before("logPointCut()") + public void before(JoinPoint point) { + MethodSignature signature = (MethodSignature) point.getSignature(); + Method method = signature.getMethod(); + OperatorLog operatorLog = method.getAnnotation(OperatorLog.class); + Operation operation = method.getAnnotation(Operation.class); + + if (operation == null) { + log.warn("Operation is null of method: {}", method.getName()); + return; + } + + Map paramsMap = OperatorUtils.getParamsMap(point, signature); + User user = OperatorUtils.getUser(paramsMap); + if (user == null) { + log.error("user is null"); + return; + } + + AuditType auditType = operatorLog.auditType(); + + try { + AuditOperator operator = 
SpringApplicationContext.getBean(operatorLog.auditType().getOperatorClass()); + List auditLogList = OperatorUtils.buildAuditLogList(operation.description(), auditType, user); + operator.setRequestParam(auditType, auditLogList, paramsMap); + AuditContext auditContext = + new AuditContext(auditLogList, paramsMap, operatorLog, System.currentTimeMillis(), operator); + auditThreadLocal.set(auditContext); + } catch (Throwable throwable) { + log.error("Record audit log error", throwable); + } + } + + @AfterReturning(value = "logPointCut()", returning = "returnValue") + public void afterReturning(Object returnValue) { + try { + AuditContext auditContext = auditThreadLocal.get(); + if (auditContext == null) { + return; + } + auditContext.getOperator().recordAudit(auditContext, returnValue); + } catch (Throwable throwable) { + log.error("Record audit log error", throwable); + } finally { + auditThreadLocal.remove(); + } + } + + @AfterThrowing("logPointCut()") + public void afterThrowing() { + auditThreadLocal.remove(); + } + + @Getter + @Setter + @AllArgsConstructor + public static class AuditContext { + + List auditLogList; + Map paramsMap; + OperatorLog operatorLog; + long beginTime; + AuditOperator operator; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorUtils.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorUtils.java new file mode 100644 index 000000000000..eecb53bfbffb --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/OperatorUtils.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit; + +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.enums.ExecuteType; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.enums.AuditModelType; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.dao.entity.AuditLog; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import lombok.extern.slf4j.Slf4j; + +import org.aspectj.lang.JoinPoint; +import org.aspectj.lang.reflect.MethodSignature; + +@Slf4j +public class OperatorUtils { + + public static boolean resultFail(Result result) { + return result != null && result.isFailed(); + } + + public static List buildAuditLogList(String apiDescription, AuditType auditType, User user) { + List auditLogList = new ArrayList<>(); + AuditLog auditLog = new AuditLog(); + auditLog.setUserId(user.getId()); + auditLog.setModelType(auditType.getAuditModelType().getName()); + auditLog.setOperationType(auditType.getAuditOperationType().getName()); + auditLog.setDescription(apiDescription); + auditLog.setCreateTime(new Date()); + auditLogList.add(auditLog); + return auditLogList; + } + + public static User getUser(Map paramsMap) { + 
for (Object object : paramsMap.values()) { + if (object instanceof User) { + return (User) object; + } + } + + return null; + } + + public static Map getParamsMap(JoinPoint point, MethodSignature signature) { + Object[] args = point.getArgs(); + String[] strings = signature.getParameterNames(); + + Map paramsMap = new HashMap<>(); + for (int i = 0; i < strings.length; i++) { + paramsMap.put(strings[i], args[i]); + } + + return paramsMap; + } + + public static AuditOperationType modifyReleaseOperationType(AuditType auditType, Map paramsMap) { + switch (auditType.getAuditOperationType()) { + case RELEASE: + ReleaseState releaseState = (ReleaseState) paramsMap.get(Constants.RELEASE_STATE); + if (releaseState == null) { + break; + } + switch (releaseState) { + case ONLINE: + return AuditOperationType.ONLINE; + case OFFLINE: + return AuditOperationType.OFFLINE; + default: + break; + } + break; + case EXECUTE: + ExecuteType executeType = (ExecuteType) paramsMap.get(Constants.EXECUTE_TYPE); + if (executeType == null) { + break; + } + switch (executeType) { + case REPEAT_RUNNING: + return AuditOperationType.RERUN; + case RECOVER_SUSPENDED_PROCESS: + return AuditOperationType.RESUME_PAUSE; + case START_FAILURE_TASK_PROCESS: + return AuditOperationType.RESUME_FAILURE; + case STOP: + return AuditOperationType.STOP; + case PAUSE: + return AuditOperationType.PAUSE; + case EXECUTE_TASK: + return AuditOperationType.EXECUTE; + default: + break; + } + break; + default: + break; + } + + return auditType.getAuditOperationType(); + } + + public static long getObjectIdentityByParam(String[] paramNameArr, Map paramsMap) { + for (String name : paramNameArr) { + if (paramsMap.get(name) instanceof String) { + String param = (String) paramsMap.get(name); + try { + if (param.matches("\\d+")) { + return Long.parseLong(param); + } + } catch (NumberFormatException e) { + return -1; + } + } + } + + return -1; + } + + public static Map getObjectIfFromReturnObject(Object obj, String[] params) { + 
Map map = new HashMap<>(); + + try { + Class clazz = obj.getClass(); + + if (clazz.equals(Long.class)) { + map.put(params[0], obj); + } + + while (clazz != null) { + Field[] fields = clazz.getDeclaredFields(); + for (Field field : fields) { + field.setAccessible(true); + + if (field.getName().equals(params[0])) { + map.put(params[0], field.get(obj)); + } + } + + clazz = clazz.getSuperclass(); + } + } catch (Exception e) { + log.error("get object if from return object error", e); + } + + return map; + } + + public static boolean isFolder(String name) { + return name != null && name.endsWith("/"); + } + + public static String getFileAuditObject(AuditType auditType, Map paramsMap, String name) { + boolean isFolder = auditType == AuditType.FOLDER_CREATE || isFolder(name); + return isFolder ? AuditModelType.FOLDER.getName() : AuditModelType.FILE.getName(); + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java new file mode 100644 index 000000000000..f0f749aea78e --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/constants/AuditLogConstants.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.constants; + +public final class AuditLogConstants { + + private AuditLogConstants() { + throw new UnsupportedOperationException("Construct Constants"); + } + + public static final String CODE = "code"; + public static final String CODES = "codes"; + public static final String VERSION = "version"; + public static final String PROCESS_DEFINITION_CODE = "processDefinitionCode"; + public static final String PROCESS_DEFINITION_CODES = "processDefinitionCodes"; + public static final String PROCESS_INSTANCE_IDS = "processInstanceIds"; + public static final String PROCESS_INSTANCE_ID = "processInstanceId"; + public static final String WORKFLOW_DEFINITION_CODE = "workflowDefinitionCode"; + public static final String TYPE = "type"; + public static final String NAME = "name"; + public static final String ID = "id"; + public static final String USER_ID = "userId"; + public static final String QUEUE_ID = "queueId"; + public static final String PRIORITY = "priority"; + public static final String CLUSTER_CODE = "clusterCode"; + public static final String ENVIRONMENT_CODE = "environmentCode"; + public static final String ALIAS = "alias"; + public static final String FILE_NAME = "fileName"; + public static final String FULL_NAME = "fullName"; + public static final String FUNC_NAME = "funcName"; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java new file mode 100644 index 000000000000..59cca4279f40 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/enums/AuditType.java @@ -0,0 +1,241 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.enums; + +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ALIAS; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.CLUSTER_CODE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.CODE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.CODES; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ENVIRONMENT_CODE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.FILE_NAME; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.FULL_NAME; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.ID; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.NAME; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PRIORITY; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_DEFINITION_CODES; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_ID; +import static 
org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.PROCESS_INSTANCE_IDS; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.QUEUE_ID; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.TYPE; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.USER_ID; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.VERSION; +import static org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants.WORKFLOW_DEFINITION_CODE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_GROUP; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.ALARM_INSTANCE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.CLUSTER; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.DATASOURCE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.ENVIRONMENT; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.FILE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.FOLDER; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.K8S_NAMESPACE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.PROCESS; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.PROCESS_INSTANCE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.PROJECT; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.SCHEDULE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.TASK; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.TASK_GROUP; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.TASK_INSTANCE; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.TENANT; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.TOKEN; +import static 
org.apache.dolphinscheduler.common.enums.AuditModelType.USER; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.WORKER_GROUP; +import static org.apache.dolphinscheduler.common.enums.AuditModelType.YARN_QUEUE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.BATCH_DELETE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.BATCH_RERUN; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.BATCH_START; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.CLOSE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.COPY; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.CREATE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.DELETE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.DELETE_VERSION; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.EXECUTE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.EXPORT; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.FORCE_SUCCESS; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.IMPORT; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.MODIFY; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.OFFLINE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.ONLINE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.RELEASE; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.START; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.SWITCH_VERSION; +import static org.apache.dolphinscheduler.common.enums.AuditOperationType.UPDATE; + +import org.apache.dolphinscheduler.api.audit.operator.AuditOperator; +import 
org.apache.dolphinscheduler.api.audit.operator.impl.AlertGroupAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.AlertInstanceAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ClusterAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.DatasourceAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.EnvironmentAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.K8SNamespaceAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ProcessAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ProcessInstanceAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ProjectAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ResourceAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.ScheduleAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.TaskAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.TaskGroupAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.TaskInstancesAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.TenantAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.TokenAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.UserAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.WorkerGroupAuditOperatorImpl; +import org.apache.dolphinscheduler.api.audit.operator.impl.YarnQueueAuditOperatorImpl; +import org.apache.dolphinscheduler.common.enums.AuditModelType; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; + +import lombok.Getter; + +@Getter +public enum AuditType { + + PROJECT_CREATE(PROJECT, CREATE, ProjectAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + PROJECT_UPDATE(PROJECT, UPDATE, 
ProjectAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + PROJECT_DELETE(PROJECT, DELETE, ProjectAuditOperatorImpl.class, new String[]{CODE}, new String[]{}), + + PROCESS_CREATE(PROCESS, CREATE, ProcessAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + PROCESS_UPDATE(PROCESS, UPDATE, ProcessAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + PROCESS_SWITCH_VERSION(PROCESS, SWITCH_VERSION, ProcessAuditOperatorImpl.class, new String[]{CODE, VERSION}, + new String[]{}), + PROCESS_DELETE_VERSION(PROCESS, DELETE_VERSION, ProcessAuditOperatorImpl.class, new String[]{CODE, VERSION}, + new String[]{}), + PROCESS_RELEASE(PROCESS, RELEASE, ProcessAuditOperatorImpl.class, new String[]{WORKFLOW_DEFINITION_CODE}, + new String[]{}), + PROCESS_COPY(PROCESS, COPY, ProcessAuditOperatorImpl.class, new String[]{CODES}, new String[]{}), + PROCESS_EXPORT(PROCESS, EXPORT, ProcessAuditOperatorImpl.class, new String[]{CODES}, new String[]{}), + PROCESS_DELETE(PROCESS, DELETE, ProcessAuditOperatorImpl.class, new String[]{CODE}, new String[]{}), + PROCESS_BATCH_DELETE(PROCESS, BATCH_DELETE, ProcessAuditOperatorImpl.class, new String[]{CODES}, new String[]{}), + PROCESS_START(PROCESS, START, ProcessAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE}, + new String[]{}), + PROCESS_BATCH_START(PROCESS, BATCH_START, ProcessAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODES}, + new String[]{}), + PROCESS_BATCH_RERUN(PROCESS, BATCH_RERUN, ProcessInstanceAuditOperatorImpl.class, + new String[]{PROCESS_INSTANCE_IDS}, + new String[]{}), + PROCESS_EXECUTE(PROCESS, EXECUTE, ProcessInstanceAuditOperatorImpl.class, new String[]{PROCESS_INSTANCE_ID}, + new String[]{}), + PROCESS_IMPORT(PROCESS, IMPORT, ProcessAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + PROCESS_INSTANCE_UPDATE(PROCESS_INSTANCE, UPDATE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID}, + new String[]{}), + PROCESS_INSTANCE_DELETE(PROCESS_INSTANCE, 
DELETE, ProcessInstanceAuditOperatorImpl.class, new String[]{ID}, + new String[]{}), + PROCESS_INSTANCE_BATCH_DELETE(PROCESS_INSTANCE, BATCH_DELETE, ProcessInstanceAuditOperatorImpl.class, + new String[]{PROCESS_INSTANCE_IDS}, new String[]{}), + + TASK_CREATE(TASK, CREATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + TASK_UPDATE(TASK, UPDATE, TaskAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + TASK_SWITCH_VERSION(TASK, SWITCH_VERSION, TaskAuditOperatorImpl.class, new String[]{CODE, VERSION}, new String[]{}), + TASK_DELETE_VERSION(TASK, DELETE_VERSION, TaskAuditOperatorImpl.class, new String[]{CODE, VERSION}, new String[]{}), + TASK_DELETE(TASK, DELETE, TaskAuditOperatorImpl.class, new String[]{CODE}, new String[]{}), + TASK_RELEASE(TASK, RELEASE, TaskAuditOperatorImpl.class, new String[]{CODE}, new String[]{}), + TASK_START(TASK, START, TaskAuditOperatorImpl.class, new String[]{CODE}, new String[]{}), + TASK_INSTANCE_FORCE_SUCCESS(TASK_INSTANCE, FORCE_SUCCESS, TaskInstancesAuditOperatorImpl.class, new String[]{ID}, + new String[]{}), + + SCHEDULE_CREATE(SCHEDULE, CREATE, ScheduleAuditOperatorImpl.class, new String[]{PROCESS_DEFINITION_CODE}, + new String[]{ID}), + SCHEDULE_UPDATE(SCHEDULE, UPDATE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + SCHEDULE_ONLINE(SCHEDULE, ONLINE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + SCHEDULE_OFFLINE(SCHEDULE, OFFLINE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + SCHEDULE_DELETE(SCHEDULE, DELETE, ScheduleAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + FOLDER_CREATE(FOLDER, CREATE, ResourceAuditOperatorImpl.class, new String[]{TYPE, ALIAS}, new String[]{}), + FILE_CREATE(FILE, CREATE, ResourceAuditOperatorImpl.class, new String[]{TYPE, FILE_NAME, ALIAS}, new String[]{}), + FILE_UPDATE(FILE, UPDATE, ResourceAuditOperatorImpl.class, new String[]{TYPE, FILE_NAME, ALIAS}, new String[]{}), + 
FILE_DELETE(FILE, DELETE, ResourceAuditOperatorImpl.class, new String[]{FULL_NAME}, new String[]{}), + + TASK_GROUP_CREATE(TASK_GROUP, CREATE, TaskGroupAuditOperatorImpl.class, new String[]{NAME}, new String[]{}), + TASK_GROUP_UPDATE(TASK_GROUP, UPDATE, TaskGroupAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + TASK_GROUP_CLOSE(TASK_GROUP, CLOSE, TaskGroupAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + TASK_GROUP_START(TASK_GROUP, START, TaskGroupAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + TASK_GROUP_MODIFY(TASK_GROUP, MODIFY, TaskGroupAuditOperatorImpl.class, new String[]{QUEUE_ID, PRIORITY}, + new String[]{}), + + DATASOURCE_CREATE(DATASOURCE, CREATE, DatasourceAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + DATASOURCE_UPDATE(DATASOURCE, UPDATE, DatasourceAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + DATASOURCE_DELETE(DATASOURCE, DELETE, DatasourceAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + TENANT_CREATE(TENANT, CREATE, TenantAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + TENANT_UPDATE(TENANT, UPDATE, TenantAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + TENANT_DELETE(TENANT, DELETE, TenantAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + USER_CREATE(USER, CREATE, UserAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + USER_UPDATE(USER, UPDATE, UserAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + USER_DELETE(USER, DELETE, UserAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + ALARM_GROUP_CREATE(ALARM_GROUP, CREATE, AlertGroupAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + ALARM_GROUP_UPDATE(ALARM_GROUP, UPDATE, AlertGroupAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + ALARM_GROUP_DELETE(ALARM_GROUP, DELETE, AlertGroupAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + ALARM_INSTANCE_CREATE(ALARM_INSTANCE, CREATE, AlertInstanceAuditOperatorImpl.class, new 
String[]{}, + new String[]{ID}), + ALARM_INSTANCE_UPDATE(ALARM_INSTANCE, UPDATE, AlertInstanceAuditOperatorImpl.class, new String[]{}, + new String[]{ID}), + ALARM_INSTANCE_DELETE(ALARM_INSTANCE, DELETE, AlertInstanceAuditOperatorImpl.class, new String[]{ID}, + new String[]{}), + + WORKER_GROUP_CREATE(WORKER_GROUP, CREATE, WorkerGroupAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + WORKER_GROUP_DELETE(WORKER_GROUP, DELETE, WorkerGroupAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + YARN_QUEUE_CREATE(YARN_QUEUE, CREATE, YarnQueueAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + YARN_QUEUE_UPDATE(YARN_QUEUE, UPDATE, YarnQueueAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + YARN_QUEUE_DELETE(YARN_QUEUE, DELETE, YarnQueueAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + ENVIRONMENT_CREATE(ENVIRONMENT, CREATE, EnvironmentAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + ENVIRONMENT_UPDATE(ENVIRONMENT, UPDATE, EnvironmentAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + ENVIRONMENT_DELETE(ENVIRONMENT, DELETE, EnvironmentAuditOperatorImpl.class, new String[]{ENVIRONMENT_CODE}, + new String[]{}), + + CLUSTER_CREATE(CLUSTER, CREATE, ClusterAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + CLUSTER_UPDATE(CLUSTER, UPDATE, ClusterAuditOperatorImpl.class, new String[]{}, new String[]{CODE}), + CLUSTER_DELETE(CLUSTER, DELETE, ClusterAuditOperatorImpl.class, new String[]{CLUSTER_CODE}, new String[]{}), + + K8S_NAMESPACE_CREATE(K8S_NAMESPACE, CREATE, K8SNamespaceAuditOperatorImpl.class, new String[]{}, new String[]{ID}), + K8S_NAMESPACE_DELETE(K8S_NAMESPACE, DELETE, K8SNamespaceAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + + TOKEN_CREATE(TOKEN, CREATE, TokenAuditOperatorImpl.class, new String[]{}, new String[]{USER_ID}), + TOKEN_UPDATE(TOKEN, UPDATE, TokenAuditOperatorImpl.class, new String[]{}, new String[]{USER_ID}), + TOKEN_DELETE(TOKEN, DELETE, 
TokenAuditOperatorImpl.class, new String[]{ID}, new String[]{}), + ; + + private final Class operatorClass; + private final AuditModelType auditModelType; + private final AuditOperationType auditOperationType; + + /** + * The names of the fields in the API request to be recorded. + * Represents an array of key-value pairs, e.g., [ID, "status"]. + */ + private final String[] requestParamName; + + /** + * The names of the fields in the returned object to be recorded. + * Represents an array of field names, e.g., [ID, CODE]. + * Specify the field names to record from the returned object. + */ + private final String[] returnObjectFieldName; + + AuditType(AuditModelType auditModelType, AuditOperationType auditOperationType, + Class operatorClass, String[] requestParamName, String[] returnObjectFieldName) { + this.auditModelType = auditModelType; + this.auditOperationType = auditOperationType; + this.operatorClass = operatorClass; + this.requestParamName = requestParamName; + this.returnObjectFieldName = returnObjectFieldName; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/AuditOperator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/AuditOperator.java new file mode 100644 index 000000000000..c3a9a845f572 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/AuditOperator.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator; + +import org.apache.dolphinscheduler.api.audit.OperatorLogAspect; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.dao.entity.AuditLog; + +import java.util.List; +import java.util.Map; + +public interface AuditOperator { + + void recordAudit(OperatorLogAspect.AuditContext auditContext, Object returnValue); + + void setRequestParam(AuditType auditType, List auditLogList, Map paramsMap); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/BaseAuditOperator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/BaseAuditOperator.java new file mode 100644 index 000000000000..1e34ff296764 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/BaseAuditOperator.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator; + +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.OperatorLogAspect; +import org.apache.dolphinscheduler.api.audit.OperatorUtils; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.service.AuditService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.AuditLog; + +import org.apache.commons.lang3.math.NumberUtils; + +import java.util.List; +import java.util.Map; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.google.common.base.Strings; + +@Service +@Slf4j +public abstract class BaseAuditOperator implements AuditOperator { + + @Autowired + private AuditService auditService; + + @Override + public void recordAudit(OperatorLogAspect.AuditContext auditContext, Object returnValue) { + Result result = new Result<>(); + + if (returnValue instanceof Result) { + result = (Result) returnValue; + if (OperatorUtils.resultFail(result)) { + log.error("request fail, code {}", result.getCode()); + return; + } + } + + long latency = System.currentTimeMillis() - auditContext.getBeginTime(); + List auditLogList = auditContext.getAuditLogList(); + + Map paramsMap = auditContext.getParamsMap(); + OperatorLog operatorLog = auditContext.getOperatorLog(); + AuditType auditType = operatorLog.auditType(); + + 
setObjectIdentityFromReturnObject(auditType, result, auditLogList); + modifyAuditOperationType(auditType, paramsMap, auditLogList); + modifyAuditObjectType(auditType, paramsMap, auditLogList); + + auditLogList.forEach(auditLog -> auditLog.setLatency(latency)); + auditLogList.forEach(auditLog -> auditService.addAudit(auditLog)); + } + + @Override + public void setRequestParam(AuditType auditType, List auditLogList, Map paramsMap) { + String[] paramNameArr = auditType.getRequestParamName(); + + if (paramNameArr.length == 0) { + return; + } + + modifyRequestParams(paramNameArr, paramsMap, auditLogList); + setObjectByParam(paramNameArr, paramsMap, auditLogList); + + if (auditLogList.get(0).getModelId() == null) { + auditLogList.get(0).setModelId(OperatorUtils.getObjectIdentityByParam(paramNameArr, paramsMap)); + } + } + + protected void setObjectByParam(String[] paramNameArr, Map paramsMap, + List auditLogList) { + + String name = paramNameArr[0]; + Object value = paramsMap.get(name); + + if (value == null) { + return; + } + + String objName = getObjectNameFromIdentity(value); + + if (Strings.isNullOrEmpty(objName)) { + auditLogList.get(0).setModelName(value.toString()); + return; + } + + try { + long objectId = Long.parseLong(value.toString()); + auditLogList.get(0).setModelId(objectId); + } catch (NumberFormatException e) { + log.error("value is not long, value: {}", value); + } + + auditLogList.get(0).setModelName(objName); + } + + protected void setObjectByParamArr(String[] paramNameArr, Map paramsMap, + List auditLogList) { + + AuditLog auditLog = auditLogList.get(0); + for (String param : paramNameArr) { + if (!paramsMap.containsKey(param)) { + continue; + } + + String[] identityArr = ((String) paramsMap.get(param)).split(","); + for (String identityString : identityArr) { + long identity = toLong(identityString); + + String value = getObjectNameFromIdentity(identity); + + if (value == null) { + continue; + } + + auditLog.setModelId(identity); + 
auditLog.setModelName(value); + auditLogList.add(auditLog); + auditLog = AuditLog.copyNewOne(auditLog); + } + } + auditLogList.remove(0); + } + + protected void setObjectIdentityFromReturnObject(AuditType auditType, Result result, + List auditLogList) { + String[] returnObjectFieldNameArr = auditType.getReturnObjectFieldName(); + if (returnObjectFieldNameArr.length == 0) { + return; + } + Map returnObjectMap = + OperatorUtils.getObjectIfFromReturnObject(result.getData(), returnObjectFieldNameArr); + modifyObjectFromReturnObject(returnObjectFieldNameArr, returnObjectMap, auditLogList); + setObjectNameFromReturnIdentity(auditLogList); + } + + protected void setObjectNameFromReturnIdentity(List auditLogList) { + auditLogList + .forEach(auditLog -> auditLog.setModelName(getObjectNameFromIdentity(auditLog.getModelId()))); + } + + protected void modifyObjectFromReturnObject(String[] params, Map returnObjectMap, + List auditLogList) { + if (returnObjectMap.isEmpty() || returnObjectMap.get(params[0]) == null) { + return; + } + + Long objId = toLong(returnObjectMap.get(params[0])); + + if (objId != -1) { + auditLogList.get(0).setModelId(objId); + } + } + + protected Long toLong(Object str) { + if (str == null) { + return -1L; + } + + return NumberUtils.toLong(str.toString(), -1); + } + + protected String getObjectNameFromIdentity(Object identity) { + return identity.toString(); + } + + protected void modifyRequestParams(String[] paramNameArr, Map paramsMap, + List auditLogList) { + + } + + protected void modifyAuditObjectType(AuditType auditType, Map paramsMap, + List auditLogList) { + + } + + protected void modifyAuditOperationType(AuditType auditType, Map paramsMap, + List auditLogList) { + + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertGroupAuditOperatorImpl.java similarity index 54% rename from 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberImpl.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertGroupAuditOperatorImpl.java index b5ed361c24ee..65f0f491be9d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriberImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertGroupAuditOperatorImpl.java @@ -15,28 +15,29 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.api.audit; +package org.apache.dolphinscheduler.api.audit.operator.impl; -import org.apache.dolphinscheduler.dao.entity.AuditLog; -import org.apache.dolphinscheduler.dao.mapper.AuditLogMapper; +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; -@Component -public class AuditSubscriberImpl implements AuditSubscriber { +@Service +public class AlertGroupAuditOperatorImpl extends BaseAuditOperator { @Autowired - private AuditLogMapper logMapper; + private AlertGroupMapper alertGroupMapper; @Override - public void execute(AuditMessage message) { - AuditLog auditLog = new AuditLog(); - auditLog.setUserId(message.getUser().getId()); - auditLog.setResourceType(message.getResourceType().getCode()); - auditLog.setOperation(message.getOperation().getCode()); - auditLog.setTime(message.getAuditDate()); - auditLog.setResourceId(message.getResourceId()); - logMapper.insert(auditLog); + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + AlertGroup obj = alertGroupMapper.selectById(objId); + return obj == null ? 
"" : obj.getGroupName(); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertInstanceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertInstanceAuditOperatorImpl.java new file mode 100644 index 000000000000..d2c28832f0df --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/AlertInstanceAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; +import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class AlertInstanceAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private AlertPluginInstanceMapper alertPluginInstanceMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + AlertPluginInstance obj = alertPluginInstanceMapper.selectById(objId); + return obj == null ? "" : obj.getInstanceName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ClusterAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ClusterAuditOperatorImpl.java new file mode 100644 index 000000000000..4f1cabc043b6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ClusterAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.Cluster; +import org.apache.dolphinscheduler.dao.mapper.ClusterMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class ClusterAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private ClusterMapper clusterMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + Cluster obj = clusterMapper.queryByClusterCode(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/DatasourceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/DatasourceAuditOperatorImpl.java new file mode 100644 index 000000000000..a859ab8ceaf4 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/DatasourceAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class DatasourceAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + DataSource obj = dataSourceMapper.selectById(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/EnvironmentAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/EnvironmentAuditOperatorImpl.java new file mode 100644 index 000000000000..cf8a93e366d1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/EnvironmentAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.Environment; +import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class EnvironmentAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private EnvironmentMapper environmentMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + Environment obj = environmentMapper.queryByEnvironmentCode(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/K8SNamespaceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/K8SNamespaceAuditOperatorImpl.java new file mode 100644 index 000000000000..9f0fe6b4e524 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/K8SNamespaceAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.K8sNamespace; +import org.apache.dolphinscheduler.dao.mapper.K8sNamespaceMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class K8SNamespaceAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private K8sNamespaceMapper k8sNamespaceMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + K8sNamespace obj = k8sNamespaceMapper.selectById(objId); + return obj == null ? 
"" : obj.getNamespace(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessAuditOperatorImpl.java new file mode 100644 index 000000000000..5a65adbe5579 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessAuditOperatorImpl.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.OperatorUtils; +import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; +import org.apache.dolphinscheduler.dao.entity.AuditLog; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; + +import java.util.List; +import java.util.Map; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class ProcessAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Override + public void modifyAuditOperationType(AuditType auditType, Map paramsMap, + List auditLogList) { + AuditOperationType auditOperationType = OperatorUtils.modifyReleaseOperationType(auditType, paramsMap); + auditLogList.forEach(auditLog -> auditLog.setOperationType(auditOperationType.getName())); + } + + @Override + protected void setObjectByParam(String[] paramNameArr, Map paramsMap, + List auditLogList) { + if (paramNameArr[0].equals(AuditLogConstants.CODES) + || paramNameArr[0].equals(AuditLogConstants.PROCESS_DEFINITION_CODES) + || paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) { + super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList); + } else { + super.setObjectByParam(paramNameArr, paramsMap, auditLogList); + } + if (paramsMap.containsKey(AuditLogConstants.VERSION)) { + if (paramsMap.get(AuditLogConstants.VERSION) != null) { + auditLogList.get(0).setDetail(paramsMap.get(AuditLogConstants.VERSION).toString()); + } else { + auditLogList.get(0).setDetail("latest"); + } + } + } + + @Override + protected String 
getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + ProcessDefinition obj = processDefinitionMapper.queryByCode(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java new file mode 100644 index 000000000000..cb45d5abba33 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProcessInstanceAuditOperatorImpl.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.OperatorUtils; +import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; +import org.apache.dolphinscheduler.dao.entity.AuditLog; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; + +import org.apache.commons.lang3.math.NumberUtils; + +import java.util.List; +import java.util.Map; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class ProcessInstanceAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + @Override + public void modifyAuditOperationType(AuditType auditType, Map paramsMap, + List auditLogList) { + AuditOperationType auditOperationType = OperatorUtils.modifyReleaseOperationType(auditType, paramsMap); + auditLogList.forEach(auditLog -> auditLog.setOperationType(auditOperationType.getName())); + } + + @Override + protected void setObjectByParam(String[] paramNameArr, Map paramsMap, + List auditLogList) { + if (paramNameArr[0].equals(AuditLogConstants.PROCESS_INSTANCE_IDS)) { + super.setObjectByParamArr(paramNameArr, paramsMap, auditLogList); + } else { + super.setObjectByParam(paramNameArr, paramsMap, auditLogList); + } + } + + @Override + protected String getObjectNameFromIdentity(Object identity) { + int objId = NumberUtils.toInt(identity.toString(), -1); + if (objId == -1) { + return ""; + } + + ProcessInstance obj = processInstanceMapper.queryDetailById(objId); + return obj == null ? 
"" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProjectAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProjectAuditOperatorImpl.java new file mode 100644 index 000000000000..62cea94f951c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ProjectAuditOperatorImpl.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +@Slf4j +public class ProjectAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private ProjectMapper projectMapper; + + @Override + protected String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + Project obj = projectMapper.queryByCode(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ResourceAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ResourceAuditOperatorImpl.java new file mode 100644 index 000000000000..303f7bfdb372 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ResourceAuditOperatorImpl.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.OperatorUtils; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.AuditLog; + +import java.util.List; +import java.util.Map; + +import org.springframework.stereotype.Service; + +@Service +public class ResourceAuditOperatorImpl extends BaseAuditOperator { + + @Override + public void modifyAuditObjectType(AuditType auditType, Map paramsMap, List auditLogList) { + auditLogList.forEach(auditLog -> auditLog + .setModelType(OperatorUtils.getFileAuditObject(auditType, paramsMap, auditLog.getModelName()))); + } + + @Override + protected void setObjectByParam(String[] paramNameArr, Map paramsMap, + List auditLogList) { + + Object objName = getFileNameFromParam(paramNameArr, paramsMap); + + if (objName == null) { + return; + } + + auditLogList.get(0).setModelName(objName.toString()); + } + + private String getFileNameFromParam(String[] paramNameArr, Map paramsMap) { + for (String param : paramNameArr) { + if (!param.equals("type") && paramsMap.containsKey(param)) { + return paramsMap.get(param).toString(); + } + } + + return null; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java new file mode 100644 index 000000000000..be2ea935ed10 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/ScheduleAuditOperatorImpl.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
package org.apache.dolphinscheduler.api.audit.operator.impl;

import org.apache.dolphinscheduler.api.audit.OperatorUtils;
import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.enums.AuditType;
import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.dao.entity.AuditLog;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Audit operator for schedule operations. Schedule endpoints are keyed by the
 * schedule id, but the audit log should point at the owning workflow
 * definition, so the id is translated to the workflow definition code.
 */
@Service
public class ScheduleAuditOperatorImpl extends BaseAuditOperator {

    @Autowired
    private ScheduleMapper scheduleMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    @Override
    public void modifyRequestParams(String[] paramNameArr, Map<String, Object> paramsMap,
                                    List<AuditLog> auditLogList) {
        if (!AuditLogConstants.ID.equals(paramNameArr[0])) {
            return;
        }
        // Numbers deserialized from request params may arrive as Integer or
        // Long; Number.intValue() tolerates both where a blind (int) cast
        // would throw ClassCastException on Long.
        int id = ((Number) paramsMap.get(paramNameArr[0])).intValue();
        Schedule schedule = scheduleMapper.selectById(id);
        if (schedule != null) {
            // Re-key the audit on the workflow definition code and keep the
            // original schedule id in the detail column for traceability.
            paramsMap.put(AuditLogConstants.CODE, schedule.getProcessDefinitionCode());
            paramNameArr[0] = AuditLogConstants.CODE;
            auditLogList.forEach(auditLog -> auditLog.setDetail(String.valueOf(id)));
        }
    }

    @Override
    protected void setObjectIdentityFromReturnObject(AuditType auditType, Result result,
                                                     List<AuditLog> auditLogList) {
        String[] returnObjectFieldNameArr = auditType.getReturnObjectFieldName();
        if (returnObjectFieldNameArr.length == 0) {
            return;
        }

        Map<String, Object> returnObjectMap =
                OperatorUtils.getObjectIfFromReturnObject(result.getData(), returnObjectFieldNameArr);
        Object identity = returnObjectMap.get(returnObjectFieldNameArr[0]);
        if (identity == null) {
            // The return object does not expose the expected field; leave the
            // detail untouched instead of throwing NullPointerException.
            return;
        }
        auditLogList.forEach(auditLog -> auditLog.setDetail(identity.toString()));
    }

    /**
     * Resolves the workflow definition name from its code; returns an empty
     * string for an invalid identity or a definition that no longer exists.
     */
    @Override
    protected String getObjectNameFromIdentity(Object identity) {
        Long objId = toLong(identity);
        if (objId == -1) {
            return "";
        }

        ProcessDefinition obj = processDefinitionMapper.queryByCode(objId);
        return obj == null ? "" : obj.getName();
    }
}
package org.apache.dolphinscheduler.api.audit.operator.impl;

import org.apache.dolphinscheduler.api.audit.OperatorUtils;
import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.enums.AuditType;
import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator;
import org.apache.dolphinscheduler.common.enums.AuditOperationType;
import org.apache.dolphinscheduler.dao.entity.AuditLog;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Audit operator for task definition operations.
 */
@Service
public class TaskAuditOperatorImpl extends BaseAuditOperator {

    @Autowired
    private TaskDefinitionMapper taskDefinitionMapper;

    @Override
    public void modifyAuditOperationType(AuditType auditType, Map<String, Object> paramsMap,
                                         List<AuditLog> auditLogList) {
        // Release endpoints share one audit type; derive the effective
        // operation (e.g. online/offline) from the request parameters.
        AuditOperationType auditOperationType = OperatorUtils.modifyReleaseOperationType(auditType, paramsMap);
        auditLogList.forEach(auditLog -> auditLog.setOperationType(auditOperationType.getName()));
    }

    @Override
    protected void setObjectByParam(String[] paramNameArr, Map<String, Object> paramsMap,
                                    List<AuditLog> auditLogList) {

        super.setObjectByParam(paramNameArr, paramsMap, auditLogList);
        // Record the task definition version (when supplied) in the detail
        // column; guard against an empty log list before indexing get(0).
        if (paramsMap.containsKey(AuditLogConstants.VERSION) && !auditLogList.isEmpty()) {
            auditLogList.get(0).setDetail(paramsMap.get(AuditLogConstants.VERSION).toString());
        }
    }

    /**
     * Resolves the task definition name from its code; returns an empty string
     * for an invalid identity or a definition that no longer exists.
     */
    @Override
    protected String getObjectNameFromIdentity(Object identity) {
        Long objId = toLong(identity);
        if (objId == -1) {
            return "";
        }

        TaskDefinition obj = taskDefinitionMapper.queryByCode(objId);
        return obj == null ? "" : obj.getName();
    }
}
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.TaskGroup; +import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class TaskGroupAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private TaskGroupMapper taskGroupMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + TaskGroup obj = taskGroupMapper.selectById(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TaskInstancesAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TaskInstancesAuditOperatorImpl.java new file mode 100644 index 000000000000..8d888b7e574b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TaskInstancesAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class TaskInstancesAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private TaskInstanceMapper taskInstanceMapper; + + @Override + protected String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + TaskInstance obj = taskInstanceMapper.selectById(objId); + return obj == null ? "" : obj.getName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TenantAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TenantAuditOperatorImpl.java new file mode 100644 index 000000000000..f76ddd4124ee --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TenantAuditOperatorImpl.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class TenantAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private TenantMapper tenantMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + Tenant obj = tenantMapper.selectById(objId); + return obj == null ? "" : obj.getTenantCode(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TokenAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TokenAuditOperatorImpl.java new file mode 100644 index 000000000000..96f1964996d3 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/TokenAuditOperatorImpl.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
package org.apache.dolphinscheduler.api.audit.operator.impl;

import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.enums.AuditType;
import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.AuditLog;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Audit operator for access token operations. Tokens are attributed to their
 * owning user in the audit log rather than to the token row itself.
 */
@Service
public class TokenAuditOperatorImpl extends BaseAuditOperator {

    @Autowired
    private AccessTokenMapper accessTokenMapper;

    @Autowired
    private UserMapper userMapper;

    @Override
    public void modifyAuditOperationType(AuditType auditType, Map<String, Object> paramsMap,
                                         List<AuditLog> auditLogList) {
        // NOTE(review): despite its name this override rewrites the audited
        // model (to the token's owning user), not the operation type — confirm
        // intent against BaseAuditOperator's hook contract.
        Object userIdParam = paramsMap.get(AuditLogConstants.USER_ID);
        if (userIdParam == null) {
            return;
        }
        User user = userMapper.selectById(userIdParam.toString());
        if (user == null) {
            // Unknown user id: keep the default model info instead of
            // throwing NullPointerException on user.getUserName().
            return;
        }
        auditLogList.forEach(auditLog -> {
            auditLog.setModelName(user.getUserName());
            auditLog.setModelId(Long.valueOf(user.getId()));
        });
    }

    /**
     * Resolves the owning user's name from a token id; returns an empty string
     * for an invalid identity or a token that no longer exists.
     */
    @Override
    public String getObjectNameFromIdentity(Object identity) {
        Long objId = toLong(identity);
        if (objId == -1) {
            return "";
        }

        AccessToken obj = accessTokenMapper.selectById(objId);
        return obj == null ? "" : obj.getUserName();
    }
}
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class UserAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private UserMapper userMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + User obj = userMapper.selectById(objId); + return obj == null ? "" : obj.getUserName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkerGroupAuditOperatorImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkerGroupAuditOperatorImpl.java new file mode 100644 index 000000000000..f7a264290433 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/operator/impl/WorkerGroupAuditOperatorImpl.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package org.apache.dolphinscheduler.api.audit.operator.impl;

import org.apache.dolphinscheduler.api.audit.constants.AuditLogConstants;
import org.apache.dolphinscheduler.api.audit.enums.AuditType;
import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator;
import org.apache.dolphinscheduler.common.enums.AuditOperationType;
import org.apache.dolphinscheduler.dao.entity.AuditLog;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper;

import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Audit operator for worker group operations.
 */
@Service
public class WorkerGroupAuditOperatorImpl extends BaseAuditOperator {

    @Autowired
    private WorkerGroupMapper workerGroupMapper;

    @Override
    public void modifyAuditOperationType(AuditType auditType, Map<String, Object> paramsMap,
                                         List<AuditLog> auditLogList) {
        // The save endpoint is declared as CREATE; a non-zero id in the
        // request means an existing worker group is being edited, so relabel
        // the audited operation as UPDATE.
        Object id = paramsMap.get(AuditLogConstants.ID);
        if (auditType.getAuditOperationType() == AuditOperationType.CREATE
                && id != null
                && !"0".equals(id.toString())) {
            auditLogList.forEach(auditLog -> auditLog.setOperationType(AuditOperationType.UPDATE.getName()));
        }
    }

    /**
     * Looks up the worker group name for the given identity; returns an empty
     * string for an invalid identity or a group that no longer exists.
     */
    @Override
    public String getObjectNameFromIdentity(Object identity) {
        Long objId = toLong(identity);
        if (objId == -1) {
            return "";
        }

        WorkerGroup obj = workerGroupMapper.selectById(objId);
        return obj == null ? "" : obj.getName();
    }
}
+ */ + +package org.apache.dolphinscheduler.api.audit.operator.impl; + +import org.apache.dolphinscheduler.api.audit.operator.BaseAuditOperator; +import org.apache.dolphinscheduler.dao.entity.Queue; +import org.apache.dolphinscheduler.dao.mapper.QueueMapper; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class YarnQueueAuditOperatorImpl extends BaseAuditOperator { + + @Autowired + private QueueMapper queueMapper; + + @Override + public String getObjectNameFromIdentity(Object identity) { + Long objId = toLong(identity); + if (objId == -1) { + return ""; + } + + Queue obj = queueMapper.selectById(objId); + return obj == null ? "" : obj.getQueueName(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java index b93792307f73..12a0bee29180 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/constants/ApiFuncIdentificationConstant.java @@ -37,8 +37,7 @@ public class ApiFuncIdentificationConstant { public static final String TENANT_CREATE = "security:tenant:create"; public static final String TENANT_UPDATE = "security:tenant:update"; public static final String TENANT_DELETE = "security:tenant:delete"; - public static final String ALART_LIST = "monitor:alert:view"; - public static final String ALART_INSTANCE_CREATE = "security:alert-plugin:create"; + public static final String ALERT_INSTANCE_CREATE = "security:alert-plugin:create"; public static final String ALERT_PLUGIN_UPDATE = "security:alert-plugin:update"; public static final String ALERT_PLUGIN_DELETE = "security:alert-plugin:delete"; public static final String WORKER_GROUP_CREATE = 
"security:worker-group:create"; @@ -129,18 +128,6 @@ public class ApiFuncIdentificationConstant { public static final String FILE_DOWNLOAD = "resources:file:download"; public static final String FILE_DELETE = "resources:file:delete"; - public static final String UDF_FILE_VIEW = "resources:udf:view"; - public static final String UDF_FOLDER_ONLINE_CREATE = "resources:udf:create"; - public static final String UDF_UPLOAD = "resources:udf:upload"; - public static final String UDF_UPDATE = "resources:udf:edit"; - public static final String UDF_DOWNLOAD = "resources:udf:download"; - public static final String UDF_DELETE = "resources:udf:delete"; - - public static final String UDF_FUNCTION_VIEW = "resources:udf-func:view"; - public static final String UDF_FUNCTION_CREATE = "resources:udf-func:create"; - public static final String UDF_FUNCTION_UPDATE = "resources:udf-func:update"; - public static final String UDF_FUNCTION_DELETE = "resources:udf-func:delete"; - public static final String TASK_GROUP_VIEW = "resources:task-group:view"; public static final String TASK_GROUP_CREATE = "resources:task-group:create"; public static final String TASK_GROUP_CLOSE = "resources:task-group:close"; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index 781c08ad62d1..3e594dadbc3e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ACCESSTOKEN_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ACCESS_TOKEN_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import 
org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AccessTokenService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -83,6 +85,7 @@ public class AccessTokenController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ACCESS_TOKEN_ERROR) + @OperatorLog(auditType = AuditType.TOKEN_CREATE) public Result createToken(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime, @@ -169,13 +172,15 @@ public Result> queryAccessTokenByUser(@Parameter(hidden = true * @return delete result code */ @Parameter(hidden = true) + @Operation(summary = "deleteToken", description = "DELETE_TOKEN_NOTES") @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_ACCESS_TOKEN_ERROR) + @OperatorLog(auditType = AuditType.TOKEN_DELETE) public Result delAccessTokenById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id) { accessTokenService.deleteAccessTokenById(loginUser, id); - return Result.success(true); + return Result.success(false); } /** @@ -198,6 +203,7 @@ public Result delAccessTokenById(@Parameter(hidden = true) @RequestAttr @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_ACCESS_TOKEN_ERROR) + @OperatorLog(auditType = AuditType.TOKEN_UPDATE) public Result updateToken(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id, @RequestParam(value = "userId") int userId, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java index ac3bd9376d30..3cbaf0a3d471 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ALL_ALERTGROUP_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_GROUP_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertGroupService; @@ -86,6 +88,7 @@ public class AlertGroupController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ALERT_GROUP_ERROR) + @OperatorLog(auditType = AuditType.ALARM_GROUP_CREATE) public Result createAlertGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "groupName") String groupName, @RequestParam(value = "description", required = false) String description, @@ -195,6 +198,7 @@ public Result queryAlertGroupById(@Parameter(hidden = true) @Request @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_ALERT_GROUP_ERROR) + @OperatorLog(auditType = AuditType.ALARM_GROUP_UPDATE) public Result updateAlertGroupById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id, @RequestParam(value = "groupName") String groupName, @@ -219,6 +223,7 @@ public Result updateAlertGroupById(@Parameter(hidden = true) @Reques @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_ALERT_GROUP_ERROR) + 
@OperatorLog(auditType = AuditType.ALARM_GROUP_DELETE) public Result deleteAlertGroupById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id) { alertGroupService.deleteAlertGroupById(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java index 143116fc8e30..68a46e553764 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java @@ -25,6 +25,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.SEND_TEST_ALERT_PLUGIN_INSTANCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ALERT_PLUGIN_INSTANCE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService; @@ -91,6 +93,7 @@ public class AlertPluginInstanceController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ALERT_PLUGIN_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.ALARM_INSTANCE_CREATE) public Result createAlertPluginInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "pluginDefineId") int pluginDefineId, @RequestParam(value = "instanceName") String instanceName, @@ -134,6 +137,7 @@ public Result testSendAlertPluginInstance(@RequestParam(value = "plugin @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) 
@ApiException(UPDATE_ALERT_PLUGIN_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.ALARM_INSTANCE_UPDATE) public Result updateAlertPluginInstanceById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id, @RequestParam(value = "instanceName") String instanceName, @@ -158,6 +162,7 @@ public Result updateAlertPluginInstanceById(@Parameter(hidd @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_ALERT_PLUGIN_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.ALARM_INSTANCE_DELETE) public Result deleteAlertPluginInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AuditLogController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AuditLogController.java index 6d334951e865..afa418ebb6cc 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AuditLogController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AuditLogController.java @@ -20,15 +20,17 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUDIT_LOG_LIST_PAGING; import org.apache.dolphinscheduler.api.dto.AuditDto; +import org.apache.dolphinscheduler.api.dto.auditLog.AuditModelTypeDto; +import org.apache.dolphinscheduler.api.dto.auditLog.AuditOperationTypeDto; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AuditService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.AuditOperationType; -import org.apache.dolphinscheduler.common.enums.AuditResourceType; import 
org.apache.dolphinscheduler.dao.entity.User; +import java.util.List; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; @@ -57,21 +59,23 @@ public class AuditLogController extends BaseController { * * @param loginUser login user * @param pageNo page number - * @param resourceType resource type - * @param operationType operation type + * @param pageSize page size + * @param modelTypes model types + * @param operationTypes operation types + * @param userName user name + * @param modelName model name * @param startDate start time * @param endDate end time - * @param userName user name - * @param pageSize page size * @return audit log content */ @Operation(summary = "queryAuditLogListPaging", description = "QUERY_AUDIT_LOG") @Parameters({ @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)), - @Parameter(name = "resourceType", description = "RESOURCE_TYPE", schema = @Schema(implementation = AuditResourceType.class)), - @Parameter(name = "operationType", description = "OPERATION_TYPE", schema = @Schema(implementation = AuditOperationType.class)), + @Parameter(name = "objectTypes", description = "MODEL_TYPES", schema = @Schema(implementation = String.class)), + @Parameter(name = "operationTypes", description = "OPERATION_TYPES", schema = @Schema(implementation = String.class)), @Parameter(name = "userName", description = "USER_NAME", schema = @Schema(implementation = String.class)), + @Parameter(name = "objectName", description = "MODEL_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, 
schema = @Schema(implementation = int.class, example = "20")) }) @@ -81,21 +85,48 @@ public class AuditLogController extends BaseController { public Result> queryAuditLogListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize, - @RequestParam(value = "resourceType", required = false) AuditResourceType resourceType, - @RequestParam(value = "operationType", required = false) AuditOperationType operationType, + @RequestParam(value = "modelTypes", required = false) String modelTypes, + @RequestParam(value = "operationTypes", required = false) String operationTypes, @RequestParam(value = "startDate", required = false) String startDate, @RequestParam(value = "endDate", required = false) String endDate, - @RequestParam(value = "userName", required = false) String userName) { + @RequestParam(value = "userName", required = false) String userName, + @RequestParam(value = "modelName", required = false) String modelName) { checkPageParams(pageNo, pageSize); PageInfo auditDtoPageInfo = auditService.queryLogListPaging( - loginUser, - resourceType, - operationType, + modelTypes, + operationTypes, startDate, endDate, userName, + modelName, pageNo, pageSize); return Result.success(auditDtoPageInfo); } + + /** + * query audit log operation type list + * + * @return object type list + */ + @Operation(summary = "queryAuditOperationTypeList", description = "QUERY_AUDIT_OPERATION_TYPE_LIST") + @GetMapping(value = "/audit-log-operation-type") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_AUDIT_LOG_LIST_PAGING) + public Result> queryAuditOperationTypeList() { + return Result.success(AuditOperationTypeDto.getOperationTypeDtoList()); + } + + /** + * query audit log model type list + * + * @return model type list + */ + @Operation(summary = "queryAuditModelTypeList", description = "QUERY_AUDIT_MODEL_TYPE_LIST") + @GetMapping(value = 
"/audit-log-model-type") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_AUDIT_LOG_LIST_PAGING) + public Result> queryAuditModelTypeList() { + return Result.success(AuditModelTypeDto.getModelTypeDtoList()); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ClusterController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ClusterController.java index b29ed9b7a360..3cae7afbda55 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ClusterController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ClusterController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_CLUSTER_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_CLUSTER_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.dto.ClusterDto; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ClusterService; @@ -81,6 +83,7 @@ public class ClusterController extends BaseController { @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_CLUSTER_ERROR) + @OperatorLog(auditType = AuditType.CLUSTER_CREATE) public Result createCluster(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam("config") String config, @@ -110,6 +113,7 @@ public Result createCluster(@Parameter(hidden = true) @RequestAttribute(va @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_CLUSTER_ERROR) + @OperatorLog(auditType = AuditType.CLUSTER_UPDATE) public Result updateCluster(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("code") Long code, 
@RequestParam("name") String name, @@ -181,6 +185,7 @@ public Result> queryClusterListPaging(@Parameter(hidden = t @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_CLUSTER_ERROR) + @OperatorLog(auditType = AuditType.CLUSTER_DELETE) public Result deleteCluster(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("clusterCode") Long clusterCode) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java index c69c1d969ccd..9e57ce34dde3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java @@ -20,17 +20,21 @@ import static org.apache.dolphinscheduler.api.enums.Status.COMMAND_STATE_COUNT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_DEFINITION_USER_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.COUNT_PROCESS_INSTANCE_STATE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.LIST_PAGING_ALERT_GROUP_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUEUE_COUNT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.TASK_INSTANCE_STATE_COUNT_ERROR; import org.apache.dolphinscheduler.api.dto.CommandStateCount; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataAnalysisService; +import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.vo.TaskInstanceCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowDefinitionCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowInstanceCountVO; import 
org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.User; import java.util.List; @@ -148,4 +152,54 @@ public Result> countQueueState(@Parameter(hidden = true) @R Map stringIntegerMap = dataAnalysisService.countQueueState(loginUser); return Result.success(stringIntegerMap); } + + /** + * command queue + * + * @param loginUser login user + * @return queue state count + */ + @Operation(summary = "listPendingCommands", description = "LIST_PENDING_COMMANDS") + @Parameters({ + @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), + @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), + @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")) + }) + @GetMapping("/listCommand") + @ResponseStatus(HttpStatus.OK) + @ApiException(LIST_PAGING_ALERT_GROUP_ERROR) + public Result> listPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "projectCode", required = false) Long projectCode, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + checkPageParams(pageNo, pageSize); + PageInfo commandPageInfo = + dataAnalysisService.listPendingCommands(loginUser, projectCode, pageNo, pageSize); + return Result.success(commandPageInfo); + } + + /** + * error command + * + * @param loginUser login user + * @return queue state count + */ + @Operation(summary = "listErrorCommand", description = "LIST_ERROR_COMMAND_LIST_PAGING_NOTES") + @Parameters({ + @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), + @Parameter(name = "pageNo", description = "PAGE_NO", 
required = true, schema = @Schema(implementation = int.class, example = "1")), + @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")) + }) + @GetMapping("/listErrorCommand") + @ResponseStatus(HttpStatus.OK) + @ApiException(LIST_PAGING_ALERT_GROUP_ERROR) + public Result> listErrorCommand(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "projectCode", required = false) Long projectCode, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + checkPageParams(pageNo, pageSize); + PageInfo errorCommandPageInfo = + dataAnalysisService.listErrorCommand(loginUser, projectCode, pageNo, pageSize); + return Result.success(errorCommandPageInfo); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 4d1f0f9f3d70..90dfaf7c686d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -31,6 +31,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_DATASOURCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_DATASOURCE_NAME_FAILURE; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataSourceService; @@ -92,6 +94,7 @@ public class DataSourceController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_DATASOURCE_ERROR) + 
@OperatorLog(auditType = AuditType.DATASOURCE_CREATE) public Result createDataSource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "dataSourceParam", description = "DATA_SOURCE_PARAM", required = true) @RequestBody String jsonStr) { BaseDataSourceParamDTO dataSourceParam = DataSourceUtils.buildDatasourceParam(jsonStr); @@ -116,6 +119,7 @@ public Result createDataSource(@Parameter(hidden = true) @RequestAtt @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_DATASOURCE_ERROR) + @OperatorLog(auditType = AuditType.DATASOURCE_UPDATE) public Result updateDataSource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") Integer id, @RequestBody String jsonStr) { @@ -162,7 +166,7 @@ public Result queryDataSource(@Parameter(hidden = true) @RequestAttribut @ApiException(QUERY_DATASOURCE_ERROR) public Result queryDataSourceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("type") DbType type) { - List datasourceList = dataSourceService.queryDataSourceList(loginUser, type.ordinal()); + List datasourceList = dataSourceService.queryDataSourceList(loginUser, type.getCode()); return Result.success(datasourceList); } @@ -250,6 +254,7 @@ public Result connectionTest(@Parameter(hidden = true) @RequestAttribut @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_DATA_SOURCE_FAILURE) + @OperatorLog(auditType = AuditType.DATASOURCE_DELETE) public Result deleteDataSource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable("id") int id) { dataSourceService.delete(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/EnvironmentController.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/EnvironmentController.java index 3e33da89090b..0520f5d0752f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/EnvironmentController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/EnvironmentController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ENVIRONMENT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_ENVIRONMENT_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.EnvironmentService; import org.apache.dolphinscheduler.api.utils.Result; @@ -80,6 +82,7 @@ public class EnvironmentController extends BaseController { @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ENVIRONMENT_ERROR) + @OperatorLog(auditType = AuditType.ENVIRONMENT_CREATE) public Result createEnvironment(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam("config") String config, @@ -111,6 +114,7 @@ public Result createEnvironment(@Parameter(hidden = true) @RequestAttribut @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_ENVIRONMENT_ERROR) + @OperatorLog(auditType = AuditType.ENVIRONMENT_UPDATE) public Result updateEnvironment(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("code") Long code, @RequestParam("name") String name, @@ -183,6 +187,7 @@ public Result queryEnvironmentListPaging(@Parameter(hidden = true) @RequestAttri @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_ENVIRONMENT_ERROR) + @OperatorLog(auditType = 
AuditType.ENVIRONMENT_DELETE) public Result deleteEnvironment(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("environmentCode") Long environmentCode) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index 98070b4d65a9..8c86de7fe684 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -25,6 +25,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.START_PROCESS_INSTANCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.START_TASK_INSTANCE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -42,6 +44,8 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.extract.master.dto.WorkflowExecuteDto; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; +import org.apache.dolphinscheduler.plugin.task.api.utils.PropertyUtils; import org.apache.commons.lang3.StringUtils; @@ -132,6 +136,7 @@ public class ExecutorController extends BaseController { @PostMapping(value = "start-process-instance") @ResponseStatus(HttpStatus.OK) @ApiException(START_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_START) public Result startProcessInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = 
"PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode") long processDefinitionCode, @@ -160,10 +165,8 @@ public Result startProcessInstance(@Parameter(hidden = true) @RequestAttribute(v if (timeout == null) { timeout = Constants.MAX_TASK_TIMEOUT; } - Map startParamMap = null; - if (startParams != null) { - startParamMap = JSONUtils.toMap(startParams); - } + + List startParamList = PropertyUtils.startParamsTransformPropertyList(startParams); if (complementDependentMode == null) { complementDependentMode = ComplementDependentMode.OFF_MODE; @@ -172,7 +175,7 @@ public Result startProcessInstance(@Parameter(hidden = true) @RequestAttribute(v Map result = execService.execProcessInstance(loginUser, projectCode, processDefinitionCode, scheduleTime, execType, failureStrategy, startNodeList, taskDependType, warningType, warningGroupId, runMode, processInstancePriority, - workerGroup, tenantCode, environmentCode, timeout, startParamMap, expectedParallelismNumber, dryRun, + workerGroup, tenantCode, environmentCode, timeout, startParamList, expectedParallelismNumber, dryRun, testFlag, complementDependentMode, version, allLevelDependent, executionOrder); return returnDataList(result); @@ -229,6 +232,7 @@ public Result startProcessInstance(@Parameter(hidden = true) @RequestAttribute(v @PostMapping(value = "batch-start-process-instance") @ResponseStatus(HttpStatus.OK) @ApiException(BATCH_START_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_BATCH_START) public Result batchStartProcessInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCodes") String processDefinitionCodes, @@ -258,10 +262,7 @@ public Result batchStartProcessInstance(@Parameter(hidden = true) @RequestAttrib timeout = 
Constants.MAX_TASK_TIMEOUT; } - Map startParamMap = null; - if (startParams != null) { - startParamMap = JSONUtils.toMap(startParams); - } + List startParamList = PropertyUtils.startParamsTransformPropertyList(startParams); if (complementDependentMode == null) { log.debug("Parameter complementDependentMode set to {} due to null.", ComplementDependentMode.OFF_MODE); @@ -279,7 +280,8 @@ public Result batchStartProcessInstance(@Parameter(hidden = true) @RequestAttrib result = execService.execProcessInstance(loginUser, projectCode, processDefinitionCode, scheduleTime, execType, failureStrategy, startNodeList, taskDependType, warningType, warningGroupId, runMode, processInstancePriority, - workerGroup, tenantCode, environmentCode, timeout, startParamMap, expectedParallelismNumber, dryRun, + workerGroup, tenantCode, environmentCode, timeout, startParamList, expectedParallelismNumber, + dryRun, testFlag, complementDependentMode, null, allLevelDependent, executionOrder); @@ -318,6 +320,7 @@ public Result batchStartProcessInstance(@Parameter(hidden = true) @RequestAttrib @PostMapping(value = "/execute") @ResponseStatus(HttpStatus.OK) @ApiException(EXECUTE_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_EXECUTE) public Result execute(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("processInstanceId") Integer processInstanceId, @@ -344,6 +347,7 @@ public Result execute(@Parameter(hidden = true) @RequestAttribute(value = Consta @PostMapping(value = "/batch-execute") @ResponseStatus(HttpStatus.OK) @ApiException(BATCH_EXECUTE_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_BATCH_RERUN) public Result batchExecute(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable long projectCode, @RequestParam("processInstanceIds") String processInstanceIds, 
@@ -440,6 +444,7 @@ public Result queryExecutingWorkflow(@RequestParam("id") Integer processInstance @PostMapping(value = "/task-instance/{code}/start") @ResponseStatus(HttpStatus.OK) @ApiException(START_TASK_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.TASK_START) public Result startStreamTaskInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @Parameter(name = "code", description = "TASK_CODE", required = true) @PathVariable long code, @@ -482,6 +487,7 @@ public Result startStreamTaskInstance(@Parameter(hidden = true) @Reques @PostMapping(value = "/execute-task") @ResponseStatus(HttpStatus.OK) @ApiException(EXECUTE_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_EXECUTE) public Result executeTask(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("processInstanceId") Integer processInstanceId, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/K8sNamespaceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/K8sNamespaceController.java index 38b1923c8248..dbc48a96019c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/K8sNamespaceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/K8sNamespaceController.java @@ -25,6 +25,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UNAUTHORIZED_NAMESPACE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_K8S_NAMESPACE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import 
org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.K8sNamespaceService; import org.apache.dolphinscheduler.api.utils.Result; @@ -107,6 +109,7 @@ public Result queryNamespaceListPaging(@Parameter(hidden = true) @RequestAttribu @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_K8S_NAMESPACE_ERROR) + @OperatorLog(auditType = AuditType.K8S_NAMESPACE_CREATE) public Result createNamespace(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "namespace") String namespace, @RequestParam(value = "clusterCode") Long clusterCode) { @@ -152,6 +155,7 @@ public Result verifyNamespace(@Parameter(hidden = true) @RequestAttribute(value @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_K8S_NAMESPACE_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.K8S_NAMESPACE_DELETE) public Result delNamespaceById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id) { Map result = k8sNamespaceService.deleteNamespaceById(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java index 7c0a8ef0e1ae..4bf952d00630 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.LIST_MASTERS_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.LIST_WORKERS_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATABASE_STATE_ERROR; import 
org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -26,15 +25,16 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.model.WorkerServerModel; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.plugin.api.monitor.DatabaseMetrics; +import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseStatus; @@ -56,35 +56,19 @@ public class MonitorController extends BaseController { private MonitorService monitorService; /** - * master list + * server list * - * @param loginUser login user - * @return master list + * @return server list */ - @Operation(summary = "listMaster", description = "MASTER_LIST_NOTES") - @GetMapping(value = "/masters") + @Operation(summary = "listServer", description = "SERVER_LIST_NOTES") + @GetMapping(value = "/{nodeType}") @ResponseStatus(HttpStatus.OK) @ApiException(LIST_MASTERS_ERROR) - public Result> listMaster(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - List servers = monitorService.queryMaster(loginUser); + public Result> listServer(@PathVariable("nodeType") RegistryNodeType nodeType) { + List servers = monitorService.listServer(nodeType); return Result.success(servers); } - /** - * worker list - * - * @param loginUser login user - * @return worker information list - */ - @Operation(summary = "listWorker", description = "WORKER_LIST_NOTES") - 
@GetMapping(value = "/workers") - @ResponseStatus(HttpStatus.OK) - @ApiException(LIST_WORKERS_ERROR) - public Result> listWorker(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - List workerServerModels = monitorService.queryWorker(loginUser); - return Result.success(workerServerModels); - } - /** * query database state * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index 261ea9183b2f..d7ed288ef6c3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -36,6 +36,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; @@ -112,6 +114,7 @@ public class ProcessDefinitionController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_PROCESS_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_CREATE) public Result createProcessDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "name", required = true) String name, @@ -146,6 +149,7 @@ public Result 
createProcessDefinition(@Parameter(hidden = true) @RequestAttribut @PostMapping(value = "/batch-copy") @ResponseStatus(HttpStatus.OK) @ApiException(BATCH_COPY_PROCESS_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_COPY) public Result copyProcessDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "codes", required = true) String codes, @@ -232,6 +236,7 @@ public Result verifyProcessDefinitionName(@Parameter(hidden = true) @RequestAttr @PutMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_PROCESS_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_UPDATE) public Result updateProcessDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "name", required = true) String name, @@ -307,6 +312,7 @@ public Result queryProcessDefinitionVersions(@Parameter(hidden = true) @RequestA @GetMapping(value = "/{code}/versions/{version}") @ResponseStatus(HttpStatus.OK) @ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_SWITCH_VERSION) public Result switchProcessDefinitionVersion(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code, @@ -333,12 +339,12 @@ public Result switchProcessDefinitionVersion(@Parameter(hidden = true) @RequestA @DeleteMapping(value = "/{code}/versions/{version}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_DELETE_VERSION) 
public Result deleteProcessDefinitionVersion(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @PathVariable(value = "code") long workflowDefinitionCode, - @PathVariable(value = "version") int workflowDefinitionVersion) { - processDefinitionService.deleteProcessDefinitionVersion(loginUser, projectCode, workflowDefinitionCode, - workflowDefinitionVersion); + @PathVariable(value = "code") long code, + @PathVariable(value = "version") int version) { + processDefinitionService.deleteProcessDefinitionVersion(loginUser, projectCode, code, version); return Result.success(); } @@ -351,6 +357,7 @@ public Result deleteProcessDefinitionVersion(@Parameter(hidden = true) @Re @PostMapping(value = "/{code}/release") @ResponseStatus(HttpStatus.OK) @ApiException(RELEASE_PROCESS_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_RELEASE) public Result releaseProcessDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code", required = true) long workflowDefinitionCode, @@ -611,10 +618,11 @@ public Result getTaskListByProcessDefinitionCode(@Parameter(hidden = true) @Requ @DeleteMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_PROCESS_DEFINE_BY_CODE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_DELETE) public Result deleteProcessDefinitionByCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, - @PathVariable("code") long workflowDefinitionCode) { - processDefinitionService.deleteProcessDefinitionByCode(loginUser, workflowDefinitionCode); + 
@PathVariable("code") long code) { + processDefinitionService.deleteProcessDefinitionByCode(loginUser, code); return new Result(Status.SUCCESS); } @@ -633,6 +641,7 @@ public Result deleteProcessDefinitionByCode(@Parameter(hidden = true) @RequestAt @PostMapping(value = "/batch-delete") @ResponseStatus(HttpStatus.OK) @ApiException(BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_BATCH_DELETE) public Result batchDeleteProcessDefinitionByCodes(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("codes") String codes) { @@ -656,6 +665,7 @@ public Result batchDeleteProcessDefinitionByCodes(@Parameter(hidden = true) @Req }) @PostMapping(value = "/batch-export") @ResponseBody + @OperatorLog(auditType = AuditType.PROCESS_EXPORT) public void batchExportProcessDefinitionByCodes(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("codes") String codes, @@ -699,6 +709,7 @@ public Result queryAllProcessDefinitionByProjectCode(@Parameter(hidden = true) @ }) @PostMapping(value = "/import") @ApiException(IMPORT_PROCESS_DEFINE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_IMPORT) public Result importProcessDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("file") MultipartFile file) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index 
85f9cbcf0817..6d7b70918f9b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -19,6 +19,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.dto.DynamicSubWorkflowDto; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -173,6 +175,7 @@ public Result queryTaskListByProcessId(@Parameter(hidden = true) @RequestAttribu @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(Status.UPDATE_PROCESS_INSTANCE_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_INSTANCE_UPDATE) public Result updateProcessInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "taskRelationJson", required = true) String taskRelationJson, @@ -255,6 +258,7 @@ public Result queryTopNLongestRunningProcessInstance(@Parameter @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_INSTANCE_DELETE) public Result deleteProcessInstanceById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { @@ -388,6 +392,7 @@ public Result viewTree(@Parameter(hidden = true) @RequestAttribute(value = Const @PostMapping(value = "/batch-delete") 
@ResponseStatus(HttpStatus.OK) @ApiException(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR) + @OperatorLog(auditType = AuditType.PROCESS_INSTANCE_BATCH_DELETE) public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable long projectCode, @RequestParam("processInstanceIds") String processInstanceIds) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index 7963bae22d9f..ecec69622a8b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -27,7 +27,10 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UNAUTHORIZED_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROJECT_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; +import org.apache.dolphinscheduler.api.service.AuditService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; @@ -67,6 +70,9 @@ public class ProjectController extends BaseController { @Autowired private ProjectService projectService; + @Autowired + private AuditService auditService; + /** * create project * @@ -83,6 +89,7 @@ public class ProjectController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_PROJECT_ERROR) + @OperatorLog(auditType = AuditType.PROJECT_CREATE) public Result createProject(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, 
@RequestParam("projectName") String projectName, @RequestParam(value = "description", required = false) String description) { @@ -107,6 +114,7 @@ public Result createProject(@Parameter(hidden = true) @RequestAttribute(value = @PutMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_PROJECT_ERROR) + @OperatorLog(auditType = AuditType.PROJECT_UPDATE) public Result updateProject(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable("code") Long code, @RequestParam("projectName") String projectName, @@ -207,6 +215,7 @@ public Result queryProjectWithAuthorizedLevelListPaging(@Parameter(hidden = true @DeleteMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_PROJECT_ERROR) + @OperatorLog(auditType = AuditType.PROJECT_DELETE) public Result deleteProject(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable("code") Long code) { return projectService.deleteProject(loginUser, code); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectParameterController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectParameterController.java index 706c7e939aa3..efec970e6fe6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectParameterController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectParameterController.java @@ -61,7 +61,8 @@ public class ProjectParameterController extends BaseController { @Operation(summary = "createProjectParameter", description = "CREATE_PROJECT_PARAMETER_NOTES") @Parameters({ @Parameter(name = "projectParameterName", description = "PROJECT_PARAMETER_NAME", schema = @Schema(implementation = String.class)), - @Parameter(name = "projectParameterValue", description = "PROJECT_PARAMETER_VALUE", schema = @Schema(implementation = 
String.class)) + @Parameter(name = "projectParameterValue", description = "PROJECT_PARAMETER_VALUE", schema = @Schema(implementation = String.class)), + @Parameter(name = "projectParameterDataType", description = "PROJECT_PARAMETER_DATA_TYPE", schema = @Schema(implementation = String.class)) }) @PostMapping() @ResponseStatus(HttpStatus.CREATED) @@ -69,9 +70,10 @@ public class ProjectParameterController extends BaseController { public Result createProjectParameter(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("projectParameterName") String projectParameterName, - @RequestParam(value = "projectParameterValue") String projectParameterValue) { + @RequestParam(value = "projectParameterValue") String projectParameterValue, + @RequestParam(value = "projectParameterDataType", defaultValue = "VARCHAR") String projectParameterDataType) { return projectParameterService.createProjectParameter(loginUser, projectCode, projectParameterName, - projectParameterValue); + projectParameterValue, projectParameterDataType); } @Operation(summary = "updateProjectParameter", description = "UPDATE_PROJECT_PARAMETER_NOTES") @@ -79,6 +81,7 @@ public Result createProjectParameter(@Parameter(hidden = true) @RequestAttribute @Parameter(name = "code", description = "PROJECT_PARAMETER_CODE", schema = @Schema(implementation = long.class, example = "123456")), @Parameter(name = "projectParameterName", description = "PROJECT_PARAMETER_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "projectParameterValue", description = "PROJECT_PARAMETER_VALUE", schema = @Schema(implementation = String.class)), + @Parameter(name = "projectParameterDataType", description = "PROJECT_PARAMETER_DATA_TYPE", schema = @Schema(implementation = String.class)) }) @PutMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @@ 
-87,9 +90,10 @@ public Result updateProjectParameter(@Parameter(hidden = true) @RequestAttribute @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("code") Long code, @RequestParam("projectParameterName") String projectParameterName, - @RequestParam(value = "projectParameterValue") String projectParameterValue) { + @RequestParam(value = "projectParameterValue") String projectParameterValue, + @RequestParam(value = "projectParameterDataType") String projectParameterDataType) { return projectParameterService.updateProjectParameter(loginUser, projectCode, code, projectParameterName, - projectParameterValue); + projectParameterValue, projectParameterDataType); } @Operation(summary = "deleteProjectParametersByCode", description = "DELETE_PROJECT_PARAMETER_NOTES") @@ -133,13 +137,14 @@ public Result queryProjectParameterListPaging( @Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "projectParameterDataType", required = false) String projectParameterDataType, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { checkPageParams(pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); return projectParameterService.queryProjectParameterListPaging(loginUser, projectCode, pageSize, pageNo, - searchVal); + searchVal, projectParameterDataType); } @Operation(summary = "queryProjectParameterByCode", description = "QUERY_PROJECT_PARAMETER_NOTES") diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java index dbbc7d858e31..4e69dede6f65 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java @@ -23,6 +23,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_QUEUE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_QUEUE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.QueueService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -124,6 +126,7 @@ public Result> queryQueueListPaging(@Parameter(hidden = true) @R @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_QUEUE_ERROR) + @OperatorLog(auditType = AuditType.YARN_QUEUE_CREATE) public Result createQueue(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "queue") String queue, @RequestParam(value = "queueName") String queueName) { @@ -148,6 +151,7 @@ public Result createQueue(@Parameter(hidden = true) @RequestAttribute(val @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.CREATED) @ApiException(UPDATE_QUEUE_ERROR) + @OperatorLog(auditType = AuditType.YARN_QUEUE_UPDATE) public Result updateQueue(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id, @RequestParam(value = "queue") String queue, @@ -169,6 +173,7 @@ public Result updateQueue(@Parameter(hidden = true) @RequestAttribute(val @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_QUEUE_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.YARN_QUEUE_DELETE) public Result deleteQueueById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id) throws 
Exception { queueService.deleteQueueById(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index 773e734c22c2..03eb42c5d691 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -19,52 +19,52 @@ import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.CREATE_UDF_FUNCTION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_RESOURCE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.DELETE_UDF_FUNCTION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_RESOURCE_FILE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_BY_TYPE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_PAGING; -import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_FILE_IS_EMPTY; import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_NOT_EXIST; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_RESOURCE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_UDF_FUNCTION_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR; -import static 
org.apache.dolphinscheduler.api.enums.Status.VERIFY_UDF_FUNCTION_NAME_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.VIEW_UDF_FUNCTION_ERROR; -import org.apache.dolphinscheduler.api.dto.resources.DeleteDataTransferResponse; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.DeleteResourceRequest; +import org.apache.dolphinscheduler.api.dto.resources.DownloadFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.PagingResourceItemRequest; +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.RenameFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileRequest; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ResourcesService; -import org.apache.dolphinscheduler.api.service.UdfFuncService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.api.vo.ResourceItemVO; +import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ProgramType; -import 
org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.commons.lang3.StringUtils; -import java.util.Map; +import java.util.List; + +import javax.servlet.http.HttpServletResponse; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.core.io.Resource; -import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestAttribute; @@ -75,15 +75,14 @@ import org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; +import com.google.common.io.Files; + import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameters; import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.tags.Tag; -/** - * resources controller - */ @Tag(name = "RESOURCES_TAG") @RestController @RequestMapping("resources") @@ -92,40 +91,41 @@ public class ResourcesController extends BaseController { @Autowired private ResourcesService resourceService; - @Autowired - private UdfFuncService udfFuncService; - /** - * @param loginUser login user - * @param type type - * @param alias alias - * @param pid parent id - * @param currentDir current directory - * @return create result code - */ + 
@Operation(summary = "queryResourceList", description = "QUERY_RESOURCE_LIST_NOTES") + @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)) + @GetMapping(value = "/list") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RESOURCES_LIST_ERROR) + public Result> queryResourceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type) { + return Result.success(resourceService.queryResourceFiles(loginUser, type)); + } + @Operation(summary = "createDirectory", description = "CREATE_RESOURCE_NOTES") @Parameters({ @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), @Parameter(name = "name", description = "RESOURCE_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "pid", description = "RESOURCE_PID", required = true, schema = @Schema(implementation = int.class, example = "10")), @Parameter(name = "currentDir", description = "RESOURCE_CURRENT_DIR", required = true, schema = @Schema(implementation = String.class))}) @PostMapping(value = "/directory") @ApiException(CREATE_RESOURCE_ERROR) - public Result createDirectory(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "name") String alias, - @RequestParam(value = "pid") int pid, - @RequestParam(value = "currentDir") String currentDir) { - // todo verify the directory name - return resourceService.createDirectory(loginUser, alias, type, pid, currentDir); + @OperatorLog(auditType = AuditType.FOLDER_CREATE) + public Result createDirectory(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "name") String 
directoryName, + @RequestParam(value = "currentDir") String parentDirectory) { + + CreateDirectoryRequest createDirectoryRequest = CreateDirectoryRequest.builder() + .loginUser(loginUser) + .directoryName(directoryName) + .type(type) + .parentAbsoluteDirectory(parentDirectory) + .build(); + resourceService.createDirectory(createDirectoryRequest); + return Result.success(null); } - /** - * create resource - * - * @return create result code - */ - @Operation(summary = "createResource", description = "CREATE_RESOURCE_NOTES") + @Operation(summary = "uploadFile", description = "CREATE_FILE") @Parameters({ @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), @Parameter(name = "name", description = "RESOURCE_NAME", required = true, schema = @Schema(implementation = String.class)), @@ -133,24 +133,72 @@ public Result createDirectory(@Parameter(hidden = true) @RequestAttribut @Parameter(name = "currentDir", description = "RESOURCE_CURRENT_DIR", required = true, schema = @Schema(implementation = String.class))}) @PostMapping() @ApiException(CREATE_RESOURCE_ERROR) - public Result createResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "name") String alias, - @RequestParam("file") MultipartFile file, - @RequestParam(value = "currentDir") String currentDir) { - // todo verify the file name - return resourceService.uploadResource(loginUser, alias, type, file, currentDir); + @OperatorLog(auditType = AuditType.FILE_CREATE) + public Result createFile(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "name") String fileName, + @RequestParam("file") MultipartFile file, + @RequestParam(value = "currentDir") String parentDirectoryAbsolutePath) { + + CreateFileRequest 
uploadFileRequest = CreateFileRequest.builder() + .loginUser(loginUser) + .fileName(fileName) + .file(file) + .type(type) + .parentAbsoluteDirectory(parentDirectoryAbsolutePath) + .build(); + resourceService.createFile(uploadFileRequest); + return Result.success(); + } + + @Operation(summary = "createFileFromContent", description = "ONLINE_CREATE_RESOURCE_NOTES") + @Parameters({ + @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), + @Parameter(name = "fileName", description = "RESOURCE_NAME", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "suffix", description = "SUFFIX", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "description", description = "RESOURCE_DESC", schema = @Schema(implementation = String.class)), + @Parameter(name = "content", description = "CONTENT", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "currentDir", description = "RESOURCE_CURRENTDIR", required = true, schema = @Schema(implementation = String.class))}) + @PostMapping(value = "/online-create") + @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR) + @OperatorLog(auditType = AuditType.FILE_CREATE) + public Result createFileFromContent(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "fileName") String fileName, + @RequestParam(value = "suffix") String fileSuffix, + @RequestParam(value = "content") String fileContent, + @RequestParam(value = "currentDir") String fileParentDirectoryAbsolutePath) { + CreateFileFromContentRequest createFileFromContentRequest = CreateFileFromContentRequest.builder() + .loginUser(loginUser) + .fileName(fileName + "." 
+ fileSuffix) + .fileContent(fileContent) + .type(type) + .parentAbsoluteDirectory(fileParentDirectoryAbsolutePath) + .build(); + resourceService.createFileFromContent(createFileFromContentRequest); + return Result.success(); + } + + @Operation(summary = "updateFileContent", description = "UPDATE_RESOURCE_NOTES") + @Parameters({ + @Parameter(name = "content", description = "CONTENT", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "fullName", description = "FULL_NAME", required = true, schema = @Schema(implementation = String.class)), + @Parameter(name = "tenantCode", description = "TENANT_CODE", required = true, schema = @Schema(implementation = String.class))}) + @PutMapping(value = "/update-content") + @ApiException(EDIT_RESOURCE_FILE_ON_LINE_ERROR) + @OperatorLog(auditType = AuditType.FILE_UPDATE) + public Result updateFileContent(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName") String fileAbsolutePath, + @RequestParam(value = "content") String fileContent) { + UpdateFileFromContentRequest updateFileContentRequest = UpdateFileFromContentRequest.builder() + .loginUser(loginUser) + .fileContent(fileContent) + .fileAbsolutePath(fileAbsolutePath) + .build(); + resourceService.updateFileFromContent(updateFileContentRequest); + return Result.success(); } - /** - * update resource - * - * @param loginUser login user - * @param alias alias - * @param type resource type - * @param file resource file - * @return update result code - */ @Operation(summary = "updateResource", description = "UPDATE_RESOURCE_NOTES") @Parameters({ @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class)), @@ -160,47 +208,40 @@ public Result createResource(@Parameter(hidden = true) @RequestAttribute @Parameter(name = "file", description = "RESOURCE_FILE", required = true, schema = @Schema(implementation = 
MultipartFile.class))}) @PutMapping() @ApiException(UPDATE_RESOURCE_ERROR) - public Result updateResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "tenantCode", required = false) String tenantCode, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "name") String alias, - @RequestParam(value = "file", required = false) MultipartFile file) { - return resourceService.updateResource(loginUser, fullName, tenantCode, alias, type, file); - } + @OperatorLog(auditType = AuditType.FILE_UPDATE) + public Result updateResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName") String resourceAbsolutePath, + @RequestParam(value = "name") String resourceName, + @RequestParam(value = "file", required = false) MultipartFile file) { + if (StringUtils.isEmpty(Files.getFileExtension(resourceName))) { + RenameDirectoryRequest renameDirectoryRequest = RenameDirectoryRequest.builder() + .loginUser(loginUser) + .directoryAbsolutePath(resourceAbsolutePath) + .newDirectoryName(resourceName) + .build(); + resourceService.renameDirectory(renameDirectoryRequest); + return Result.success(); + } - /** - * query resources list - * - * @param loginUser login user - * @param type resource type - * @return resource list - */ - @Operation(summary = "queryResourceList", description = "QUERY_RESOURCE_LIST_NOTES") - @Parameters({ - @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), - @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class))}) - @GetMapping(value = "/list") - @ResponseStatus(HttpStatus.OK) - @ApiException(QUERY_RESOURCES_LIST_ERROR) - public Result queryResourceList(@Parameter(hidden = true) 
@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "fullName") String fullName) { - Map result = resourceService.queryResourceList(loginUser, type, fullName); - return returnDataList(result); + if (file == null) { + RenameFileRequest renameFileRequest = RenameFileRequest.builder() + .loginUser(loginUser) + .fileAbsolutePath(resourceAbsolutePath) + .newFileName(resourceName) + .build(); + resourceService.renameFile(renameFileRequest); + return Result.success(); + } + UpdateFileRequest updateFileRequest = UpdateFileRequest.builder() + .loginUser(loginUser) + .fileAbsolutePath(resourceAbsolutePath) + .file(file) + .build(); + resourceService.updateFile(updateFileRequest); + return Result.success(); } - /** - * query resources list paging - * - * @param loginUser login user - * @param type resource type - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @return resource list page - */ - @Operation(summary = "queryResourceListPaging", description = "QUERY_RESOURCE_LIST_PAGING_NOTES") + @Operation(summary = "pagingResourceItemRequest", description = "PAGING_RESOURCE_ITEM_LIST") @Parameters({ @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "bucket_name/tenant_name/type/ds")), @@ -210,128 +251,54 @@ public Result queryResourceList(@Parameter(hidden = true) @RequestAttrib @GetMapping() @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_RESOURCES_LIST_PAGING) - public Result> queryResourceListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = 
"type") ResourceType type, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize) { - checkPageParams(pageNo, pageSize); - - searchVal = ParameterUtils.handleEscapes(searchVal); - return resourceService.queryResourceListPaging(loginUser, fullName, tenantCode, type, searchVal, pageNo, - pageSize); + public Result> pagingResourceItemRequest(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName") String resourceAbsolutePath, + @RequestParam(value = "type") ResourceType resourceType, + @RequestParam(value = "searchVal", required = false) String resourceNameKeyWord, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + PagingResourceItemRequest pagingResourceItemRequest = PagingResourceItemRequest.builder() + .loginUser(loginUser) + .resourceAbsolutePath(resourceAbsolutePath) + .resourceType(resourceType) + .resourceNameKeyWord(StringUtils.trim(ParameterUtils.handleEscapes(resourceNameKeyWord))) + .pageNo(pageNo) + .pageSize(pageSize) + .build(); + pagingResourceItemRequest.checkPageNoAndPageSize(); + + return Result.success(resourceService.pagingResourceItem(pagingResourceItemRequest)); } - /** - * delete resource - * - * @param loginUser login user - * @return delete result code - */ @Operation(summary = "deleteResource", description = "DELETE_RESOURCE_BY_ID_NOTES") @Parameters({ - @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "test/"))}) + @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "file:////tmp/dolphinscheduler/storage/default/resources/demo.sql")) + }) @DeleteMapping() @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_RESOURCE_ERROR) - public Result 
deleteResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "tenantCode", required = false) String tenantCode) throws Exception { - return resourceService.delete(loginUser, fullName, tenantCode); + @OperatorLog(auditType = AuditType.FILE_DELETE) + public Result deleteResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName") String resourceAbsolutePath) { + DeleteResourceRequest deleteResourceRequest = DeleteResourceRequest.builder() + .loginUser(loginUser) + .resourceAbsolutePath(resourceAbsolutePath) + .build(); + resourceService.delete(deleteResourceRequest); + return Result.success(); } - /** - * delete DATA_TRANSFER data - * - * @param loginUser login user - * @return delete result code - */ - @Operation(summary = "deleteDataTransferData", description = "Delete the N days ago data of DATA_TRANSFER ") - @Parameters({ - @Parameter(name = "days", description = "N days ago", required = true, schema = @Schema(implementation = Integer.class))}) - @DeleteMapping(value = "/data-transfer") - @ResponseStatus(HttpStatus.OK) - @ApiException(DELETE_RESOURCE_ERROR) - public DeleteDataTransferResponse deleteDataTransferData(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "days") Integer days) { - return resourceService.deleteDataTransferData(loginUser, days); - } - - /** - * verify resource by alias and type - * - * @param loginUser login user - * @param fullName resource full name - * @param type resource type - * @return true if the resource name not exists, otherwise return false - */ - @Operation(summary = "verifyResourceName", description = "VERIFY_RESOURCE_NAME_NOTES") - @Parameters({ - @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = 
ResourceType.class)), - @Parameter(name = "fullName", description = "RESOURCE_FULL_NAME", required = true, schema = @Schema(implementation = String.class))}) - @GetMapping(value = "/verify-name") - @ResponseStatus(HttpStatus.OK) - @ApiException(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR) - public Result verifyResourceName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "type") ResourceType type) { - return resourceService.verifyResourceName(fullName, type, loginUser); - } - - /** - * query resources by type - * - * @param loginUser login user - * @param type resource type - * @return resource list - */ - @Operation(summary = "queryResourceByProgramType", description = "QUERY_RESOURCE_LIST_NOTES") + @Operation(summary = "queryResourceFileList", description = "QUERY_RESOURCE_FILE_LIST_NOTES") @Parameters({ @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class))}) @GetMapping(value = "/query-by-type") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_RESOURCES_LIST_ERROR) - public Result queryResourceJarList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "programType", required = false) ProgramType programType) { - return resourceService.queryResourceByProgramType(loginUser, type, programType); + public Result> queryResourceFileList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type) { + return Result.success(resourceService.queryResourceFiles(loginUser, type)); } - /** - * query resource by file name and type - * - * @param loginUser login user - * @param fileName resource full name - * @param tenantCode tenantCode of the owner of the resource - * @param type resource type 
- * @return true if the resource name not exists, otherwise return false - */ - @Operation(summary = "queryResourceByFileName", description = "QUERY_BY_RESOURCE_FILE_NAME") - @Parameters({ - @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), - @Parameter(name = "fileName", description = "RESOURCE_FILE_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "tenantCode", description = "TENANT_CODE", required = true, schema = @Schema(implementation = String.class)),}) - @GetMapping(value = "/query-file-name") - @ResponseStatus(HttpStatus.OK) - @ApiException(RESOURCE_NOT_EXIST) - public Result queryResourceByFileName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fileName", required = false) String fileName, - @RequestParam(value = "tenantCode", required = false) String tenantCode, - @RequestParam(value = "type") ResourceType type) { - - return resourceService.queryResourceByFileName(loginUser, fileName, type, tenantCode); - } - - /** - * view resource file online - * - * @param loginUser login user - * @param skipLineNum skip line number - * @param limit limit - * @return resource content - */ @Operation(summary = "viewResource", description = "VIEW_RESOURCE_BY_ID_NOTES") @Parameters({ @Parameter(name = "fullName", description = "RESOURCE_FULL_NAME", required = true, schema = @Schema(implementation = String.class, example = "tenant/1.png")), @@ -340,259 +307,35 @@ public Result queryResourceByFileName(@Parameter(hidden = true) @Request @Parameter(name = "limit", description = "LIMIT", required = true, schema = @Schema(implementation = int.class, example = "100"))}) @GetMapping(value = "/view") @ApiException(VIEW_RESOURCE_FILE_ON_LINE_ERROR) - public Result viewResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = 
"skipLineNum") int skipLineNum, - @RequestParam(value = "limit") int limit, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "tenantCode") String tenantCode) { - return resourceService.readResource(loginUser, fullName, tenantCode, skipLineNum, limit); + public Result viewResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName") String resourceAbsoluteFilePath, + @RequestParam(value = "skipLineNum") int skipLineNum, + @RequestParam(value = "limit") int limit) { + FetchFileContentRequest fetchFileContentRequest = FetchFileContentRequest.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath(resourceAbsoluteFilePath) + .limit(limit == -1 ? Integer.MAX_VALUE : skipLineNum) + .skipLineNum(skipLineNum) + .build(); + return Result.success(resourceService.fetchResourceFileContent(fetchFileContentRequest)); } - @Operation(summary = "onlineCreateResource", description = "ONLINE_CREATE_RESOURCE_NOTES") - @Parameters({ - @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), - @Parameter(name = "fileName", description = "RESOURCE_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "suffix", description = "SUFFIX", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "description", description = "RESOURCE_DESC", schema = @Schema(implementation = String.class)), - @Parameter(name = "content", description = "CONTENT", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "currentDir", description = "RESOURCE_CURRENTDIR", required = true, schema = @Schema(implementation = String.class))}) - @PostMapping(value = "/online-create") - @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR) - public Result createResourceFile(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) 
User loginUser, - @RequestParam(value = "type") ResourceType type, - @RequestParam(value = "fileName") String fileName, - @RequestParam(value = "suffix") String fileSuffix, - @RequestParam(value = "content") String content, - @RequestParam(value = "currentDir") String currentDir) { - if (StringUtils.isEmpty(content)) { - log.error("resource file contents are not allowed to be empty"); - return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.createResourceFile(loginUser, type, fileName, fileSuffix, content, currentDir); - } - - /** - * edit resource file online - * - * @param loginUser login user - * @param content content - * @return update result code - */ - @Operation(summary = "updateResourceContent", description = "UPDATE_RESOURCE_NOTES") - @Parameters({ - @Parameter(name = "content", description = "CONTENT", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "fullName", description = "FULL_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "tenantCode", description = "TENANT_CODE", required = true, schema = @Schema(implementation = String.class))}) - @PutMapping(value = "/update-content") - @ApiException(EDIT_RESOURCE_FILE_ON_LINE_ERROR) - public Result updateResourceContent(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "content") String content) { - if (StringUtils.isEmpty(content)) { - log.error("The resource file contents are not allowed to be empty"); - return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.updateResourceContent(loginUser, fullName, tenantCode, content); - } - - /** - * download resource file - * - * @param loginUser login user - * @return resource content - */ 
@Operation(summary = "downloadResource", description = "DOWNLOAD_RESOURCE_NOTES") @Parameters({ @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "test/"))}) @GetMapping(value = "/download") @ResponseBody @ApiException(DOWNLOAD_RESOURCE_FILE_ERROR) - public ResponseEntity downloadResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "fullName") String fullName) throws Exception { - Resource file = resourceService.downloadResource(loginUser, fullName); - if (file == null) { - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(RESOURCE_NOT_EXIST.getMsg()); - } - return ResponseEntity.ok() - .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") - .body(file); - } + public void downloadResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + HttpServletResponse response, + @RequestParam(value = "fullName") String fileAbsolutePath) { - /** - * create udf function - * - * @param loginUser login user - * @param type udf type - * @param funcName function name - * @param argTypes argument types - * @param database database - * @param description description - * @param className class name - * @return create result code - */ - @Operation(summary = "createUdfFunc", description = "CREATE_UDF_FUNCTION_NOTES") - @Parameters({ - @Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class)), - @Parameter(name = "funcName", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "className", description = "CLASS_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "argTypes", description = "ARG_TYPES", schema = @Schema(implementation = String.class)), - @Parameter(name = 
"database", description = "DATABASE_NAME", schema = @Schema(implementation = String.class)), - @Parameter(name = "description", description = "UDF_DESC", schema = @Schema(implementation = String.class)), - @Parameter(name = "resourceId", description = "RESOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")) - - }) - @PostMapping(value = "/udf-func") - @ResponseStatus(HttpStatus.CREATED) - @ApiException(CREATE_UDF_FUNCTION_ERROR) - public Result createUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") UdfType type, - @RequestParam(value = "funcName") String funcName, - @RequestParam(value = "className") String className, - @RequestParam(value = "fullName") String fullName, - @RequestParam(value = "argTypes", required = false) String argTypes, - @RequestParam(value = "database", required = false) String database, - @RequestParam(value = "description", required = false) String description) { - // todo verify the sourceName - return udfFuncService.createUdfFunction(loginUser, funcName, className, fullName, argTypes, database, - description, type); - } + DownloadFileRequest downloadFileRequest = DownloadFileRequest.builder() + .loginUser(loginUser) + .fileAbsolutePath(fileAbsolutePath) + .build(); - /** - * view udf function - * - * @param loginUser login user - * @param id udf function id - * @return udf function detail - */ - @Operation(summary = "viewUIUdfFunction", description = "VIEW_UDF_FUNCTION_NOTES") - @Parameters({ - @Parameter(name = "id", description = "RESOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")) - - }) - @GetMapping(value = "/{id}/udf-func") - @ResponseStatus(HttpStatus.OK) - @ApiException(VIEW_UDF_FUNCTION_ERROR) - public Result viewUIUdfFunction(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable("id") int id) { - return 
udfFuncService.queryUdfFuncDetail(loginUser, id); - } - - /** - * update udf function - * - * @param loginUser login user - * @param type resource type - * @param funcName function name - * @param argTypes argument types - * @param database data base - * @param description description - * @param className class name - * @param udfFuncId udf function id - * @return update result code - */ - @Operation(summary = "updateUdfFunc", description = "UPDATE_UDF_FUNCTION_NOTES") - @Parameters({ - @Parameter(name = "id", description = "UDF_ID", required = true, schema = @Schema(implementation = int.class)), - @Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class)), - @Parameter(name = "funcName", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "className", description = "CLASS_NAME", required = true, schema = @Schema(implementation = String.class)), - @Parameter(name = "argTypes", description = "ARG_TYPES", schema = @Schema(implementation = String.class)), - @Parameter(name = "database", description = "DATABASE_NAME", schema = @Schema(implementation = String.class)), - @Parameter(name = "description", description = "UDF_DESC", schema = @Schema(implementation = String.class))}) - @PutMapping(value = "/udf-func/{id}") - @ApiException(UPDATE_UDF_FUNCTION_ERROR) - public Result updateUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable(value = "id") int udfFuncId, @RequestParam(value = "type") UdfType type, - @RequestParam(value = "funcName") String funcName, - @RequestParam(value = "className") String className, - @RequestParam(value = "argTypes", required = false) String argTypes, - @RequestParam(value = "database", required = false) String database, - @RequestParam(value = "description", required = false) String description, - @RequestParam(value = "fullName") String fullName) { - return 
udfFuncService.updateUdfFunc(loginUser, udfFuncId, funcName, className, argTypes, database, description, - type, fullName); - } - - /** - * query udf function list paging - * - * @param loginUser login user - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @return udf function list page - */ - @Operation(summary = "queryUdfFuncListPaging", description = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") - @Parameters({ - @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), - @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), - @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20"))}) - @GetMapping(value = "/udf-func") - @ResponseStatus(HttpStatus.OK) - @ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR) - public Result queryUdfFuncListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize) { - checkPageParams(pageNo, pageSize); - return udfFuncService.queryUdfFuncListPaging(loginUser, searchVal, pageNo, pageSize); - } - - /** - * query udf func list by type - * - * @param loginUser login user - * @param type resource type - * @return resource list - */ - @Operation(summary = "queryUdfFuncList", description = "QUERY_UDF_FUNC_LIST_NOTES") - @Parameters({ - @Parameter(name = "type", description = "UDF_TYPE", required = true, schema = @Schema(implementation = UdfType.class))}) - @GetMapping(value = "/udf-func/list") - @ResponseStatus(HttpStatus.OK) - @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR) - public Result queryUdfFuncList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - 
@RequestParam("type") UdfType type) { - return udfFuncService.queryUdfFuncList(loginUser, type.ordinal()); - } - - /** - * verify udf function name can use or not - * - * @param loginUser login user - * @param name name - * @return true if the name can user, otherwise return false - */ - @Operation(summary = "verifyUdfFuncName", description = "VERIFY_UDF_FUNCTION_NAME_NOTES") - @Parameters({ - @Parameter(name = "name", description = "FUNC_NAME", required = true, schema = @Schema(implementation = String.class)) - - }) - @GetMapping(value = "/udf-func/verify-name") - @ResponseStatus(HttpStatus.OK) - @ApiException(VERIFY_UDF_FUNCTION_NAME_ERROR) - public Result verifyUdfFuncName(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "name") String name) { - return udfFuncService.verifyUdfFuncByName(loginUser, name); - } - - /** - * delete udf function - * - * @param loginUser login user - * @param udfFuncId udf function id - * @return delete result code - */ - @Operation(summary = "deleteUdfFunc", description = "DELETE_UDF_FUNCTION_NOTES") - @Parameters({ - @Parameter(name = "id", description = "UDF_FUNC_ID", required = true, schema = @Schema(implementation = int.class, example = "100"))}) - @DeleteMapping(value = "/udf-func/{id}") - @ResponseStatus(HttpStatus.OK) - @ApiException(DELETE_UDF_FUNCTION_ERROR) - public Result deleteUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable(value = "id") int udfFuncId) { - return udfFuncService.delete(loginUser, udfFuncId); + resourceService.downloadResource(response, downloadFileRequest); } @Operation(summary = "queryResourceBaseDir", description = "QUERY_RESOURCE_BASE_DIR") @@ -601,8 +344,8 @@ public Result deleteUdfFunc(@Parameter(hidden = true) @RequestAttribute(value = @GetMapping(value = "/base-dir") @ResponseStatus(HttpStatus.OK) @ApiException(RESOURCE_NOT_EXIST) - public Result 
queryResourceBaseDir(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + public Result queryResourceBaseDir(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "type") ResourceType type) { - return resourceService.queryResourceBaseDir(loginUser, type); + return Result.success(resourceService.queryResourceBaseDir(loginUser, type)); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index 3f3660e37783..7dccc674204b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -27,6 +27,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR; import static org.apache.dolphinscheduler.common.constants.Constants.SESSION_USER; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; @@ -104,6 +106,7 @@ public class SchedulerController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_SCHEDULE_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_CREATE) public Result createSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode") long processDefinitionCode, @@ -161,6 +164,7 @@ public Result createSchedule(@Parameter(hidden = true) 
@RequestAttribute(value = @PutMapping("/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_SCHEDULE_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_UPDATE) public Result updateSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "id") Integer id, @@ -185,6 +189,7 @@ public Result updateSchedule(@Parameter(hidden = true) @RequestAttribute(value = }) @PostMapping("/{id}/online") @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_ONLINE) public Result publishScheduleOnline(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { @@ -198,6 +203,7 @@ public Result publishScheduleOnline(@Parameter(hidden = true) @RequestA }) @PostMapping("/{id}/offline") @ApiException(OFFLINE_SCHEDULE_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_OFFLINE) public Result offlineSchedule(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { @@ -253,6 +259,7 @@ public Result queryScheduleListPaging(@Parameter(hidden = true) @RequestAttribut @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_SCHEDULE_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_DELETE) public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { @@ -325,6 +332,7 @@ public Result previewSchedule(@Parameter(hidden = true) @RequestAttribute(value 
@PutMapping("/update/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_SCHEDULE_ERROR) + @OperatorLog(auditType = AuditType.SCHEDULE_UPDATE) public Result updateScheduleByProcessDefinitionCode(@Parameter(hidden = true) @RequestAttribute(value = SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long processDefinitionCode, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java index 458f74725581..fc819c284d30 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java @@ -28,6 +28,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_TASK_DEFINITION_VERSION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TASK_DEFINITION_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskDefinitionService; @@ -88,6 +90,7 @@ public class TaskDefinitionController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_TASK_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.TASK_CREATE) public Result createTaskDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "taskDefinitionJson", required = true) 
String taskDefinitionJson) { @@ -116,6 +119,7 @@ public Result createTaskDefinition(@Parameter(hidden = true) @RequestAttribute(v @PostMapping("/save-single") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_TASK_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.TASK_CREATE) public Result createTaskBindsWorkFlow(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam(value = "processDefinitionCode", required = true) long processDefinitionCode, @@ -144,6 +148,7 @@ public Result createTaskBindsWorkFlow(@Parameter(hidden = true) @RequestAttribut @PutMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_TASK_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.TASK_UPDATE) public Result updateTaskDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code, @@ -173,6 +178,7 @@ public Result updateTaskDefinition(@Parameter(hidden = true) @RequestAttribute(v @PutMapping(value = "/{code}/with-upstream") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_TASK_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.TASK_UPDATE) public Result updateTaskWithUpstream(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code, @@ -229,6 +235,7 @@ public Result queryTaskDefinitionVersions(@Parameter(hidden = true) @RequestAttr @GetMapping(value = "/{code}/versions/{version}") @ResponseStatus(HttpStatus.OK) @ApiException(SWITCH_TASK_DEFINITION_VERSION_ERROR) + @OperatorLog(auditType = 
AuditType.TASK_SWITCH_VERSION) public Result switchTaskDefinitionVersion(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code, @@ -254,6 +261,7 @@ public Result switchTaskDefinitionVersion(@Parameter(hidden = true) @RequestAttr @DeleteMapping(value = "/{code}/versions/{version}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_TASK_DEFINITION_VERSION_ERROR) + @OperatorLog(auditType = AuditType.TASK_DELETE_VERSION) public Result deleteTaskDefinitionVersion(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code, @@ -278,6 +286,7 @@ public Result deleteTaskDefinitionVersion(@Parameter(hidden = true) @RequestAttr @DeleteMapping(value = "/{code}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_TASK_DEFINE_BY_CODE_ERROR) + @OperatorLog(auditType = AuditType.TASK_DELETE) public Result deleteTaskDefinitionByCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code") long code) { @@ -384,6 +393,7 @@ public Result genTaskCodeList(@Parameter(hidden = true) @RequestAttribute(value @PostMapping(value = "/{code}/release") @ResponseStatus(HttpStatus.OK) @ApiException(RELEASE_TASK_DEFINITION_ERROR) + @OperatorLog(auditType = AuditType.TASK_RELEASE) public Result releaseTaskDefinition(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "code", required = 
true) long code, diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java index 5d8735fbaa2c..2f99f098edbf 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskGroupController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.START_TASK_GROUP_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TASK_GROUP_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskGroupQueueService; import org.apache.dolphinscheduler.api.service.TaskGroupService; @@ -81,6 +83,7 @@ public class TaskGroupController extends BaseController { @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_CREATE) public Result createTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam(value = "projectCode", required = false, defaultValue = "0") Long projectCode, @@ -112,6 +115,7 @@ public Result createTaskGroup(@Parameter(hidden = true) @RequestAttribute(value @PostMapping(value = "/update") @ResponseStatus(HttpStatus.CREATED) @ApiException(UPDATE_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_UPDATE) public Result updateTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") Integer id, @RequestParam("name") String name, @@ -214,6 +218,7 @@ public Result 
queryTaskGroupByCode(@Parameter(hidden = true) @RequestAttribute(v @PostMapping(value = "/close-task-group") @ResponseStatus(HttpStatus.CREATED) @ApiException(CLOSE_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_CLOSE) public Result closeTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id", required = false) Integer id) { @@ -235,6 +240,7 @@ public Result closeTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = @PostMapping(value = "/start-task-group") @ResponseStatus(HttpStatus.CREATED) @ApiException(START_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_START) public Result startTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id", required = false) Integer id) { Map result = taskGroupService.startTaskGroup(loginUser, id); @@ -255,9 +261,10 @@ public Result startTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = @PostMapping(value = "/forceStart") @ResponseStatus(HttpStatus.CREATED) @ApiException(START_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_START) public Result forceStart(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "queueId") Integer queueId) { - Map result = taskGroupService.forceStartTask(loginUser, queueId); + @RequestParam(value = "queueId") Integer id) { + Map result = taskGroupService.forceStartTask(loginUser, id); return returnDataList(result); } @@ -276,6 +283,7 @@ public Result forceStart(@Parameter(hidden = true) @RequestAttribute(value = Con @PostMapping(value = "/modifyPriority") @ResponseStatus(HttpStatus.CREATED) @ApiException(START_TASK_GROUP_ERROR) + @OperatorLog(auditType = AuditType.TASK_GROUP_MODIFY) public Result modifyPriority(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = 
"queueId") Integer queueId, @RequestParam(value = "priority") Integer priority) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index 2aa2a5eb7256..e0055595c9b1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -23,6 +23,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.TASK_SAVEPOINT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.TASK_STOP_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.dto.taskInstance.TaskInstanceRemoveCacheResponse; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskInstanceService; @@ -150,10 +152,12 @@ public Result queryTaskListPaging(@Parameter(hidden = true) @RequestAttribute(va @PostMapping(value = "/{id}/force-success") @ResponseStatus(HttpStatus.OK) @ApiException(FORCE_TASK_SUCCESS_ERROR) - public Result forceTaskSuccess(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @Schema(name = "projectCode", required = true) @PathVariable long projectCode, - @PathVariable(value = "id") Integer id) { - return taskInstanceService.forceTaskSuccess(loginUser, projectCode, id); + @OperatorLog(auditType = AuditType.TASK_INSTANCE_FORCE_SUCCESS) + public Result forceTaskSuccess(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @Schema(name = "projectCode", required = true) @PathVariable long projectCode, + @PathVariable(value = "id") Integer id) { + taskInstanceService.forceTaskSuccess(loginUser, 
projectCode, id); + return Result.success(); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java index 7ae22f895c55..4528024d1068 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -24,6 +24,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TENANT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_OS_TENANT_CODE_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -83,6 +85,7 @@ public class TenantController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_TENANT_ERROR) + @OperatorLog(auditType = AuditType.TENANT_CREATE) public Result createTenant(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "queueId") int queueId, @@ -155,6 +158,7 @@ public Result> queryTenantList(@Parameter(hidden = true) @RequestAt @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_TENANT_ERROR) + @OperatorLog(auditType = AuditType.TENANT_UPDATE) public Result updateTenant(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id, @RequestParam(value = "tenantCode") String tenantCode, @@ -179,6 +183,7 @@ public Result updateTenant(@Parameter(hidden = true) @RequestAttribute( @DeleteMapping(value = 
"/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_TENANT_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.TENANT_DELETE) public Result deleteTenantById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable(value = "id") int id) throws Exception { tenantService.deleteTenantById(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index cc40c6a98c71..546ec675c962 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -24,7 +24,6 @@ import static org.apache.dolphinscheduler.api.enums.Status.GRANT_DATASOURCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.GRANT_K8S_NAMESPACE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.GRANT_PROJECT_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.GRANT_UDF_FUNCTION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_USER_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.REVOKE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_USER_ERROR; @@ -32,6 +31,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.USER_LIST_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_USERNAME_ERROR; +import org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.UsersService; @@ -88,6 +89,7 @@ public class UsersController extends BaseController { @PostMapping(value = 
"/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_USER_ERROR) + @OperatorLog(auditType = AuditType.USER_CREATE) public Result createUser(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "userName") String userName, @RequestParam(value = "userPassword") String userPassword, @@ -109,9 +111,9 @@ public Result createUser(@Parameter(hidden = true) @RequestAttribute(value = Con * query user list paging * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search avlue - * @param pageSize page size + * @param pageSize page size * @return user list page */ @Operation(summary = "queryUserList", description = "QUERY_USER_LIST_NOTES") @@ -135,14 +137,14 @@ public Result queryUserList(@Parameter(hidden = true) @RequestAttribute(value = /** * update user * - * @param loginUser login user - * @param id user id - * @param userName user name + * @param loginUser login user + * @param id user id + * @param userName user name * @param userPassword user password - * @param email email - * @param tenantId tennat id - * @param phone phone - * @param queue queue + * @param email email + * @param tenantId tennat id + * @param phone phone + * @param queue queue * @return update result code */ @Operation(summary = "updateUser", description = "UPDATE_USER_NOTES") @@ -159,6 +161,7 @@ public Result queryUserList(@Parameter(hidden = true) @RequestAttribute(value = @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_USER_ERROR) + @OperatorLog(auditType = AuditType.USER_UPDATE) public Result updateUser(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id, @RequestParam(value = "userName") String userName, @@ -186,7 +189,7 @@ public Result updateUser(@Parameter(hidden = true) @RequestAttribute(value * delete user by id * * @param loginUser login user - * 
@param id user id + * @param id user id * @return delete result code */ @Operation(summary = "delUserById", description = "DELETE_USER_BY_ID_NOTES") @@ -196,6 +199,7 @@ public Result updateUser(@Parameter(hidden = true) @RequestAttribute(value @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_USER_BY_ID_ERROR) + @OperatorLog(auditType = AuditType.USER_DELETE) public Result delUserById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id) throws Exception { Map result = usersService.deleteUserById(loginUser, id); @@ -205,8 +209,8 @@ public Result delUserById(@Parameter(hidden = true) @RequestAttribute(value = Co /** * revoke project By Id * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectIds project id array * @return revoke result code */ @@ -228,8 +232,8 @@ public Result revokeProjectById(@Parameter(hidden = true) @RequestAttribute(valu /** * grant project with read permission * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectIds project id array * @return grant result code */ @@ -251,8 +255,8 @@ public Result grantProjectWithReadPerm(@Parameter(hidden = true) @RequestAttribu /** * grant project * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectIds project id array * @return grant result code */ @@ -274,8 +278,8 @@ public Result grantProject(@Parameter(hidden = true) @RequestAttribute(value = C /** * grant project by code * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectCode project code * @return grant result code */ @@ -297,9 +301,9 @@ public Result grantProjectByCode(@Parameter(hidden = true) @RequestAttribute(val /** * 
revoke project * - * @param loginUser login user - * @param userId user id - * @param projectCode project code + * @param loginUser login user + * @param userId user id + * @param projectCode project code * @return revoke result code */ @Operation(summary = "revokeProject", description = "REVOKE_PROJECT_NOTES") @@ -317,34 +321,11 @@ public Result revokeProject(@Parameter(hidden = true) @RequestAttribute(value = return returnDataList(result); } - /** - * grant udf function - * - * @param loginUser login user - * @param userId user id - * @param udfIds udf id array - * @return grant result code - */ - @Operation(summary = "grantUDFFunc", description = "GRANT_UDF_FUNC_NOTES") - @Parameters({ - @Parameter(name = "userId", description = "USER_ID", required = true, schema = @Schema(implementation = int.class, example = "100")), - @Parameter(name = "udfIds", description = "UDF_IDS", required = true, schema = @Schema(implementation = String.class)) - }) - @PostMapping(value = "/grant-udf-func") - @ResponseStatus(HttpStatus.OK) - @ApiException(GRANT_UDF_FUNCTION_ERROR) - public Result grantUDFFunc(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "udfIds") String udfIds) { - Map result = usersService.grantUDFFunction(loginUser, userId, udfIds); - return returnDataList(result); - } - /** * grant namespace * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param namespaceIds namespace id array * @return grant result code */ @@ -366,8 +347,8 @@ public Result grantNamespace(@Parameter(hidden = true) @RequestAttribute(value = /** * grant datasource * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param datasourceIds data source id array * @return grant result code */ @@ -434,7 +415,7 @@ public Result 
listAll(@RequestAttribute(value = Constants.SESSION_USER) User log * verify username * * @param loginUser login user - * @param userName user name + * @param userName user name * @return true if user name not exists, otherwise return false */ @Operation(summary = "verifyUserName", description = "VERIFY_USER_NAME_NOTES") @@ -452,7 +433,7 @@ public Result verifyUserName(@Parameter(hidden = true) @RequestAttribute(value = /** * unauthorized user * - * @param loginUser login user + * @param loginUser login user * @param alertgroupId alert group id * @return unauthorize result code */ @@ -472,7 +453,7 @@ public Result unauthorizedUser(@Parameter(hidden = true) @RequestAttribute(value /** * authorized user * - * @param loginUser login user + * @param loginUser login user * @param alertgroupId alert group id * @return authorized result code */ @@ -497,10 +478,10 @@ public Result authorizedUser(@Parameter(hidden = true) @RequestAttribute(value = /** * user registry * - * @param userName user name - * @param userPassword user password + * @param userName user name + * @param userPassword user password * @param repeatPassword repeat password - * @param email user email + * @param email user email */ @Operation(summary = "registerUser", description = "REGISTER_USER_NOTES") @Parameters({ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java index 8c332a7bbbb5..8d025bd873b7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java @@ -22,6 +22,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKER_GROUP_FAIL; import static org.apache.dolphinscheduler.api.enums.Status.SAVE_ERROR; +import 
org.apache.dolphinscheduler.api.audit.OperatorLog; +import org.apache.dolphinscheduler.api.audit.enums.AuditType; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.WorkerGroupService; import org.apache.dolphinscheduler.api.utils.Result; @@ -80,6 +82,7 @@ public class WorkerGroupController extends BaseController { @PostMapping() @ResponseStatus(HttpStatus.OK) @ApiException(SAVE_ERROR) + @OperatorLog(auditType = AuditType.WORKER_GROUP_CREATE) public Result saveWorkerGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id", required = false, defaultValue = "0") int id, @RequestParam(value = "name") String name, @@ -147,6 +150,7 @@ public Result queryAllWorkerGroups(@Parameter(hidden = true) @RequestAttribute(v @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_WORKER_GROUP_FAIL) + @OperatorLog(auditType = AuditType.WORKER_GROUP_DELETE) public Result deleteWorkerGroupById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable("id") Integer id) { Map result = workerGroupService.deleteWorkerGroupById(loginUser, id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java index 3e3a87681b69..ea767f0fa33a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2Controller.java @@ -167,8 +167,8 @@ public Result stopTask(@Parameter(hidden = true) @RequestAttribute(value public TaskInstanceSuccessResponse forceTaskSuccess(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, 
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "id") Integer id) { - Result result = taskInstanceService.forceTaskSuccess(loginUser, projectCode, id); - return new TaskInstanceSuccessResponse(result); + taskInstanceService.forceTaskSuccess(loginUser, projectCode, id); + return new TaskInstanceSuccessResponse(Result.success()); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/AuditDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/AuditDto.java index 437a2f8b8a75..0b36325b44c0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/AuditDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/AuditDto.java @@ -19,55 +19,26 @@ import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter public class AuditDto { private String userName; - private String resource; - - private String operation; - - private Date time; - - private String resourceName; + private String modelType; - public String getUserName() { - return userName; - } + private String modelName; - public void setUserName(String userName) { - this.userName = userName; - } - - public String getResource() { - return resource; - } - - public void setResource(String resource) { - this.resource = resource; - } - - public String getOperation() { - return operation; - } - - public void setOperation(String operation) { - this.operation = operation; - } + private String operation; - public Date getTime() { - return time; - } + private Date createTime; - public void setTime(Date time) { - this.time = time; - } + private String description; - public String getResourceName() { - return resourceName; - } + private String detail; - public void setResourceName(String resourceName) { - this.resourceName = resourceName; - } + private String latency; } diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditModelTypeDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditModelTypeDto.java new file mode 100644 index 000000000000..d71836cee470 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditModelTypeDto.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.auditLog; + +import org.apache.dolphinscheduler.common.enums.AuditModelType; + +import java.util.ArrayList; +import java.util.List; + +import lombok.Data; + +@Data +public class AuditModelTypeDto { + + private String name; + + private List child = null; + + public static List getModelTypeDtoList() { + List dtoList = new ArrayList<>(); + transFromEnumListToDto(dtoList, AuditModelType.getAuditModelTreeList()); + return dtoList; + } + + public static List transFromEnumListToDto(List dtoList, + List objectTypeList) { + for (AuditModelType operationType : objectTypeList) { + dtoList.add(transFromEnumToDto(operationType)); + } + + return dtoList; + } + + public static AuditModelTypeDto transFromEnumToDto(AuditModelType operationType) { + AuditModelTypeDto dto = new AuditModelTypeDto(); + dto.setName(operationType.getName()); + + if (!operationType.getChild().isEmpty()) { + dto.setChild(transFromEnumListToDto(new ArrayList<>(), operationType.getChild())); + } + + return dto; + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/common/UiChannelFactory.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditOperationTypeDto.java similarity index 56% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/common/UiChannelFactory.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditOperationTypeDto.java index 8b89215049b1..2e9f7de36518 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/common/UiChannelFactory.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/auditLog/AuditOperationTypeDto.java @@ -15,29 +15,28 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.spi.common; +package org.apache.dolphinscheduler.api.dto.auditLog; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; +import java.util.ArrayList; import java.util.List; -public interface UiChannelFactory { - - /** - * plugin name - * Must be UNIQUE . - * This alert plugin name eg: email , message ... - * Name can often be displayed on the page ui eg : email , message , MR , spark , hive ... - * - * @return this alert plugin name - */ - String getName(); - - /** - * Returns the configurable parameters that this plugin needs to display on the web ui - * - * @return this alert plugin params - */ - List getParams(); +import lombok.Data; +@Data +public class AuditOperationTypeDto { + + private String name; + + public static List getOperationTypeDtoList() { + List dtoList = new ArrayList<>(); + for (AuditOperationType operationType : AuditOperationType.getOperationList()) { + AuditOperationTypeDto dto = new AuditOperationTypeDto(); + dto.setName(operationType.getName()); + dtoList.add(dto); + } + + return dtoList; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceCreateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceCreateRequest.java new file mode 100644 index 000000000000..e4e0f2fa95b5 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceCreateRequest.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +public abstract class AbstractResourceCreateRequest { + + private User loginUser; + private String parentAbsoluteDirectory; + private ResourceType type; + +} diff --git a/dolphinscheduler-ui/src/views/resource/udf/resource/index.tsx b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceDto.java similarity index 76% rename from dolphinscheduler-ui/src/views/resource/udf/resource/index.tsx rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceDto.java index 0d78ade01fde..6314c8542c7f 100644 --- a/dolphinscheduler-ui/src/views/resource/udf/resource/index.tsx +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/AbstractResourceDto.java @@ -15,13 +15,15 @@ * limitations under the License. 
*/ -import { defineComponent } from 'vue' -import ResourceListModal from '../../components/resource' +package org.apache.dolphinscheduler.api.dto.resources; -export default defineComponent({ - name: 'resource-manage', - setup() {}, - render() { - return - } -}) +import lombok.Data; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +public abstract class AbstractResourceDto { + + private String resourceAbsolutePath; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryDto.java new file mode 100644 index 000000000000..14b8eaeac210 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryDto.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class CreateDirectoryDto { + + private User loginUser; + + private String directoryAbsolutePath; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryRequest.java new file mode 100644 index 000000000000..7719fffb66e1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateDirectoryRequest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CreateDirectoryRequest { + + private User loginUser; + + private ResourceType type; + + private String parentAbsoluteDirectory; + + private String directoryName; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileDto.java new file mode 100644 index 000000000000..da8a37dc455c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileDto.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import org.springframework.web.multipart.MultipartFile; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CreateFileDto { + + private User loginUser; + + private String fileAbsolutePath; + + private MultipartFile file; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentDto.java new file mode 100644 index 000000000000..a8faf8b0aa4a --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentDto.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CreateFileFromContentDto { + + private User loginUser; + + private String fileAbsolutePath; + + private String fileContent; + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentRequest.java similarity index 60% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentRequest.java index 1504fdd367bb..4b424dbd8924 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/WorkerServer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileFromContentRequest.java @@ -15,47 +15,26 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.entity; +package org.apache.dolphinscheduler.api.dto.resources; -import java.util.Date; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; +import lombok.NoArgsConstructor; @Data -public class WorkerServer { +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CreateFileFromContentRequest { - /** - * id - */ - private int id; + private User loginUser; + private ResourceType type; + private String parentAbsoluteDirectory; + private String fileName; + private String fileContent; - /** - * host - */ - private String host; - - /** - * port - */ - private int port; - - /** - * zookeeper directory - */ - private String zkDirectory; - - /** - * resource info - */ - private String resInfo; - - /** - * create time - */ - private Date createTime; - - /** - * last heart beat time - */ - private Date lastHeartbeatTime; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileRequest.java new file mode 100644 index 000000000000..c72ffa97c405 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/CreateFileRequest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import org.springframework.web.multipart.MultipartFile; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CreateFileRequest { + + private User loginUser; + private ResourceType type; + private String parentAbsoluteDirectory; + private String fileName; + private MultipartFile file; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceDto.java new file mode 100644 index 000000000000..c79cd0d59aa5 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceDto.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class DeleteResourceDto { + + private User loginUser; + private String resourceAbsolutePath; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceRequest.java new file mode 100644 index 000000000000..3af948d51252 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DeleteResourceRequest.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class DeleteResourceRequest { + + private User loginUser; + private String resourceAbsolutePath; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileDto.java new file mode 100644 index 000000000000..fe5f0721cb25 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileDto.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class DownloadFileDto { + + private User loginUser; + + private String fileAbsolutePath; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileRequest.java new file mode 100644 index 000000000000..1f28a162bef0 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/DownloadFileRequest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class DownloadFileRequest { + + private User loginUser; + + private String fileAbsolutePath; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentDto.java new file mode 100644 index 000000000000..106cbe142f04 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentDto.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class FetchFileContentDto { + + private User loginUser; + private String resourceFileAbsolutePath; + private int skipLineNum; + private int limit; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentRequest.java new file mode 100644 index 000000000000..8357317e8540 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FetchFileContentRequest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class FetchFileContentRequest { + + private User loginUser; + private String resourceFileAbsolutePath; + private int skipLineNum; + private int limit; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/PagingResourceItemRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/PagingResourceItemRequest.java new file mode 100644 index 000000000000..49d720bca353 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/PagingResourceItemRequest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class PagingResourceItemRequest { + + private User loginUser; + + private String resourceAbsolutePath; + + private ResourceType resourceType; + + private String resourceNameKeyWord; + + Integer pageNo; + + Integer pageSize; + + public void checkPageNoAndPageSize() { + if (pageNo <= 0) { + throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.PAGE_NUMBER); + } + if (pageSize <= 0) { + throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.PAGE_SIZE); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/QueryResourceDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/QueryResourceDto.java new file mode 100644 index 000000000000..7778ca50f8a1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/QueryResourceDto.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class QueryResourceDto { + + private List resourceAbsolutePaths; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryDto.java new file mode 100644 index 000000000000..29899479223f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryDto.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RenameDirectoryDto { + + private User loginUser; + + private String originDirectoryAbsolutePath; + + private String targetDirectoryAbsolutePath; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryRequest.java new file mode 100644 index 000000000000..640acb677dfb --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameDirectoryRequest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RenameDirectoryRequest { + + private User loginUser; + + private String directoryAbsolutePath; + + private String newDirectoryName; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileDto.java new file mode 100644 index 000000000000..091febad0e4d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileDto.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RenameFileDto { + + private User loginUser; + + private String originFileAbsolutePath; + + private String targetFileAbsolutePath; +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileRequest.java new file mode 100644 index 000000000000..1b0c56be3b6d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/RenameFileRequest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class RenameFileRequest { + + private User loginUser; + + private String fileAbsolutePath; + + private String newFileName; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java index 4487c5a080dd..2aa11a6ce9e7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java @@ -25,34 +25,13 @@ import lombok.Data; import lombok.NoArgsConstructor; -import com.fasterxml.jackson.annotation.JsonPropertyOrder; - /** * resource component */ @Data @NoArgsConstructor -@JsonPropertyOrder({"id", "pid", "name", "fullName", "description", "isDirctory", "children", "type"}) public abstract class ResourceComponent { - public ResourceComponent(int id, String pid, String name, String fullName, String description, boolean isDirctory) { - this.id = id; - this.pid = pid; - this.name = name; - this.fullName = fullName; - this.isDirctory = isDirctory; - int directoryFlag = isDirctory ? 
1 : 0; - this.idValue = String.format("%s_%s", id, directoryFlag); - } - - /** - * id - */ - protected int id; - /** - * parent id - */ - protected String pid; /** * name */ @@ -73,10 +52,7 @@ public ResourceComponent(int id, String pid, String name, String fullName, Strin * is directory */ protected boolean isDirctory; - /** - * id value - */ - protected String idValue; + /** * resoruce type */ @@ -88,14 +64,11 @@ public ResourceComponent(int id, String pid, String name, String fullName, Strin /** * add resource component + * * @param resourceComponent resource component */ public void add(ResourceComponent resourceComponent) { children.add(resourceComponent); } - public void setIdValue(int id, boolean isDirctory) { - int directoryFlag = isDirctory ? 1 : 0; - this.idValue = String.format("%s_%s", id, directoryFlag); - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileDto.java new file mode 100644 index 000000000000..9213ac04d163 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileDto.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import org.springframework.web.multipart.MultipartFile; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UpdateFileDto { + + private User loginUser; + + private String fileAbsolutePath; + + private MultipartFile file; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentDto.java new file mode 100644 index 000000000000..0848cd8f2bfa --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentDto.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UpdateFileFromContentDto { + + private User loginUser; + + private String fileAbsolutePath; + + private String fileContent; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentRequest.java new file mode 100644 index 000000000000..a9564a4b2544 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileFromContentRequest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UpdateFileFromContentRequest { + + private User loginUser; + + private String fileAbsolutePath; + + private String fileContent; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileRequest.java new file mode 100644 index 000000000000..10fbc6570087 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/UpdateFileRequest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto.resources; + +import org.apache.dolphinscheduler.dao.entity.User; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +import org.springframework.web.multipart.MultipartFile; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UpdateFileRequest { + + private User loginUser; + + private String fileAbsolutePath; + + private MultipartFile file; + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java index 6c88cd84a8f1..1d24610af055 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java @@ -122,12 +122,8 @@ private static ResourceComponent getResourceComponent(StorageEntity resource) { tempResourceComponent = new FileLeaf(); } - tempResourceComponent.setName(resource.getAlias()); - // tempResourceComponent.setFullName(resource.getFullName().replaceFirst("/","")); + tempResourceComponent.setName(resource.getFileName()); tempResourceComponent.setFullName(resource.getFullName()); - tempResourceComponent.setId(resource.getId()); - tempResourceComponent.setPid(resource.getPfullName()); - tempResourceComponent.setIdValue(resource.getId(), resource.isDirectory()); tempResourceComponent.setType(resource.getType()); return tempResourceComponent; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskCreateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskCreateRequest.java index 1e651ee6e345..bb362962fa9b 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskCreateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskCreateRequest.java @@ -19,11 +19,11 @@ import static org.apache.dolphinscheduler.common.constants.Constants.VERSION_FIRST; -import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TimeoutFlag; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskTimeoutStrategy; import java.util.Date; @@ -107,7 +107,7 @@ public TaskDefinition convert2TaskDefinition() { taskDefinition.setProjectCode(this.projectCode); taskDefinition.setTaskType(this.taskType); taskDefinition.setTaskParams(this.taskParams); - taskDefinition.setWorkerGroup(this.workerGroup == null ? 
Constants.DEFAULT_WORKER_GROUP : this.workerGroup); + taskDefinition.setWorkerGroup(WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup)); taskDefinition.setEnvironmentCode(this.environmentCode); taskDefinition.setFailRetryTimes(this.failRetryTimes); taskDefinition.setFailRetryInterval(this.failRetryInterval); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskUpdateRequest.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskUpdateRequest.java index b5a8ea7a46fa..d7b026ed160e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskUpdateRequest.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/task/TaskUpdateRequest.java @@ -25,10 +25,10 @@ import org.apache.commons.beanutils.BeanUtils; -import java.lang.reflect.InvocationTargetException; import java.util.Date; import lombok.Data; +import lombok.SneakyThrows; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; @@ -107,7 +107,8 @@ public class TaskUpdateRequest { * @param taskDefinition exists task definition object * @return task definition */ - public TaskDefinition mergeIntoTaskDefinition(TaskDefinition taskDefinition) throws InvocationTargetException, IllegalAccessException, InstantiationException, NoSuchMethodException { + @SneakyThrows + public TaskDefinition mergeIntoTaskDefinition(TaskDefinition taskDefinition) { TaskDefinition taskDefinitionDeepCopy = (TaskDefinition) BeanUtils.cloneBean(taskDefinition); assert taskDefinitionDeepCopy != null; if (this.name != null) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 113ccb6bd1dd..243b554aafc3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -93,17 +93,9 @@ public enum Status { RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"), EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"), DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"), - CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"), - VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"), - UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"), - QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"), QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"), - VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"), - DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"), AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"), AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"), - UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"), - AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"), CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"), UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"), PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"), @@ -124,7 +116,6 @@ public enum Status { DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"), GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"), GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"), - GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"), GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"), GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"), USER_LIST_ERROR(10099, "user list 
error", "查询用户列表错误"), @@ -298,19 +289,17 @@ public enum Status { QUERY_PROJECT_PREFERENCE_ERROR(10302, "query project preference error", "查询项目偏好设置错误"), UPDATE_PROJECT_PREFERENCE_STATE_ERROR(10303, "Failed to update the state of the project preference", "更新项目偏好设置错误"), - UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), - UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"), RESOURCE_EXIST(20005, "resource already exists", "资源已存在"), RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"), RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"), RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"), - UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"), + HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists !", "资源文件[{0}]不存在"), - UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"), + RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"), PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"), @@ -323,6 +312,8 @@ public enum Status { REMOVE_TASK_INSTANCE_CACHE_ERROR(20019, "remove task instance cache error", "删除任务实例缓存错误"), + ILLEGAL_RESOURCE_PATH(20020, "Resource file [{0}] is illegal", "非法的资源路径[{0}]"), + USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"), USER_NO_WRITE_PROJECT_PERM(30003, "user [{0}] does not have write permission for project 
[{1}]", diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java index 3c978795ab4b..9ab2d29f6aca 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java @@ -44,7 +44,7 @@ public Result exceptionHandler(ServiceException e, HandlerMethod hm) { @ExceptionHandler(Throwable.class) public Result exceptionHandler(Throwable e, HandlerMethod hm) { ApiException ce = hm.getMethodAnnotation(ApiException.class); - log.error("Meet en unknown exception: ", e); + log.error("Meet an unknown exception: ", e); if (ce == null) { return Result.errorWithArgs(Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage()); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java index 2fa3e01a1c9c..a0f8162fcf23 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java @@ -52,4 +52,8 @@ public ServiceException(int code, String message, Exception cause) { this.code = code; } + public ServiceException(String message, Exception exception) { + this(Status.INTERNAL_SERVER_ERROR_ARGS, message, exception); + } + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/executor/workflow/instance/pause/recover/RecoverExecuteFunction.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/executor/workflow/instance/pause/recover/RecoverExecuteFunction.java index 149e1abd29b7..34bd2561b146 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/executor/workflow/instance/pause/recover/RecoverExecuteFunction.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/executor/workflow/instance/pause/recover/RecoverExecuteFunction.java @@ -43,7 +43,7 @@ public RecoverExecuteFunction(CommandService commandService) { @Override public RecoverExecuteResult execute(RecoverExecuteRequest request) throws ExecuteRuntimeException { ProcessInstance workflowInstance = request.getWorkflowInstance(); - if (!workflowInstance.getState().isPause()) { + if (!(workflowInstance.getState().isPause() || workflowInstance.getState().isStop())) { throw new ExecuteRuntimeException( String.format("The workflow instance: %s state is %s, cannot recovery", workflowInstance.getName(), workflowInstance.getState())); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java index 355c2257b9d2..50e5bd419598 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/permission/ResourcePermissionCheckServiceImpl.java @@ -46,7 +46,6 @@ import org.apache.dolphinscheduler.dao.entity.Queue; import org.apache.dolphinscheduler.dao.entity.TaskGroup; import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; @@ -59,7 +58,6 @@ import org.apache.dolphinscheduler.dao.mapper.QueueMapper; import org.apache.dolphinscheduler.dao.mapper.TaskGroupMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; 
-import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -201,32 +199,6 @@ public Set listAuthorizedResourceIds(int userId, Logger logger) { } } - @Component - public static class UdfFuncPermissionCheck implements ResourceAcquisitionAndPermissionCheck { - - private final UdfFuncMapper udfFuncMapper; - - public UdfFuncPermissionCheck(UdfFuncMapper udfFuncMapper) { - this.udfFuncMapper = udfFuncMapper; - } - - @Override - public List authorizationTypes() { - return Collections.singletonList(AuthorizationType.UDF); - } - - @Override - public Set listAuthorizedResourceIds(int userId, Logger logger) { - List udfFuncList = udfFuncMapper.listAuthorizedUdfByUserId(userId); - return udfFuncList.stream().map(UdfFunc::getId).collect(toSet()); - } - - @Override - public boolean permissionCheck(int userId, String permissionKey, Logger logger) { - return true; - } - } - @Component public static class TaskGroupPermissionCheck implements ResourceAcquisitionAndPermissionCheck { @@ -481,6 +453,7 @@ interface ResourceAcquisitionAndPermissionCheck { /** * authorization types + * * @return */ List authorizationTypes(); @@ -495,6 +468,7 @@ interface ResourceAcquisitionAndPermissionCheck { /** * permission check + * * @param userId * @return */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java index 1e4e1f5aa099..a2ad908e4723 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java @@ -31,14 +31,12 @@ import org.apache.dolphinscheduler.api.service.TaskDefinitionService; import org.apache.dolphinscheduler.api.service.TenantService; import 
org.apache.dolphinscheduler.api.service.UsersService; -import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.ComplementDependentMode; import org.apache.dolphinscheduler.common.enums.ExecutionOrder; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ProcessExecutionTypeEnum; -import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.enums.TaskDependType; @@ -184,7 +182,7 @@ public Map getCodeAndVersion(String projectName, String processDef Map result = new HashMap<>(); // project do not exists, mean task not exists too, so we should directly return init value if (project == null) { - result.put("code", CodeGenerateUtils.getInstance().genCode()); + result.put("code", CodeGenerateUtils.genCode()); result.put("version", 0L); return result; } @@ -194,7 +192,7 @@ public Map getCodeAndVersion(String projectName, String processDef // In the case project exists, but current workflow still not created, we should also return the init // version of it if (processDefinition == null) { - result.put("code", CodeGenerateUtils.getInstance().genCode()); + result.put("code", CodeGenerateUtils.genCode()); result.put("version", 0L); return result; } @@ -202,7 +200,7 @@ public Map getCodeAndVersion(String projectName, String processDef TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), processDefinition.getCode(), taskName); if (taskDefinition == null) { - result.put("code", CodeGenerateUtils.getInstance().genCode()); + result.put("code", CodeGenerateUtils.genCode()); result.put("version", 0L); } else { result.put("code", taskDefinition.getCode()); @@ -216,22 +214,22 
@@ public Map getCodeAndVersion(String projectName, String processDef * If workflow do not exists in Project=`projectCode` would create a new one * If workflow already exists in Project=`projectCode` would update it * - * @param userName user name who create or update workflow - * @param projectName project name which workflow belongs to - * @param name workflow name - * @param description description - * @param globalParams global params - * @param schedule schedule for workflow, will not set schedule if null, - * and if would always fresh exists schedule if not null - * @param onlineSchedule Whether set the workflow's schedule to online state - * @param warningType warning type - * @param warningGroupId warning group id - * @param timeout timeout for workflow working, if running time longer than timeout, - * task will mark as fail - * @param workerGroup run task in which worker group - * @param taskRelationJson relation json for nodes + * @param userName user name who create or update workflow + * @param projectName project name which workflow belongs to + * @param name workflow name + * @param description description + * @param globalParams global params + * @param schedule schedule for workflow, will not set schedule if null, + * and if would always fresh exists schedule if not null + * @param onlineSchedule Whether set the workflow's schedule to online state + * @param warningType warning type + * @param warningGroupId warning group id + * @param timeout timeout for workflow working, if running time longer than timeout, + * task will mark as fail + * @param workerGroup run task in which worker group + * @param taskRelationJson relation json for nodes * @param taskDefinitionJson taskDefinitionJson - * @param otherParamsJson otherParamsJson handle other params + * @param otherParamsJson otherParamsJson handle other params * @return create result code */ public Long createOrUpdateWorkflow(String userName, @@ -300,8 +298,8 @@ public Long 
createOrUpdateWorkflow(String userName, /** * get workflow * - * @param user user who create or update schedule - * @param projectCode project which workflow belongs to + * @param user user who create or update schedule + * @param projectCode project which workflow belongs to * @param workflowName workflow name */ private ProcessDefinition getWorkflow(User user, long projectCode, String workflowName) { @@ -327,13 +325,13 @@ private ProcessDefinition getWorkflow(User user, long projectCode, String workfl * It would always use latest schedule define in workflow-as-code, and set schedule online when * it's not null * - * @param user user who create or update schedule - * @param projectCode project which workflow belongs to - * @param workflowCode workflow code - * @param schedule schedule expression + * @param user user who create or update schedule + * @param projectCode project which workflow belongs to + * @param workflowCode workflow code + * @param schedule schedule expression * @param onlineSchedule Whether set the workflow's schedule to online state - * @param workerGroup work group - * @param warningType warning type + * @param workerGroup work group + * @param warningType warning type * @param warningGroupId warning group id */ private void createOrUpdateSchedule(User user, @@ -512,7 +510,7 @@ public User deleteUser(String userName, int id) throws Exception { * it will return the datasource match the type. * * @param datasourceName datasource name of datasource - * @param type datasource type + * @param type datasource type */ public DataSource getDatasource(String datasourceName, String type) { @@ -545,8 +543,8 @@ public DataSource getDatasource(String datasourceName, String type) { * Get workflow object by given workflow name. It returns map contain workflow id, name, code. * Useful in Python API create subProcess task which need workflow information. 
* - * @param userName user who create or update schedule - * @param projectName project name which workflow belongs to + * @param userName user who create or update schedule + * @param projectName project name which workflow belongs to * @param workflowName workflow name */ public Map getWorkflowInfo(String userName, String projectName, @@ -577,9 +575,9 @@ public Map getWorkflowInfo(String userName, String projectName, * Get project, workflow, task code. * Useful in Python API create dependent task which need workflow information. * - * @param projectName project name which workflow belongs to + * @param projectName project name which workflow belongs to * @param workflowName workflow name - * @param taskName task name + * @param taskName task name */ public Map getDependentInfo(String projectName, String workflowName, String taskName) { Map result = new HashMap<>(); @@ -614,25 +612,22 @@ public Map getDependentInfo(String projectName, String workflowN * Get resource by given program type and full name. It returns map contain resource id, name. * Useful in Python API create flink or spark task which need workflow information. 
* - * @param programType program type one of SCALA, JAVA and PYTHON - * @param fullName full name of the resource + * @param fullName full name of the resource */ - public Map getResourcesFileInfo(String programType, String fullName) { + public Map getResourcesFileInfo(String fullName) { Map result = new HashMap<>(); - Result resources = resourceService.queryResourceByProgramType(dummyAdminUser, ResourceType.FILE, - ProgramType.valueOf(programType)); - List resourcesComponent = (List) resources.getData(); - List namedResources = - resourcesComponent.stream().filter(s -> fullName.equals(s.getFullName())).collect(Collectors.toList()); + List resourceComponents = + resourceService.queryResourceFiles(dummyAdminUser, ResourceType.FILE); + List namedResources = resourceComponents.stream() + .filter(s -> fullName.equals(s.getFullName())) + .collect(Collectors.toList()); if (CollectionUtils.isEmpty(namedResources)) { - String msg = - String.format("Can not find valid resource by program type %s and name %s", programType, fullName); + String msg = String.format("Can not find valid resource by name %s", fullName); log.error(msg); throw new IllegalArgumentException(msg); } - result.put("id", namedResources.get(0).getId()); result.put("name", namedResources.get(0).getName()); return result; } @@ -671,20 +666,6 @@ public String getGatewayVersion() { return PythonGateway.class.getPackage().getImplementationVersion(); } - /** - * create or update resource. - * If the folder is not already created, it will be - * - * @param userName user who create or update resource - * @param fullName The fullname of resource.Includes path and suffix. 
- * @param resourceContent content of resource - * @return StorageEntity object which contains necessary information about resource - */ - public StorageEntity createOrUpdateResource(String userName, String fullName, - String resourceContent) throws Exception { - return resourceService.createOrUpdateResource(userName, fullName, resourceContent); - } - @PostConstruct public void init() { if (apiConfig.getPythonGateway().isEnabled()) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AuditService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AuditService.java index 7b998dccce2b..8fb99eaf735e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AuditService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AuditService.java @@ -19,9 +19,7 @@ import org.apache.dolphinscheduler.api.dto.AuditDto; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.enums.AuditOperationType; -import org.apache.dolphinscheduler.common.enums.AuditResourceType; -import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.AuditLog; /** * audit information service @@ -29,30 +27,28 @@ public interface AuditService { /** - * add new audit record + * add audit object * - * @param user login user - * @param resourceType resource type - * @param resourceId resource id - * @param operation operation type + * @param auditLog auditLog */ - void addAudit(User user, AuditResourceType resourceType, Integer resourceId, AuditOperationType operation); + void addAudit(AuditLog auditLog); /** * query audit log list * - * @param loginUser login user - * @param resourceType resource type - * @param operationType operation type - * @param startTime start time - * @param endTime end time - * @param userName query user name - * @param pageNo page number - * @param pageSize page size - * 
@return audit log string + * @param modelTypes model types + * @param modelName model name + * @param operationTypes operation types + * @param startTime start time + * @param endTime end time + * @param userName query user name + * @param pageNo page number + * @param pageSize page size + * @return audit log string */ - PageInfo queryLogListPaging(User loginUser, AuditResourceType resourceType, - AuditOperationType operationType, String startTime, - String endTime, String userName, + PageInfo queryLogListPaging(String modelTypes, + String operationTypes, String startTime, + String endTime, String userName, String modelName, Integer pageNo, Integer pageSize); + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java index 351a3d5c9191..f312707c2c77 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java @@ -21,9 +21,12 @@ import org.apache.dolphinscheduler.api.dto.DefineUserDto; import org.apache.dolphinscheduler.api.dto.TaskCountDto; import org.apache.dolphinscheduler.api.dto.project.StatisticsStateRequest; +import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.vo.TaskInstanceCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowDefinitionCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowInstanceCountVO; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.User; import java.util.List; @@ -117,4 +120,7 @@ TaskCountDto countWorkflowStates(User loginUser, */ TaskCountDto countOneTaskStates(User loginUser, Long taskCode); + PageInfo listPendingCommands(User loginUser, Long projectCode, 
Integer pageNo, Integer pageSize); + + PageInfo listErrorCommand(User loginUser, Long projectCode, Integer pageNo, Integer pageSize); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 3166d3e71842..55ca697412f7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -30,7 +30,9 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.extract.master.dto.WorkflowExecuteDto; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; +import java.util.List; import java.util.Map; /** @@ -57,7 +59,7 @@ public interface ExecutorService { * @param environmentCode environment code * @param runMode run mode * @param timeout timeout - * @param startParams the global param values which pass to new process instance + * @param startParamList the global param values which pass to new process instance * @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode * @param executionOrder the execution order when complementing data * @return execute process instance code @@ -71,7 +73,7 @@ Map execProcessInstance(User loginUser, long projectCode, Priority processInstancePriority, String workerGroup, String tenantCode, Long environmentCode, Integer timeout, - Map startParams, Integer expectedParallelismNumber, + List startParamList, Integer expectedParallelismNumber, int dryRun, int testFlag, ComplementDependentMode complementDependentMode, Integer version, boolean allLevelDependent, ExecutionOrder executionOrder); diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java index c4a8d94a1f8d..f054fafdce7e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.model.WorkerServerModel; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.plugin.api.monitor.DatabaseMetrics; +import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import java.util.List; @@ -38,20 +38,10 @@ public interface MonitorService { List queryDatabaseState(User loginUser); /** - * query master list + * query server list * - * @param loginUser login user - * @return master information list + * @param nodeType RegistryNodeType + * @return server information list */ - List queryMaster(User loginUser); - - /** - * query worker list - * - * @param loginUser login user - * @return worker information list - */ - List queryWorker(User loginUser); - - List getServerListFromRegistry(boolean isMaster); + List listServer(RegistryNodeType nodeType); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 7bf49e001664..4df7a69da57d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -25,7 +25,6 @@ import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus; import 
org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; import java.io.IOException; import java.util.List; @@ -122,8 +121,6 @@ Map queryTaskListByProcessId(User loginUser, long projectCode, Integer processId) throws IOException; - Map parseLogForDependentResult(String log) throws IOException; - /** * query sub process instance detail info by task id * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectParameterService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectParameterService.java index ef984dfac414..af2b04748882 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectParameterService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectParameterService.java @@ -23,17 +23,17 @@ public interface ProjectParameterService { Result createProjectParameter(User loginUser, long projectCode, String projectParameterName, - String projectParameterValue); + String projectParameterValue, String projectParameterDataType); Result updateProjectParameter(User loginUser, long projectCode, long code, String projectParameterName, - String projectParameterValue); + String projectParameterValue, String projectParameterDataType); Result deleteProjectParametersByCode(User loginUser, long projectCode, long code); Result batchDeleteProjectParametersByCodes(User loginUser, long projectCode, String codes); Result queryProjectParameterListPaging(User loginUser, long projectCode, Integer pageSize, Integer pageNo, - String searchVal); + String searchVal, String projectParameterDataType); Result queryProjectParameterByCode(User loginUser, long projectCode, long code); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index 24d1ba8727dd..0b501dbbc88a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -17,206 +17,109 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.dto.resources.DeleteDataTransferResponse; +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.DeleteResourceRequest; +import org.apache.dolphinscheduler.api.dto.resources.DownloadFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.PagingResourceItemRequest; +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.RenameFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileRequest; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.api.vo.ResourceItemVO; +import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; import org.apache.dolphinscheduler.spi.enums.ResourceType; -import java.io.IOException; -import java.util.Map; +import java.util.List; -import 
org.springframework.web.multipart.MultipartFile; +import javax.servlet.http.HttpServletResponse; -/** - * resources service - */ public interface ResourcesService { /** - * create directory - * - * @param loginUser login user - * @param name alias - * @param type type - * @param pid parent id - * @param currentDir current directory - * @return create directory result + * Create a new directory in the resource storage, if the directory already exists will throw exception */ - Result createDirectory(User loginUser, - String name, - ResourceType type, - int pid, - String currentDir); + void createDirectory(CreateDirectoryRequest createDirectoryRequest); /** - * create resource - * - * @param loginUser login user - * @param name alias - * @param type type - * @param file file - * @param currentDir current directory - * @return create result code + * Rename the directory in the resource storage, if the origin directory not exists or the new directory already exists will throw exception. + *

If the origin directory is empty will only update the directory name. + *

If the origin directory is not empty will move all the files and directories to the new directory. + *

After update the origin directory will be deleted. */ - Result uploadResource(User loginUser, - String name, - ResourceType type, - MultipartFile file, - String currentDir); + void renameDirectory(RenameDirectoryRequest renameDirectoryRequest); /** - * update resource - * @param loginUser login user - * @param name name - * @param type resource type - * @param file resource file - * @return update result code + * Upload a new file to the resource storage, if the file already exists will throw exception */ - Result updateResource(User loginUser, - String fullName, - String tenantCode, - String name, - ResourceType type, - MultipartFile file); + void createFile(CreateFileRequest createFileRequest); /** - * query resources list paging - * - * @param loginUser login user - * @param type resource type - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @return resource list page + * Update the file in the resource storage, if the origin file not exists or the new file already exists will throw exception. + *

If the new file is empty will only update the file name. + *

If the new file is not empty will update the file content and name. + *

After update the origin file will be deleted. */ - Result> queryResourceListPaging(User loginUser, String fullName, String resTenantCode, - ResourceType type, String searchVal, Integer pageNo, - Integer pageSize); + void updateFile(UpdateFileRequest updateFileRequest); /** - * query resource list - * - * @param loginUser login user - * @param type resource type - * @return resource list + * Rename the file in the resource storage, if the origin file not exists or the new file already exists will throw exception. */ - Map queryResourceList(User loginUser, ResourceType type, String fullName); + void renameFile(RenameFileRequest renameFileRequest); /** - * query resource list by program type - * - * @param loginUser login user - * @param type resource type - * @return resource list + * Create a new file in the resource storage, if the file already exists will throw exception. + * Different with {@link ResourcesService#createFile(CreateFileRequest)} this method will create a new file with the given content. */ - Result queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType); + void createFileFromContent(CreateFileFromContentRequest createFileFromContentRequest); /** - * delete resource - * - * @param loginUser login user - * @return delete result code - * @throws IOException exception + * Update the file content. */ - Result delete(User loginUser, String fullName, String tenantCode) throws IOException; + void updateFileFromContent(UpdateFileFromContentRequest updateFileContentRequest); /** - * verify resource by name and type - * @param loginUser login user - * @param fullName resource full name - * @param type resource type - * @return true if the resource name not exists, otherwise return false + * Paging query resource items. + *

If the login user is not admin will only query the resource items that under the user's tenant. + *

If the login user is admin and {@link PagingResourceItemRequest##resourceAbsolutePath} is null will return all the resource items. */ - Result verifyResourceName(String fullName, ResourceType type, User loginUser); + PageInfo pagingResourceItem(PagingResourceItemRequest pagingResourceItemRequest); /** - * verify resource by file name - * @param fileName resource file name - * @param type resource type - * @return true if the resource file name, otherwise return false + * Query the resource file items by the given resource type and program type. */ - Result queryResourceByFileName(User loginUser, String fileName, ResourceType type, String resTenantCode); + List queryResourceFiles(User loginUser, ResourceType type); /** - * view resource file online - * - * @param skipLineNum skip line number - * @param limit limit - * @param fullName fullName - * @return resource content + * Delete the resource item. + *

If the resource item is a directory will delete all the files and directories under the directory. + *

If the resource item is a file will delete the file. + *

If the resource item not exists will throw exception. */ - Result readResource(User loginUser, String fullName, String tenantCode, int skipLineNum, int limit); + void delete(DeleteResourceRequest deleteResourceRequest); /** - * create resource file online - * - * @param loginUser login user - * @param type resource type - * @param fileName file name - * @param fileSuffix file suffix - * @param content content - * @return create result code + * Fetch the file content. */ - Result createResourceFile(User loginUser, ResourceType type, String fileName, String fileSuffix, - String content, String currentDirectory); + FetchFileContentResponse fetchResourceFileContent(FetchFileContentRequest fetchFileContentRequest); - /** - * create or update resource. - * If the folder is not already created, it will be ignored and directly create the new file - * - * @param userName user who create or update resource - * @param fullName The fullname of resource.Includes path and suffix. - * @param resourceContent content of resource - */ - StorageEntity createOrUpdateResource(String userName, String fullName, String resourceContent) throws Exception; + void downloadResource(HttpServletResponse response, DownloadFileRequest downloadFileRequest); /** - * updateProcessInstance resource - * - * @param loginUser login user - * @param fullName full name - * @param tenantCode tenantCode - * @param content content - * @return update result cod - */ - Result updateResourceContent(User loginUser, String fullName, String tenantCode, - String content); - - /** - * download file - * - * @return resource content - * @throws IOException exception - */ - org.springframework.core.io.Resource downloadResource(User loginUser, String fullName) throws IOException; - - /** - * Get resource by given resource type and full name. + * Get resource by given resource type and file name. * Useful in Python API create task which need processDefinition information. 
* * @param userName user who query resource - * @param fullName full name of the resource + * @param fileName file name of the resource */ - StorageEntity queryFileStatus(String userName, String fullName) throws Exception; + StorageEntity queryFileStatus(String userName, String fileName) throws Exception; - /** - * delete DATA_TRANSFER data in resource center - * - * @param loginUser user who query resource - * @param days number of days - */ - DeleteDataTransferResponse deleteDataTransferData(User loginUser, Integer days); - - /** - * get resource base dir - * - * @param loginUser login user - * @param type resource type - * @return - */ - Result queryResourceBaseDir(User loginUser, ResourceType type); + String queryResourceBaseDir(User loginUser, ResourceType type); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index 86e5396dbe48..bff051868578 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -72,9 +72,9 @@ Result queryTaskListPaging(User loginUser, * @param taskInstanceId task instance id * @return the result code and msg */ - Result forceTaskSuccess(User loginUser, - long projectCode, - Integer taskInstanceId); + void forceTaskSuccess(User loginUser, + long projectCode, + Integer taskInstanceId); /** * task savepoint diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java deleted file mode 100644 index 35be4066d86c..000000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to 
the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.service; - -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.dao.entity.User; - -/** - * udf func service - */ -public interface UdfFuncService { - - /** - * create udf function - * - * @param loginUser login user - * @param type udf type - * @param funcName function name - * @param argTypes argument types - * @param database database - * @param desc description - * @param className class name - * @return create result code - */ - Result createUdfFunction(User loginUser, - String funcName, - String className, - String fullName, - String argTypes, - String database, - String desc, - UdfType type); - - /** - * query udf function - * - * @param id udf function id - * @return udf function detail - */ - Result queryUdfFuncDetail(User loginUser, int id); - - /** - * updateProcessInstance udf function - * - * @param udfFuncId udf function id - * @param type resource type - * @param funcName function name - * @param argTypes argument types - * @param database data base - * @param desc description - * @param resourceId resource id - * @param fullName resource 
full name - * @param className class name - * @return update result code - */ - Result updateUdfFunc(User loginUser, - int udfFuncId, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - String fullName); - - /** - * query udf function list paging - * - * @param loginUser login user - * @param pageNo page number - * @param pageSize page size - * @param searchVal search value - * @return udf function list page - */ - Result queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize); - - /** - * query udf list - * - * @param loginUser login user - * @param type udf type - * @return udf func list - */ - Result queryUdfFuncList(User loginUser, Integer type); - - /** - * delete udf function - * - * @param id udf function id - * @return delete result code - */ - Result delete(User loginUser, int id); - - /** - * verify udf function by name - * - * @param name name - * @return true if the name can user, otherwise return false - */ - Result verifyUdfFuncByName(User loginUser, String name); - -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index 45e5964825d2..61f6a138ba7a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -183,16 +183,6 @@ User updateUser(User loginUser, */ Map revokeProject(User loginUser, int userId, long projectCode); - /** - * grant udf function - * - * @param loginUser login user - * @param userId user id - * @param udfIds udf id array - * @return grant result code - */ - Map grantUDFFunction(User loginUser, int userId, String udfIds); - /** * grant namespace * diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java index 2c87e4be2ef3..b85d3912cb7d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import java.util.List; @@ -77,13 +76,6 @@ Map saveWorkerGroup(User loginUser, int id, String name, String */ Map getWorkerAddressList(); - /** - * Get task instance's worker group - * @param taskInstance task instance - * @return worker group - */ - String getTaskWorkerGroup(TaskInstance taskInstance); - /** * Query worker group by process definition codes * @param processDefinitionCodeList processDefinitionCodeList diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java index 59eaca3c83f3..cf07c9fd7342 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALART_INSTANCE_CREATE; +import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_INSTANCE_CREATE; import static 
org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_PLUGIN_DELETE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_PLUGIN_UPDATE; @@ -108,7 +108,7 @@ public AlertPluginInstance create(User loginUser, WarningType warningType, String pluginInstanceParams) { - if (!canOperatorPermissions(loginUser, null, AuthorizationType.ALERT_PLUGIN_INSTANCE, ALART_INSTANCE_CREATE)) { + if (!canOperatorPermissions(loginUser, null, AuthorizationType.ALERT_PLUGIN_INSTANCE, ALERT_INSTANCE_CREATE)) { throw new ServiceException(Status.USER_NO_OPERATION_PERM); } @@ -359,7 +359,7 @@ public void testSend(int pluginDefineId, String pluginInstanceParams) { throw new ServiceException(Status.ALERT_TEST_SENDING_FAILED, e.getMessage()); } - if (alertSendResponse.isSuccess()) { + if (!alertSendResponse.isSuccess()) { throw new ServiceException(Status.ALERT_TEST_SENDING_FAILED, alertSendResponse.getResResults().get(0).getMessage()); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AuditServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AuditServiceImpl.java index 759e30b0364b..f8f763b4f4dd 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AuditServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AuditServiceImpl.java @@ -17,21 +17,24 @@ package org.apache.dolphinscheduler.api.service.impl; -import org.apache.dolphinscheduler.api.audit.AuditMessage; -import org.apache.dolphinscheduler.api.audit.AuditPublishService; import org.apache.dolphinscheduler.api.dto.AuditDto; import org.apache.dolphinscheduler.api.service.AuditService; import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.enums.AuditModelType; import org.apache.dolphinscheduler.common.enums.AuditOperationType; -import 
org.apache.dolphinscheduler.common.enums.AuditResourceType; import org.apache.dolphinscheduler.dao.entity.AuditLog; -import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AuditLogMapper; +import org.apache.parquet.Strings; + +import java.util.ArrayList; +import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -39,74 +42,69 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @Service +@Slf4j public class AuditServiceImpl extends BaseServiceImpl implements AuditService { @Autowired private AuditLogMapper auditLogMapper; - @Autowired - private AuditPublishService publishService; - - /** - * add new audit log - * - * @param user login user - * @param resourceType resource type - * @param resourceId resource id - * @param operation operation type - */ @Override - public void addAudit(User user, AuditResourceType resourceType, Integer resourceId, AuditOperationType operation) { - publishService.publish(new AuditMessage(user, new Date(), resourceType, operation, resourceId)); + public void addAudit(AuditLog auditLog) { + if (auditLog.getModelId() == null || auditLog.getModelName() == null) { + return; + } + + auditLogMapper.insert(auditLog); } /** * query audit log paging * - * @param loginUser login user - * @param resourceType resource type - * @param operationType operation type - * @param startDate start time - * @param endDate end time - * @param userName query user name - * @param pageNo page number - * @param pageSize page size + * @param modelTypes object types + * @param operationTypes operation types + * @param startDate start time + * @param endDate end time + * @param userName query user name + * @param modelName query object name + * @param pageNo page number + * @param pageSize page size * @return 
audit log string data */ @Override - public PageInfo queryLogListPaging(User loginUser, - AuditResourceType resourceType, - AuditOperationType operationType, + public PageInfo queryLogListPaging(String modelTypes, + String operationTypes, String startDate, String endDate, String userName, + String modelName, Integer pageNo, Integer pageSize) { - - int[] resourceArray = null; - if (resourceType != null) { - resourceArray = new int[]{resourceType.getCode()}; - } - - int[] opsArray = null; - if (operationType != null) { - opsArray = new int[]{operationType.getCode()}; - } + List objectTypeCodeList = convertStringToList(modelTypes); + List operationTypeCodeList = convertStringToList(operationTypes); Date start = checkAndParseDateParameters(startDate); Date end = checkAndParseDateParameters(endDate); - IPage logIPage = auditLogMapper.queryAuditLog(new Page<>(pageNo, pageSize), resourceArray, opsArray, - userName, start, end); + IPage logIPage = + auditLogMapper.queryAuditLog(new Page<>(pageNo, pageSize), objectTypeCodeList, operationTypeCodeList, + userName, modelName, start, end); List auditDtos = logIPage.getRecords().stream().map(this::transformAuditLog).collect(Collectors.toList()); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotal((int) auditDtos.size()); + pageInfo.setTotal((int) logIPage.getTotal()); pageInfo.setTotalList(auditDtos); return pageInfo; } + private List convertStringToList(String string) { + if (Strings.isNullOrEmpty(string)) { + return new ArrayList<>(); + } + + return Arrays.stream(string.split(",")).collect(Collectors.toList()); + } + /** * transform AuditLog to AuditDto * @@ -115,12 +113,15 @@ public PageInfo queryLogListPaging(User loginUser, */ private AuditDto transformAuditLog(AuditLog auditLog) { AuditDto auditDto = new AuditDto(); - String resourceType = AuditResourceType.of(auditLog.getResourceType()).getMsg(); - auditDto.setResource(resourceType); - 
auditDto.setOperation(AuditOperationType.of(auditLog.getOperation()).getMsg()); + AuditModelType objectType = AuditModelType.of(auditLog.getModelType()); + auditDto.setModelType(objectType.getName()); + auditDto.setModelName(auditLog.getModelName()); + auditDto.setOperation(AuditOperationType.of(auditLog.getOperationType()).getName()); auditDto.setUserName(auditLog.getUserName()); - auditDto.setResourceName(auditLogMapper.queryResourceNameByType(resourceType, auditLog.getResourceId())); - auditDto.setTime(auditLog.getTime()); + auditDto.setLatency(String.valueOf(auditLog.getLatency())); + auditDto.setDetail(auditLog.getDetail()); + auditDto.setDescription(auditLog.getDescription()); + auditDto.setCreateTime(auditLog.getCreateTime()); return auditDto; } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java index 26dcbc5449fd..428b9527bc15 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java @@ -133,10 +133,8 @@ public boolean check(Map result, boolean bool, Status userNoOper // @Override // public void createTenantDirIfNotExists(String tenantCode) throws IOException { // String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - // String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); - // // init resource path and udf path + // // init resource path // HadoopUtils.getInstance().mkdir(tenantCode,resourcePath); - // HadoopUtils.getInstance().mkdir(tenantCode,udfsPath); // } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java index a5ceb92abc2f..bf4e3ac4d605 
100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java @@ -96,7 +96,7 @@ public Long createCluster(User loginUser, String name, String config, String des cluster.setOperator(loginUser.getId()); cluster.setCreateTime(new Date()); cluster.setUpdateTime(new Date()); - cluster.setCode(CodeGenerateUtils.getInstance().genCode()); + cluster.setCode(CodeGenerateUtils.genCode()); if (clusterMapper.insert(cluster) > 0) { return cluster.getCode(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java index 36c9c2b8c13b..9520b5090c1d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -27,14 +27,18 @@ import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.DataAnalysisService; import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.vo.TaskInstanceCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowDefinitionCountVO; import org.apache.dolphinscheduler.api.vo.WorkflowInstanceCountVO; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.Command; import 
org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; @@ -71,6 +75,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.Lists; /** @@ -380,6 +386,66 @@ public TaskCountDto countOneTaskStates(User loginUser, Long taskCode) { return new TaskCountDto(executeStatusCounts); } + @Override + public PageInfo listPendingCommands(User loginUser, Long projectCode, Integer pageNo, Integer pageSize) { + Page page = new Page<>(pageNo, pageSize); + if (loginUser.getUserType().equals(UserType.ADMIN_USER)) { + IPage commandIPage = commandMapper.queryCommandPage(page); + return PageInfo.of(commandIPage); + } + + List workflowDefinitionCodes = getAuthDefinitionCodes(loginUser, projectCode); + + if (workflowDefinitionCodes.isEmpty()) { + return PageInfo.of(pageNo, pageSize); + } + + IPage commandIPage = + commandMapper.queryCommandPageByIds(page, new ArrayList<>(workflowDefinitionCodes)); + return PageInfo.of(commandIPage); + } + + @Override + public PageInfo listErrorCommand(User loginUser, Long projectCode, Integer pageNo, Integer pageSize) { + Page page = new Page<>(pageNo, pageSize); + if (loginUser.getUserType().equals(UserType.ADMIN_USER)) { + IPage commandIPage = errorCommandMapper.queryErrorCommandPage(page); + return PageInfo.of(commandIPage); + } + + List workflowDefinitionCodes = getAuthDefinitionCodes(loginUser, projectCode); + + if (workflowDefinitionCodes.isEmpty()) { + return PageInfo.of(pageNo, pageSize); + } + + IPage commandIPage = + errorCommandMapper.queryErrorCommandPageByIds(page, new 
ArrayList<>(workflowDefinitionCodes)); + return PageInfo.of(commandIPage); + } + + private List getAuthDefinitionCodes(User loginUser, Long projectCode) { + Set projectIds = resourcePermissionCheckService + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); + if (CollectionUtils.isEmpty(projectIds)) { + return Collections.emptyList(); + } + List projectCodes = projectMapper.selectBatchIds(projectIds) + .stream() + .map(Project::getCode) + .collect(Collectors.toList()); + + if (projectCode != null) { + if (!projectCodes.contains(projectCode)) { + return Collections.emptyList(); + } + + projectCodes = Collections.singletonList(projectCode); + } + + return processDefinitionMapper.queryDefinitionCodeListByProjectCodes(projectCodes); + } + /** * statistics the process definition quantities of a certain person *

diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java index a0307624578c..210900e54c5a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java @@ -230,7 +230,7 @@ public BaseDataSourceParamDTO queryDataSource(int id, User loginUser) { @Override public PageInfo queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - IPage dataSourceList = null; + IPage dataSourceList; Page dataSourcePage = new Page<>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); if (loginUser.getUserType().equals(UserType.ADMIN_USER)) { @@ -282,7 +282,7 @@ private String getHiddenPassword() { @Override public List queryDataSourceList(User loginUser, Integer type) { - List datasourceList = null; + List datasourceList; if (loginUser.getUserType().equals(UserType.ADMIN_USER)) { datasourceList = dataSourceMapper.queryDataSourceByType(0, type); } else { @@ -420,7 +420,7 @@ public List authedDatasource(User loginUser, Integer userId) { public List getTables(Integer datasourceId, String database) { DataSource dataSource = dataSourceMapper.selectById(datasourceId); - List tableList = null; + List tableList; BaseConnectionParam connectionParam = (BaseConnectionParam) DataSourceUtils.buildConnectionParams( dataSource.getType(), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java index ade9aea48383..2eb8f84810ea 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java @@ -123,7 +123,7 @@ public Long createEnvironment(User loginUser, env.setUpdateTime(new Date()); long code = 0L; try { - code = CodeGenerateUtils.getInstance().genCode(); + code = CodeGenerateUtils.genCode(); env.setCode(code); } catch (CodeGenerateException e) { log.error("Generate environment code error.", e); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java index 7ab6102ff8e7..6bb7d228ff3e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java @@ -89,7 +89,9 @@ import org.apache.dolphinscheduler.extract.master.transportor.StreamingTaskTriggerRequest; import org.apache.dolphinscheduler.extract.master.transportor.StreamingTaskTriggerResponse; import org.apache.dolphinscheduler.extract.master.transportor.WorkflowInstanceStateChangeEvent; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; +import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import org.apache.dolphinscheduler.service.command.CommandService; import org.apache.dolphinscheduler.service.cron.CronUtils; import org.apache.dolphinscheduler.service.exceptions.CronParseException; @@ -203,7 +205,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ * @param environmentCode environment code * @param runMode run mode * @param timeout timeout - 
* @param startParams the global param values which pass to new process instance + * @param startParamList the global param values which pass to new process instance * @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode * @param testFlag testFlag * @param executionOrder the execution order when complementing data @@ -219,7 +221,7 @@ public Map execProcessInstance(User loginUser, long projectCode, Priority processInstancePriority, String workerGroup, String tenantCode, Long environmentCode, Integer timeout, - Map startParams, Integer expectedParallelismNumber, + List startParamList, Integer expectedParallelismNumber, int dryRun, int testFlag, ComplementDependentMode complementDependentMode, Integer version, boolean allLevelDependent, ExecutionOrder executionOrder) { @@ -258,7 +260,7 @@ public Map execProcessInstance(User loginUser, long projectCode, checkScheduleTimeNumExceed(commandType, cronTime); checkMasterExists(); - long triggerCode = CodeGenerateUtils.getInstance().genCode(); + long triggerCode = CodeGenerateUtils.genCode(); /** * create command @@ -269,7 +271,7 @@ public Map execProcessInstance(User loginUser, long projectCode, startNodeList, cronTime, warningType, loginUser.getId(), warningGroupId, runMode, processInstancePriority, workerGroup, tenantCode, - environmentCode, startParams, expectedParallelismNumber, dryRun, testFlag, + environmentCode, startParamList, expectedParallelismNumber, dryRun, testFlag, complementDependentMode, allLevelDependent, executionOrder); if (create > 0) { @@ -289,7 +291,7 @@ public Map execProcessInstance(User loginUser, long projectCode, private void checkMasterExists() { // check master server exists - List masterServers = monitorService.getServerListFromRegistry(true); + List masterServers = monitorService.listServer(RegistryNodeType.MASTER); // no master if (masterServers.isEmpty()) { @@ -359,7 +361,7 @@ public boolean checkSubProcessDefinitionValid(ProcessDefinition 
processDefinitio // find out the process definition code Set processDefinitionCodeSet = new HashSet<>(); taskDefinitions.stream() - .filter(task -> TaskConstants.TASK_TYPE_SUB_PROCESS.equalsIgnoreCase(task.getTaskType())).forEach( + .filter(task -> TaskTypeUtils.isSubWorkflowTask(task.getTaskType())).forEach( taskDefinition -> processDefinitionCodeSet.add(Long.valueOf( JSONUtils.getNodeString(taskDefinition.getTaskParams(), CMD_PARAM_SUB_PROCESS_DEFINE_CODE)))); @@ -731,7 +733,7 @@ private int createCommand(Long triggerCode, CommandType commandType, long proces WarningType warningType, int executorId, Integer warningGroupId, RunMode runMode, Priority processInstancePriority, String workerGroup, String tenantCode, Long environmentCode, - Map startParams, Integer expectedParallelismNumber, int dryRun, + List startParamList, Integer expectedParallelismNumber, int dryRun, int testFlag, ComplementDependentMode complementDependentMode, boolean allLevelDependent, ExecutionOrder executionOrder) { @@ -760,8 +762,8 @@ private int createCommand(Long triggerCode, CommandType commandType, long proces if (warningType != null) { command.setWarningType(warningType); } - if (startParams != null && startParams.size() > 0) { - cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams)); + if (CollectionUtils.isNotEmpty(startParamList)) { + cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParamList)); } command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(executorId); @@ -1142,7 +1144,7 @@ public void execStreamTaskInstance(User loginUser, long projectCode, long taskDe checkValidTenant(tenantCode); checkMasterExists(); // todo dispatch improvement - List masterServerList = monitorService.getServerListFromRegistry(true); + List masterServerList = monitorService.listServer(RegistryNodeType.MASTER); Server server = masterServerList.get(0); StreamingTaskTriggerRequest taskExecuteStartMessage = new StreamingTaskTriggerRequest(); 
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java index 06f3608207f3..7543616f31c9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java @@ -141,7 +141,7 @@ public Map registerK8sNamespace(User loginUser, String namespace long code = 0L; try { - code = CodeGenerateUtils.getInstance().genCode(); + code = CodeGenerateUtils.genCode(); cluster.setCode(code); } catch (CodeGenerateUtils.CodeGenerateException e) { log.error("Generate cluster code error.", e); @@ -173,6 +173,7 @@ public Map registerK8sNamespace(User loginUser, String namespace k8sNamespaceMapper.insert(k8sNamespaceObj); log.info("K8s namespace create complete, namespace:{}.", k8sNamespaceObj.getNamespace()); + result.put(Constants.DATA_LIST, k8sNamespaceObj); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java index 0663b883747e..c0ecb0e9b58a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -237,7 +237,7 @@ private byte[] getLogBytes(TaskInstance taskInstance) { host, Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8); - byte[] logBytes = new byte[0]; + byte[] logBytes; ILogService iLogService = SingletonJdkDynamicRpcClientProxyFactory.getProxyClient(taskInstance.getHost(), ILogService.class); @@ -251,6 +251,5 @@ private byte[] 
getLogBytes(TaskInstance taskInstance) { log.error("Download TaskInstance: {} Log Error", taskInstance.getName(), ex); throw new ServiceException(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR); } - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java index 907ab2329c88..24bfcc5b259d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java @@ -19,7 +19,6 @@ import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.model.WorkerServerModel; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.plugin.api.monitor.DatabaseMetrics; import org.apache.dolphinscheduler.dao.plugin.api.monitor.DatabaseMonitor; @@ -27,7 +26,6 @@ import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import java.util.List; -import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -35,7 +33,6 @@ import org.springframework.stereotype.Service; import com.google.common.collect.Lists; -import com.google.common.collect.Sets; /** * monitor service impl @@ -61,48 +58,8 @@ public List queryDatabaseState(User loginUser) { return Lists.newArrayList(databaseMonitor.getDatabaseMetrics()); } - /** - * query master list - * - * @param loginUser login user - * @return master information list - */ @Override - public List queryMaster(User loginUser) { - return registryClient.getServerList(RegistryNodeType.MASTER); + public List listServer(RegistryNodeType nodeType) { + return registryClient.getServerList(nodeType); } - - /** - * query worker list - * - * @param loginUser login user - * @return worker 
information list - */ - @Override - public List queryWorker(User loginUser) { - - return registryClient.getServerList(RegistryNodeType.WORKER) - .stream() - .map((Server server) -> { - WorkerServerModel model = new WorkerServerModel(); - model.setId(server.getId()); - model.setHost(server.getHost()); - model.setPort(server.getPort()); - model.setZkDirectories(Sets.newHashSet(server.getZkDirectory())); - model.setResInfo(server.getResInfo()); - model.setCreateTime(server.getCreateTime()); - model.setLastHeartbeatTime(server.getLastHeartbeatTime()); - return model; - }) - .collect(Collectors.toList()); - - } - - @Override - public List getServerListFromRegistry(boolean isMaster) { - return isMaster - ? registryClient.getServerList(RegistryNodeType.MASTER) - : registryClient.getServerList(RegistryNodeType.WORKER); - } - } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index 09fe59e52343..820d35b3b11e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -34,13 +34,12 @@ import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE; import static org.apache.dolphinscheduler.common.constants.Constants.COPY_SUFFIX; import static org.apache.dolphinscheduler.common.constants.Constants.DATA_LIST; -import static org.apache.dolphinscheduler.common.constants.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.constants.Constants.GLOBAL_PARAMS; import static org.apache.dolphinscheduler.common.constants.Constants.IMPORT_SUFFIX; import static org.apache.dolphinscheduler.common.constants.Constants.LOCAL_PARAMS; import 
static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.LOCAL_PARAMS_LIST; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SQL; +import static org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager.checkTaskParameters; import org.apache.dolphinscheduler.api.dto.DagDataSchedule; import org.apache.dolphinscheduler.api.dto.treeview.Instance; @@ -109,12 +108,13 @@ import org.apache.dolphinscheduler.dao.repository.ProcessDefinitionDao; import org.apache.dolphinscheduler.dao.repository.ProcessDefinitionLogDao; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao; -import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.plugin.task.api.enums.SqlType; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.plugin.task.api.model.Property; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.parameters.SqlParameters; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; +import org.apache.dolphinscheduler.plugin.task.sql.SqlTaskChannelFactory; import org.apache.dolphinscheduler.service.alert.ListenerEventAlertManager; import org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -238,9 +238,6 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro @Autowired private DataSourceMapper dataSourceMapper; - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private WorkFlowLineageService workFlowLineageService; @@ -299,7 +296,7 @@ public Map createProcessDefinition(User loginUser, List taskDefinitionLogs = generateTaskDefinitionList(taskDefinitionJson); 
List taskRelationList = generateTaskRelationList(taskRelationJson, taskDefinitionLogs); - long processDefinitionCode = CodeGenerateUtils.getInstance().genCode(); + long processDefinitionCode = CodeGenerateUtils.genCode(); ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description, globalParams, locations, timeout, loginUser.getId()); @@ -360,7 +357,7 @@ public ProcessDefinition createSingleProcessDefinition(User loginUser, long processDefinitionCode; try { - processDefinitionCode = CodeGenerateUtils.getInstance().genCode(); + processDefinitionCode = CodeGenerateUtils.genCode(); } catch (CodeGenerateException e) { throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); } @@ -424,11 +421,7 @@ private List generateTaskDefinitionList(String taskDefinition throw new ServiceException(Status.DATA_IS_NOT_VALID, taskDefinitionJson); } for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinitionLog.getTaskType()) - .taskParams(taskDefinitionLog.getTaskParams()) - .dependence(taskDefinitionLog.getDependence()) - .build())) { + if (!checkTaskParameters(taskDefinitionLog.getTaskType(), taskDefinitionLog.getTaskParams())) { log.error( "Generate task definition list failed, the given task definition parameter is invalided, taskName: {}, taskDefinition: {}", taskDefinitionLog.getName(), taskDefinitionLog); @@ -1233,7 +1226,7 @@ public Map importSqlProcessDefinition(User loginUser, long proje // build process definition processDefinition = new ProcessDefinition(projectCode, processDefinitionName, - CodeGenerateUtils.getInstance().genCode(), + CodeGenerateUtils.genCode(), "", "[]", null, 0, loginUser.getId()); @@ -1388,12 +1381,12 @@ private TaskDefinitionLog buildNormalSqlTaskDefinition(String taskName, DataSour sqlParameters.setSqlType(SqlType.NON_QUERY.ordinal()); sqlParameters.setLocalParams(Collections.emptyList()); 
taskDefinition.setTaskParams(JSONUtils.toJsonString(sqlParameters)); - taskDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); - taskDefinition.setTaskType(TASK_TYPE_SQL); + taskDefinition.setCode(CodeGenerateUtils.genCode()); + taskDefinition.setTaskType(SqlTaskChannelFactory.NAME); taskDefinition.setFailRetryTimes(0); taskDefinition.setFailRetryInterval(0); taskDefinition.setTimeoutFlag(TimeoutFlag.CLOSE); - taskDefinition.setWorkerGroup(DEFAULT_WORKER_GROUP); + taskDefinition.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); taskDefinition.setTaskPriority(Priority.MEDIUM); taskDefinition.setEnvironmentCode(-1); taskDefinition.setTimeout(0); @@ -1433,7 +1426,7 @@ protected boolean checkAndImport(User loginUser, processDefinition.setProjectCode(projectCode); processDefinition.setUserId(loginUser.getId()); try { - processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); + processDefinition.setCode(CodeGenerateUtils.genCode()); } catch (CodeGenerateException e) { log.error( "Save process definition error because generate process definition code error, projectCode:{}.", @@ -1456,7 +1449,7 @@ protected boolean checkAndImport(User loginUser, taskDefinitionLog.setOperator(loginUser.getId()); taskDefinitionLog.setOperateTime(now); try { - long code = CodeGenerateUtils.getInstance().genCode(); + long code = CodeGenerateUtils.genCode(); taskCodeMap.put(taskDefinitionLog.getCode(), code); taskDefinitionLog.setCode(code); } catch (CodeGenerateException e) { @@ -1537,6 +1530,7 @@ protected boolean checkAndImport(User loginUser, } } + result.put(Constants.DATA_LIST, processDefinition); log.info("Import process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinition.getCode()); return true; @@ -1617,13 +1611,7 @@ public Map checkProcessNodeList(String processTaskRelationJson, // check whether the process definition json is normal for (TaskNode taskNode : taskNodes) { - if 
(!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskNode.getType()) - .taskParams(taskNode.getTaskParams()) - .dependence(taskNode.getDependence()) - .switchResult(taskNode.getSwitchResult()) - .build())) { - log.error("Task node {} parameter invalid.", taskNode.getName()); + if (!checkTaskParameters(taskNode.getType(), taskNode.getParams())) { putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); return result; } @@ -1893,7 +1881,7 @@ public Map viewTree(User loginUser, long projectCode, long code, long subProcessCode = 0L; // if process is sub process, the return sub id, or sub id=0 - if (taskInstance.isSubProcess()) { + if (TaskTypeUtils.isSubWorkflowTask(taskInstance.getTaskType())) { TaskDefinition taskDefinition = taskDefinitionMap.get(taskInstance.getTaskCode()); subProcessCode = Long.parseLong(JSONUtils.parseObject( taskDefinition.getTaskParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_CODE).asText()); @@ -2073,7 +2061,7 @@ protected void doBatchOperateProcessDefinition(User loginUser, Map taskCodeMap = new HashMap<>(); for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { try { - long taskCode = CodeGenerateUtils.getInstance().genCode(); + long taskCode = CodeGenerateUtils.genCode(); taskCodeMap.put(taskDefinitionLog.getCode(), taskCode); taskDefinitionLog.setCode(taskCode); } catch (CodeGenerateException e) { @@ -2096,7 +2084,7 @@ protected void doBatchOperateProcessDefinition(User loginUser, } final long oldProcessDefinitionCode = processDefinition.getCode(); try { - processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); + processDefinition.setCode(CodeGenerateUtils.genCode()); } catch (CodeGenerateException e) { log.error("Generate process definition code error, projectCode:{}.", targetProjectCode, e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java index 36cc986607ee..11ff034bb64e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java @@ -21,13 +21,11 @@ import static org.apache.dolphinscheduler.api.enums.Status.PROCESS_INSTANCE_NOT_EXIST; import static org.apache.dolphinscheduler.api.enums.Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR; import static org.apache.dolphinscheduler.common.constants.Constants.DATA_LIST; -import static org.apache.dolphinscheduler.common.constants.Constants.DEPENDENT_SPLIT; import static org.apache.dolphinscheduler.common.constants.Constants.GLOBAL_PARAMS; import static org.apache.dolphinscheduler.common.constants.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.constants.Constants.PROCESS_INSTANCE_STATE; import static org.apache.dolphinscheduler.common.constants.Constants.TASK_LIST; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DEPENDENT; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SUB_PROCESS; +import static org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager.checkTaskParameters; import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant; import org.apache.dolphinscheduler.api.dto.DynamicSubWorkflowDto; @@ -60,7 +58,6 @@ import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.RelationSubWorkflow; -import org.apache.dolphinscheduler.dao.entity.ResponseTaskLog; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.TaskInstance; 
@@ -70,31 +67,22 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.RelationSubWorkflowMapper; -import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceMapDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.dao.utils.WorkflowUtils; -import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; import org.apache.dolphinscheduler.plugin.task.api.model.Property; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.expand.CuringParamsService; import org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.lang3.StringUtils; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -177,18 +165,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce @Autowired UsersService usersService; - @Autowired - private TenantMapper tenantMapper; - @Autowired TaskDefinitionMapper 
taskDefinitionMapper; - @Autowired - private TaskPluginManager taskPluginManager; - - @Autowired - private ScheduleMapper scheduleMapper; - @Autowired private RelationSubWorkflowMapper relationSubWorkflowMapper; @@ -459,11 +438,10 @@ public Result queryProcessInstanceList(User loginUser, WorkflowInstanceQueryRequ * @param projectCode project code * @param processId process instance id * @return task list for the process instance - * @throws IOException io exception */ @Override public Map queryTaskListByProcessId(User loginUser, long projectCode, - Integer processId) throws IOException { + Integer processId) { Project project = projectMapper.queryByCode(projectCode); // check user access for project Map result = @@ -484,7 +462,6 @@ public Map queryTaskListByProcessId(User loginUser, long project } List taskInstanceList = taskInstanceDao.queryValidTaskListByWorkflowInstanceId(processId, processInstance.getTestFlag()); - addDependResultForTaskList(loginUser, taskInstanceList); Map resultMap = new HashMap<>(); resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); resultMap.put(TASK_LIST, taskInstanceList); @@ -509,7 +486,7 @@ public List queryDynamicSubWorkflowInstances(User loginUs throw new ServiceException(Status.TASK_INSTANCE_NOT_EXISTS, taskId); } - if (!taskInstance.isDynamic()) { + if (!TaskTypeUtils.isDynamicTask(taskInstance.getTaskType())) { putMsg(result, Status.TASK_INSTANCE_NOT_DYNAMIC_TASK, taskInstance.getName()); throw new ServiceException(Status.TASK_INSTANCE_NOT_EXISTS, taskId); } @@ -554,57 +531,6 @@ public List queryDynamicSubWorkflowInstances(User loginUs return allDynamicSubWorkflowDtos; } - /** - * add dependent result for dependent task - */ - private void addDependResultForTaskList(User loginUser, List taskInstanceList) throws IOException { - for (TaskInstance taskInstance : taskInstanceList) { - if (TASK_TYPE_DEPENDENT.equalsIgnoreCase(taskInstance.getTaskType())) { - log.info("DEPENDENT type task instance need to set 
dependent result, taskCode:{}, taskInstanceId:{}", - taskInstance.getTaskCode(), taskInstance.getId()); - // TODO The result of dependent item should not be obtained from the log, waiting for optimization. - Result logResult = loggerService.queryLog(loginUser, - taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); - if (logResult.getCode() == Status.SUCCESS.ordinal()) { - String log = logResult.getData().getMessage(); - Map resultMap = parseLogForDependentResult(log); - taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); - } - } - } - } - - @Override - public Map parseLogForDependentResult(String content) throws IOException { - Map resultMap = new HashMap<>(); - if (StringUtils.isEmpty(content)) { - log.warn("Log content is empty."); - return resultMap; - } - - BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(content.getBytes( - StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); - String line; - while ((line = br.readLine()) != null) { - if (line.contains(DEPENDENT_SPLIT)) { - String[] tmpStringArray = line.split(":\\|\\|"); - if (tmpStringArray.length != 2) { - continue; - } - String dependResultString = tmpStringArray[1]; - String[] dependStringArray = dependResultString.split(","); - if (dependStringArray.length != 3) { - continue; - } - String key = dependStringArray[0].trim().split(":")[1].trim(); - String result = dependStringArray[1].trim().split(":")[1].trim(); - DependResult dependResult = DependResult.valueOf(result); - resultMap.put(key, dependResult); - } - } - return resultMap; - } - /** * query sub process instance detail info by task id * @@ -639,9 +565,7 @@ public Map querySubProcessInstanceByTaskId(User loginUser, long return result; } - if (!taskInstance.isSubProcess()) { - log.warn("Task instance is not {} type instance, projectCode:{}, taskInstanceId:{}.", - TASK_TYPE_SUB_PROCESS, projectCode, taskId); + if 
(!TaskTypeUtils.isSubWorkflowTask(taskInstance.getTaskType())) { putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); return result; } @@ -725,11 +649,7 @@ public Map updateProcessInstance(User loginUser, long projectCod return result; } for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinitionLog.getTaskType()) - .taskParams(taskDefinitionLog.getTaskParams()) - .dependence(taskDefinitionLog.getDependence()) - .build())) { + if (!checkTaskParameters(taskDefinitionLog.getTaskType(), taskDefinitionLog.getTaskParams())) { log.error("Task parameters are invalid, taskDefinitionName:{}.", taskDefinitionLog.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java index d8c568e50aca..1f9fe6ff53c0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java @@ -18,9 +18,6 @@ package org.apache.dolphinscheduler.api.service.impl; import static java.util.stream.Collectors.toSet; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_CONDITIONS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DEPENDENT; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SUB_PROCESS; import org.apache.dolphinscheduler.api.dto.taskRelation.TaskRelationCreateRequest; import org.apache.dolphinscheduler.api.dto.taskRelation.TaskRelationFilterRequest; @@ -46,6 +43,7 @@ import 
org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.collections4.CollectionUtils; @@ -354,9 +352,9 @@ public Map deleteTaskProcessRelation(User loginUser, long projec } updateProcessDefiniteVersion(loginUser, result, processDefinition); updateRelation(loginUser, result, processDefinition, processTaskRelationList); - if (TASK_TYPE_CONDITIONS.equals(taskDefinition.getTaskType()) - || TASK_TYPE_DEPENDENT.equals(taskDefinition.getTaskType()) - || TASK_TYPE_SUB_PROCESS.equals(taskDefinition.getTaskType())) { + if (TaskTypeUtils.isConditionTask(taskDefinition.getTaskType()) + || TaskTypeUtils.isSubWorkflowTask(taskDefinition.getTaskType()) + || TaskTypeUtils.isDependentTask(taskDefinition.getTaskType())) { int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode); if (0 == deleteTaskDefinition) { log.error("Delete task definition error, taskDefinitionCode:{}.", taskCode); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectParameterServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectParameterServiceImpl.java index 6e66f6286e9c..e23101da3658 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectParameterServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectParameterServiceImpl.java @@ -68,7 +68,7 @@ public class ProjectParameterServiceImpl extends BaseServiceImpl implements Proj @Override @Transactional public Result createProjectParameter(User loginUser, long projectCode, String projectParameterName, - String projectParameterValue) { + String 
projectParameterValue, String projectParameterDataType) { Result result = new Result(); // check if user have write perm for project @@ -97,7 +97,8 @@ public Result createProjectParameter(User loginUser, long projectCode, String pr .builder() .paramName(projectParameterName) .paramValue(projectParameterValue) - .code(CodeGenerateUtils.getInstance().genCode()) + .paramDataType(projectParameterDataType) + .code(CodeGenerateUtils.genCode()) .projectCode(projectCode) .userId(loginUser.getId()) .createTime(now) @@ -122,7 +123,7 @@ public Result createProjectParameter(User loginUser, long projectCode, String pr @Override public Result updateProjectParameter(User loginUser, long projectCode, long code, String projectParameterName, - String projectParameterValue) { + String projectParameterValue, String projectParameterDataType) { Result result = new Result(); // check if user have write perm for project @@ -155,6 +156,9 @@ public Result updateProjectParameter(User loginUser, long projectCode, long code projectParameter.setParamName(projectParameterName); projectParameter.setParamValue(projectParameterValue); + projectParameter.setParamDataType(projectParameterDataType); + projectParameter.setUpdateTime(new Date()); + projectParameter.setOperator(loginUser.getId()); if (projectParameterMapper.updateById(projectParameter) > 0) { log.info("Project parameter is updated and id is :{}", projectParameter.getId()); @@ -225,11 +229,7 @@ public Result batchDeleteProjectParametersByCodes(User loginUser, long projectCo } for (ProjectParameter projectParameter : projectParameterList) { - try { - this.deleteProjectParametersByCode(loginUser, projectCode, projectParameter.getCode()); - } catch (Exception e) { - throw new ServiceException(Status.DELETE_PROJECT_PARAMETER_ERROR, e.getMessage()); - } + this.deleteProjectParametersByCode(loginUser, projectCode, projectParameter.getCode()); } putMsg(result, Status.SUCCESS); @@ -238,7 +238,7 @@ public Result 
batchDeleteProjectParametersByCodes(User loginUser, long projectCo @Override public Result queryProjectParameterListPaging(User loginUser, long projectCode, Integer pageSize, Integer pageNo, - String searchVal) { + String searchVal, String projectParameterDataType) { Result result = new Result(); Project project = projectMapper.queryByCode(projectCode); @@ -251,7 +251,8 @@ public Result queryProjectParameterListPaging(User loginUser, long projectCode, Page page = new Page<>(pageNo, pageSize); IPage iPage = - projectParameterMapper.queryProjectParameterListPaging(page, projectCode, null, searchVal); + projectParameterMapper.queryProjectParameterListPaging(page, projectCode, null, searchVal, + projectParameterDataType); List projectParameterList = iPage.getRecords(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectPreferenceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectPreferenceServiceImpl.java index dcbd3b745602..6274d290d5a1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectPreferenceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectPreferenceServiceImpl.java @@ -76,7 +76,7 @@ public Result updateProjectPreference(User loginUser, long projectCode, String p projectPreference.setProjectCode(projectCode); projectPreference.setPreferences(preferences); projectPreference.setUserId(loginUser.getId()); - projectPreference.setCode(CodeGenerateUtils.getInstance().genCode()); + projectPreference.setCode(CodeGenerateUtils.genCode()); projectPreference.setState(1); projectPreference.setCreateTime(now); projectPreference.setUpdateTime(now); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java 
index 1a00584a26f5..b5c329c4fd0e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -126,7 +126,7 @@ public Result createProject(User loginUser, String name, String desc) { project = Project .builder() .name(name) - .code(CodeGenerateUtils.getInstance().genCode()) + .code(CodeGenerateUtils.genCode()) .description(desc) .userId(loginUser.getId()) .userName(loginUser.getUserName()) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectWorkerGroupRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectWorkerGroupRelationServiceImpl.java index 15cbcbb7fa86..c11915667fc4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectWorkerGroupRelationServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectWorkerGroupRelationServiceImpl.java @@ -26,11 +26,13 @@ import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectWorkerGroup; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectWorkerGroupMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections4.SetUtils; @@ -38,6 +40,7 @@ import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import 
java.util.Map; import java.util.Objects; @@ -113,23 +116,25 @@ public Result assignWorkerGroupsToProject(User loginUser, Long projectCode, List } Set workerGroupNames = - workerGroupMapper.queryAllWorkerGroup().stream().map(item -> item.getName()).collect( + workerGroupMapper.queryAllWorkerGroup().stream().map(WorkerGroup::getName).collect( Collectors.toSet()); - workerGroupNames.add(Constants.DEFAULT_WORKER_GROUP); + workerGroupNames.add(WorkerGroupUtils.getDefaultWorkerGroup()); - Set assignedWorkerGroupNames = workerGroups.stream().collect(Collectors.toSet()); + Set assignedWorkerGroupNames = new HashSet<>(workerGroups); Set difference = SetUtils.difference(assignedWorkerGroupNames, workerGroupNames); - if (difference.size() > 0) { + if (!difference.isEmpty()) { putMsg(result, Status.WORKER_GROUP_NOT_EXIST, difference.toString()); return result; } Set projectWorkerGroupNames = projectWorkerGroupMapper.selectList(new QueryWrapper() .lambda() - .eq(ProjectWorkerGroup::getProjectCode, projectCode)).stream().map(item -> item.getWorkerGroup()) + .eq(ProjectWorkerGroup::getProjectCode, projectCode)) + .stream() + .map(ProjectWorkerGroup::getWorkerGroup) .collect(Collectors.toSet()); difference = SetUtils.difference(projectWorkerGroupNames, assignedWorkerGroupNames); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java index e6341f021d20..0757acd816eb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java @@ -17,16 +17,29 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.common.constants.Constants.ALIAS; -import static 
org.apache.dolphinscheduler.common.constants.Constants.CONTENT; -import static org.apache.dolphinscheduler.common.constants.Constants.EMPTY_STRING; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_SS; -import static org.apache.dolphinscheduler.common.constants.Constants.JAR; -import static org.apache.dolphinscheduler.common.constants.Constants.PERIOD; - -import org.apache.dolphinscheduler.api.dto.resources.DeleteDataTransferResponse; -import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter; +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.DeleteResourceDto; +import org.apache.dolphinscheduler.api.dto.resources.DeleteResourceRequest; +import org.apache.dolphinscheduler.api.dto.resources.DownloadFileDto; +import org.apache.dolphinscheduler.api.dto.resources.DownloadFileRequest; +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentDto; +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.PagingResourceItemRequest; +import org.apache.dolphinscheduler.api.dto.resources.QueryResourceDto; +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryDto; +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryRequest; +import org.apache.dolphinscheduler.api.dto.resources.RenameFileDto; +import org.apache.dolphinscheduler.api.dto.resources.RenameFileRequest; +import 
org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileDto; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentDto; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentRequest; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileRequest; import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor; import org.apache.dolphinscheduler.api.enums.Status; @@ -34,1255 +47,374 @@ import org.apache.dolphinscheduler.api.metrics.ApiServerMetrics; import org.apache.dolphinscheduler.api.service.ResourcesService; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.RegexUtils; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ProgramType; -import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.dolphinscheduler.api.validator.resource.CreateDirectoryDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.CreateDirectoryRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.CreateFileDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.CreateFileFromContentDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.DeleteResourceDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.DownloadFileDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.FetchFileContentDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.FileFromContentRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.FileRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.PagingResourceItemRequestTransformer; +import 
org.apache.dolphinscheduler.api.validator.resource.RenameDirectoryDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.RenameDirectoryRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.RenameFileDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.RenameFileRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.UpdateFileDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.UpdateFileFromContentDtoValidator; +import org.apache.dolphinscheduler.api.validator.resource.UpdateFileFromContentRequestTransformer; +import org.apache.dolphinscheduler.api.validator.resource.UpdateFileRequestTransformer; +import org.apache.dolphinscheduler.api.vo.ResourceItemVO; +import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.dao.repository.TenantDao; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.lang3.StringUtils; import java.io.File; -import java.io.IOException; +import java.nio.file.Files; import java.nio.file.Paths; -import java.text.MessageFormat; -import java.time.LocalDateTime; -import java.time.temporal.ChronoUnit; -import java.util.ArrayList; -import 
java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.UUID; import java.util.stream.Collectors; +import javax.servlet.http.HttpServletResponse; + +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; import org.springframework.web.multipart.MultipartFile; -import com.google.common.io.Files; - @Service @Slf4j public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService { @Autowired - private UdfFuncMapper udfFunctionMapper; - - @Autowired - private TenantMapper tenantMapper; + private TenantDao tenantDao; @Autowired private UserMapper userMapper; - @Autowired(required = false) - private StorageOperate storageOperate; - - /** - * create directory - * - * @param loginUser login user - * @param name alias - * @param type type - * @param pid parent id - * @param currentDir current directory - * @return create directory result - */ - @Override - @Transactional - public Result createDirectory(User loginUser, String name, ResourceType type, int pid, String currentDir) { - Result result = new Result<>(); - if (FileUtils.directoryTraversal(name)) { - log.warn("Parameter name is invalid, name:{}.", RegexUtils.escapeNRT(name)); - putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); - return result; - } - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - - String userResRootPath = ResourceType.UDF.equals(type) ? 
storageOperate.getUdfDir(tenantCode) - : storageOperate.getResDir(tenantCode); - String fullName = !currentDir.contains(userResRootPath) ? userResRootPath + name : currentDir + name; - - try { - if (checkResourceExists(fullName)) { - log.error("resource directory {} has exist, can't recreate", fullName); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - } catch (Exception e) { - log.warn("Resource exists, can not create again, fullName:{}.", fullName, e); - throw new ServiceException("resource already exists, can't recreate"); - } - - // create directory in storage - createDirectory(loginUser, fullName, type, result); - return result; - } - - /** - * create resource - * - * @param loginUser login user - * @param name alias - * @param type type - * @param file file - * @param currentDir current directory - * @return create result code - */ - @Override - @Transactional - public Result uploadResource(User loginUser, String name, ResourceType type, MultipartFile file, - String currentDir) { - Result result = new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - - result = verifyFile(name, type, file); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } - - // check resource name exists - String userResRootPath = ResourceType.UDF.equals(type) ? storageOperate.getUdfDir(tenantCode) - : storageOperate.getResDir(tenantCode); - String currDirNFileName = !currentDir.contains(userResRootPath) ? 
userResRootPath + name : currentDir + name; - - try { - if (checkResourceExists(currDirNFileName)) { - log.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name)); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - } catch (Exception e) { - throw new ServiceException("resource already exists, can't recreate"); - } - if (currDirNFileName.length() > Constants.RESOURCE_FULL_NAME_MAX_LENGTH) { - log.error( - "Resource file's name is longer than max full name length, fullName:{}, " - + "fullNameSize:{}, maxFullNameSize:{}", - RegexUtils.escapeNRT(name), currDirNFileName.length(), Constants.RESOURCE_FULL_NAME_MAX_LENGTH); - putMsg(result, Status.RESOURCE_FULL_NAME_TOO_LONG_ERROR); - return result; - } - - // fail upload - if (!upload(loginUser, currDirNFileName, file, type)) { - log.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.STORE_OPERATE_CREATE_ERROR); - throw new ServiceException( - String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); - } else - ApiServerMetrics.recordApiResourceUploadSize(file.getSize()); - log.info("Upload resource file complete, resourceName:{}, fileName:{}.", RegexUtils.escapeNRT(name), - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * check resource is exists - * - * @param fullName fullName - * @return true if resource exists - */ - private boolean checkResourceExists(String fullName) { - Boolean existResource = false; - try { - existResource = storageOperate.exists(fullName); - } catch (IOException e) { - log.error("error occurred when checking resource: " + fullName, e); - } - return Boolean.TRUE.equals(existResource); - } - - /** - * update resource - * - * @param loginUser login user - * @param resourceFullName resource full name - * @param resTenantCode tenantCode in the request field 
"resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. - * @param name name - * @param type resource type - * @param file resource file - * @return update result code - */ - @Override - @Transactional - public Result updateResource(User loginUser, String resourceFullName, String resTenantCode, String name, - ResourceType type, MultipartFile file) { - Result result = new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); + @Autowired + private StorageOperator storageOperator; - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } + @Autowired + private CreateDirectoryRequestTransformer createDirectoryRequestTransformer; - String defaultPath = storageOperate.getResDir(tenantCode); + @Autowired + private CreateDirectoryDtoValidator createDirectoryDtoValidator; - StorageEntity resource; - try { - resource = storageOperate.getFileStatus(resourceFullName, defaultPath, resTenantCode, type); - } catch (Exception e) { - log.error("Get file status fail, resource path: {}", resourceFullName, e); - putMsg(result, Status.RESOURCE_NOT_EXIST); - throw new ServiceException((String.format("Get file status fail, resource path: %s", resourceFullName))); - } + @Autowired + private RenameDirectoryRequestTransformer renameDirectoryRequestTransformer; - // TODO: deal with OSS - if (resource.isDirectory() && storageOperate.returnStorageType().equals(ResUploadType.S3) - && !resource.getFileName().equals(name)) { - log.warn("Directory in S3 storage can not be renamed."); - putMsg(result, Status.S3_CANNOT_RENAME); - return result; - } + 
@Autowired + private RenameDirectoryDtoValidator renameDirectoryDtoValidator; - // check if updated name of the resource already exists - String originFullName = resource.getFullName(); - String originResourceName = resource.getAlias(); - - // the format of hdfs folders in the implementation has a "/" at the very end, we need to remove it. - originFullName = originFullName.endsWith("/") ? StringUtils.chop(originFullName) : originFullName; - name = name.endsWith("/") ? StringUtils.chop(name) : name; - // updated fullName - String fullName = String.format(FORMAT_SS, - originFullName.substring(0, originFullName.lastIndexOf(FOLDER_SEPARATOR) + 1), name); - if (!originResourceName.equals(name)) { - try { - if (checkResourceExists(fullName)) { - log.error("resource {} already exists, can't recreate", fullName); - putMsg(result, Status.RESOURCE_EXIST); - return result; - } - } catch (Exception e) { - throw new ServiceException(String.format("error occurs while querying resource: %s", fullName)); - } + @Autowired + private RenameFileRequestTransformer renameFileRequestTransformer; - } + @Autowired + private RenameFileDtoValidator renameFileDtoValidator; - result = verifyFile(name, type, file); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } + @Autowired + private FileFromContentRequestTransformer createFileFromContentRequestTransformer; - Date now = new Date(); + @Autowired + private CreateFileFromContentDtoValidator createFileFromContentDtoValidator; - resource.setAlias(name); - resource.setFileName(name); - resource.setFullName(fullName); - resource.setUpdateTime(now); - if (file != null) { - resource.setSize(file.getSize()); - } + @Autowired + private FetchFileContentDtoValidator fetchFileContentDtoValidator; - // if name unchanged, return directly without moving on HDFS - if (originResourceName.equals(name) && file == null) { - return result; - } + @Autowired + private UpdateFileFromContentRequestTransformer 
updateFileFromContentRequestTransformer; - if (file != null) { - // fail upload - if (!upload(loginUser, fullName, file, type)) { - log.error("Storage operation error, resourceName:{}, originFileName:{}.", name, - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.HDFS_OPERATION_ERROR); - throw new ServiceException( - String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); - } - if (!fullName.equals(originFullName)) { - try { - storageOperate.delete(originFullName, false); - } catch (IOException e) { - log.error("Resource delete error, resourceFullName:{}.", originFullName, e); - throw new ServiceException(String.format("delete resource: %s failed.", originFullName)); - } - } + @Autowired + private UpdateFileFromContentDtoValidator updateFileFromContentDtoValidator; - ApiServerMetrics.recordApiResourceUploadSize(file.getSize()); - return result; - } + @Autowired + private FileRequestTransformer createFileRequestTransformer; - // get the path of dest file in hdfs - String destHdfsFileName = fullName; - try { - log.info("start copy {} -> {}", originFullName, destHdfsFileName); - storageOperate.copy(originFullName, destHdfsFileName, true, true); - putMsg(result, Status.SUCCESS); - } catch (Exception e) { - log.error(MessageFormat.format(" copy {0} -> {1} fail", originFullName, destHdfsFileName), e); - putMsg(result, Status.HDFS_COPY_FAIL); - throw new ServiceException( - MessageFormat.format(Status.HDFS_COPY_FAIL.getMsg(), originFullName, destHdfsFileName)); - } + @Autowired + private CreateFileDtoValidator createFileDtoValidator; - return result; - } + @Autowired + private UpdateFileRequestTransformer updateFileRequestTransformer; - private Result verifyFile(String name, ResourceType type, MultipartFile file) { - Result result = new Result<>(); - putMsg(result, Status.SUCCESS); + @Autowired + private UpdateFileDtoValidator updateFileDtoValidator; - if (FileUtils.directoryTraversal(name)) { - 
log.warn("Parameter file alias name verify failed, fileAliasName:{}.", RegexUtils.escapeNRT(name)); - putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); - return result; - } + @Autowired + private DeleteResourceDtoValidator deleteResourceDtoValidator; - if (file != null && FileUtils.directoryTraversal(Objects.requireNonNull(file.getOriginalFilename()))) { - log.warn("File original name verify failed, fileOriginalName:{}.", - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); - return result; - } + @Autowired + private DownloadFileDtoValidator downloadFileDtoValidator; - if (file != null) { - // file is empty - if (file.isEmpty()) { - log.warn("Parameter file is empty, fileOriginalName:{}.", - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); - return result; - } - - // file suffix - String fileSuffix = Files.getFileExtension(file.getOriginalFilename()); - String nameSuffix = Files.getFileExtension(name); - - // determine file suffix - if (!fileSuffix.equalsIgnoreCase(nameSuffix)) { - // rename file suffix and original suffix must be consistent - log.warn("Rename file suffix and original suffix must be consistent, fileOriginalName:{}.", - RegexUtils.escapeNRT(file.getOriginalFilename())); - putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); - return result; - } - - // If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar - if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) { - log.warn(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); - putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); - return result; - } - if (file.getSize() > Constants.MAX_FILE_SIZE) { - log.warn( - "Resource file size is larger than max file size, fileOriginalName:{}, fileSize:{}, maxFileSize:{}.", - RegexUtils.escapeNRT(file.getOriginalFilename()), file.getSize(), Constants.MAX_FILE_SIZE); - putMsg(result, 
Status.RESOURCE_SIZE_EXCEED_LIMIT); - return result; - } - } - return result; - } + @Autowired + private PagingResourceItemRequestTransformer pagingResourceItemRequestTransformer; - /** - * query resources list paging - * - * @param loginUser login user - * @param fullName resource full name - * @param resTenantCode tenantCode in the request field "resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. - * @param type resource type - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @return resource list page - */ @Override - public Result> queryResourceListPaging(User loginUser, String fullName, - String resTenantCode, ResourceType type, - String searchVal, Integer pageNo, Integer pageSize) { - Result> result = new Result<>(); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - if (storageOperate == null) { - log.warn("The resource storage is not opened."); - return Result.success(pageInfo); - } - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); + public void createDirectory(CreateDirectoryRequest createDirectoryRequest) { + CreateDirectoryDto createDirectoryDto = createDirectoryRequestTransformer.transform(createDirectoryRequest); + createDirectoryDtoValidator.validate(createDirectoryDto); - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - List resourcesList; - try { - resourcesList = queryStorageEntityList(loginUser, fullName, type, tenantCode, false); - } catch (ServiceException e) { - putMsg(result, Status.RESOURCE_NOT_EXIST); - 
return result; - } - - // remove leading and trailing spaces in searchVal - String trimmedSearchVal = searchVal != null ? searchVal.trim() : ""; - // filter based on trimmed searchVal - List filteredResourceList = resourcesList.stream() - .filter(x -> x.getFileName().contains(trimmedSearchVal)).collect(Collectors.toList()); - // inefficient pagination - List slicedResourcesList = filteredResourceList.stream().skip((long) (pageNo - 1) * pageSize) - .limit(pageSize).collect(Collectors.toList()); - - pageInfo.setTotal(filteredResourceList.size()); - pageInfo.setTotalList(slicedResourcesList); - result.setData(pageInfo); - putMsg(result, Status.SUCCESS); - return result; + storageOperator.createStorageDir(createDirectoryDto.getDirectoryAbsolutePath()); + log.info("Success create directory: {}", createDirectoryRequest.getParentAbsoluteDirectory()); } - private List queryStorageEntityList(User loginUser, String fullName, ResourceType type, - String tenantCode, boolean recursive) { - String defaultPath = ""; - List resourcesList = new ArrayList<>(); - String resourceStorageType = - PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE, ResUploadType.LOCAL.name()); - if (isAdmin(loginUser) && StringUtils.isBlank(fullName)) { - // list all tenants' resources to admin users in the root directory - List userList = userMapper.selectList(null); - Set visitedTenantEntityCode = new HashSet<>(); - for (User userEntity : userList) { - String tenantEntityCode = getTenantCode(userEntity); - if (!visitedTenantEntityCode.contains(tenantEntityCode)) { - defaultPath = storageOperate.getResDir(tenantEntityCode); - if (type.equals(ResourceType.UDF)) { - defaultPath = storageOperate.getUdfDir(tenantEntityCode); - } - try { - resourcesList.addAll(recursive - ? 
storageOperate.listFilesStatusRecursively(defaultPath, defaultPath, tenantEntityCode, - type) - : storageOperate.listFilesStatus(defaultPath, defaultPath, tenantEntityCode, type)); - - visitedTenantEntityCode.add(tenantEntityCode); - } catch (Exception e) { - log.error(e.getMessage() + " Resource path: {}", defaultPath, e); - throw new ServiceException( - String.format(e.getMessage() + " make sure resource path: %s exists in %s", defaultPath, - resourceStorageType)); - } - } - } - } else { - defaultPath = storageOperate.getResDir(tenantCode); - if (type.equals(ResourceType.UDF)) { - defaultPath = storageOperate.getUdfDir(tenantCode); - } - - try { - if (StringUtils.isBlank(fullName)) { - fullName = defaultPath; - } - resourcesList = - recursive ? storageOperate.listFilesStatusRecursively(fullName, defaultPath, tenantCode, type) - : storageOperate.listFilesStatus(fullName, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error(e.getMessage() + " Resource path: {}", fullName, e); - throw new ServiceException(String.format(e.getMessage() + " make sure resource path: %s exists in %s", - defaultPath, resourceStorageType)); - } - } - - return resourcesList; - } - - /** - * create directory - * xxx The steps to verify resources are cumbersome and can be optimized - * - * @param loginUser login user - * @param fullName full name - * @param type resource type - * @param result Result - */ - private void createDirectory(User loginUser, String fullName, ResourceType type, Result result) { - String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - // String directoryName = storageOperate.getFileName(type, tenantCode, fullName); - String resourceRootPath = storageOperate.getDir(type, tenantCode); + @Override + public void createFile(CreateFileRequest createFileRequest) { + CreateFileDto createFileDto = createFileRequestTransformer.transform(createFileRequest); + createFileDtoValidator.validate(createFileDto); + + // todo: use storage 
proxy + MultipartFile file = createFileDto.getFile(); + String fileAbsolutePath = createFileDto.getFileAbsolutePath(); + String srcLocalTmpFileAbsolutePath = copyFileToLocal(file); try { - if (!storageOperate.exists(resourceRootPath)) { - storageOperate.createTenantDirIfNotExists(tenantCode); - } - - if (!storageOperate.mkdir(tenantCode, fullName)) { - throw new ServiceException(String.format("Create resource directory: %s failed.", fullName)); - } - putMsg(result, Status.SUCCESS); - } catch (Exception e) { - throw new ServiceException(String.format("create resource directory: %s failed.", fullName)); + storageOperator.upload(srcLocalTmpFileAbsolutePath, fileAbsolutePath, true, false); + ApiServerMetrics.recordApiResourceUploadSize(file.getSize()); + log.info("Success upload resource file: {} complete.", fileAbsolutePath); + } catch (Exception ex) { + // If exception, clear the tmp path + FileUtils.deleteFile(srcLocalTmpFileAbsolutePath); + throw ex; } } - /** - * upload file to hdfs - * - * @param loginUser login user - * @param fullName full name - * @param file file - * @param type resource type - * @return upload success return true, otherwise false - */ - private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) { - // save to local - String fileSuffix = Files.getFileExtension(file.getOriginalFilename()); - String nameSuffix = Files.getFileExtension(fullName); - - // determine file suffix - if (!fileSuffix.equalsIgnoreCase(nameSuffix)) { - return false; - } - // query tenant - String tenantCode = getTenantCode(loginUser); - // random file name - String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - // save file to hdfs, and delete original file - String resourcePath = storageOperate.getDir(type, tenantCode); + @Override + public void createFileFromContent(CreateFileFromContentRequest createFileFromContentRequest) { + CreateFileFromContentDto createFileFromContentDto = + 
createFileFromContentRequestTransformer.transform(createFileFromContentRequest); + createFileFromContentDtoValidator.validate(createFileFromContentDto); + + // todo: use storage proxy + String fileContent = createFileFromContentDto.getFileContent(); + String fileAbsolutePath = createFileFromContentDto.getFileAbsolutePath(); + String srcLocalTmpFileAbsolutePath = copyFileToLocal(fileContent); try { - // if tenant dir not exists - if (!storageOperate.exists(resourcePath)) { - storageOperate.createTenantDirIfNotExists(tenantCode); - } - org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename); - storageOperate.upload(tenantCode, localFilename, fullName, true, true); - FileUtils.deleteFile(localFilename); - } catch (Exception e) { - FileUtils.deleteFile(localFilename); - log.error(e.getMessage(), e); - return false; + storageOperator.upload(srcLocalTmpFileAbsolutePath, fileAbsolutePath, true, false); + ApiServerMetrics.recordApiResourceUploadSize(fileContent.length()); + log.info("Success upload resource file: {} complete.", fileAbsolutePath); + } catch (Exception ex) { + // If exception, clear the tmp path + FileUtils.deleteFile(srcLocalTmpFileAbsolutePath); + throw ex; } - return true; } - /** - * query resource list - * - * @param loginUser login user - * @param type resource type - * @param fullName resource full name - * @return resource list - */ @Override - public Map queryResourceList(User loginUser, ResourceType type, String fullName) { - Map result = new HashMap<>(); - if (storageOperate == null) { - result.put(Constants.DATA_LIST, Collections.emptyList()); - result.put(Constants.STATUS, Status.SUCCESS); - return result; - } - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return null; - } - - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); - - 
String baseDir = storageOperate.getDir(type, tenantCode); - - List resourcesList = new ArrayList<>(); - if (StringUtils.isBlank(fullName)) { - if (isAdmin(loginUser)) { - List userList = userMapper.selectList(null); - Set visitedTenantEntityCode = new HashSet<>(); - for (User userEntity : userList) { - String tenantEntityCode = getTenantCode(userEntity); - if (!visitedTenantEntityCode.contains(tenantEntityCode)) { - baseDir = storageOperate.getDir(type, tenantEntityCode); - resourcesList.addAll(storageOperate.listFilesStatusRecursively(baseDir, baseDir, - tenantEntityCode, type)); - visitedTenantEntityCode.add(tenantEntityCode); - } - } - } else { - resourcesList = storageOperate.listFilesStatusRecursively(baseDir, baseDir, tenantCode, type); - } - } else { - resourcesList = storageOperate.listFilesStatusRecursively(fullName, baseDir, tenantCode, type); - } - - Visitor resourceTreeVisitor = new ResourceTreeVisitor(resourcesList); - result.put(Constants.DATA_LIST, resourceTreeVisitor.visit(baseDir).getChildren()); - putMsg(result, Status.SUCCESS); - - return result; + public void renameDirectory(RenameDirectoryRequest renameDirectoryRequest) { + RenameDirectoryDto renameDirectoryDto = renameDirectoryRequestTransformer.transform(renameDirectoryRequest); + renameDirectoryDtoValidator.validate(renameDirectoryDto); + + String originDirectoryAbsolutePath = renameDirectoryDto.getOriginDirectoryAbsolutePath(); + String targetDirectoryAbsolutePath = renameDirectoryDto.getTargetDirectoryAbsolutePath(); + storageOperator.copy(originDirectoryAbsolutePath, targetDirectoryAbsolutePath, true, true); + log.info("Success rename directory: {} -> {} ", originDirectoryAbsolutePath, targetDirectoryAbsolutePath); } - /** - * query resource list by program type - * - * @param loginUser login user - * @param type resource type - * @return resource list - */ @Override - public Result queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) { - Result result = 
new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - Tenant tenant = tenantMapper.queryById(user.getTenantId()); - if (tenant == null) { - log.error("tenant not exists"); - putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); - return result; - } - - String tenantCode = tenant.getTenantCode(); - - List allResourceList = queryStorageEntityList(loginUser, "", type, tenantCode, true); - - String suffix = ".jar"; - if (programType != null) { - switch (programType) { - case JAVA: - case SCALA: - break; - case PYTHON: - suffix = ".py"; - break; - default: - } - } - List resources = new ResourceFilter(suffix, new ArrayList<>(allResourceList)).filter(); - Visitor visitor = new ResourceTreeVisitor(resources); - result.setData(visitor.visit("").getChildren()); - putMsg(result, Status.SUCCESS); - return result; + public void renameFile(RenameFileRequest renameFileRequest) { + RenameFileDto renameFileDto = renameFileRequestTransformer.transform(renameFileRequest); + renameFileDtoValidator.validate(renameFileDto); + + String originFileAbsolutePath = renameFileDto.getOriginFileAbsolutePath(); + String targetFileAbsolutePath = renameFileDto.getTargetFileAbsolutePath(); + storageOperator.copy(originFileAbsolutePath, targetFileAbsolutePath, true, true); + log.info("Success rename file: {} -> {} ", originFileAbsolutePath, targetFileAbsolutePath); } - /** - * delete resource - * - * @param loginUser login user - * @param fullName resource full name - * @param resTenantCode tenantCode in the request field "resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. 
- * @return delete result code - * @throws IOException exception - */ @Override - @Transactional(rollbackFor = Exception.class) - public Result delete(User loginUser, String fullName, String resTenantCode) throws IOException { - Result result = new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); + public void updateFile(UpdateFileRequest updateFileRequest) { + UpdateFileDto updateFileDto = updateFileRequestTransformer.transform(updateFileRequest); + updateFileDtoValidator.validate(updateFileDto); - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - String baseDir = storageOperate.getResDir(tenantCode); - - StorageEntity resource; + String srcLocalTmpFileAbsolutePath = copyFileToLocal(updateFileDto.getFile()); try { - resource = storageOperate.getFileStatus(fullName, baseDir, resTenantCode, null); - } catch (Exception e) { - log.error(e.getMessage() + " Resource path: {}", fullName, e); - putMsg(result, Status.RESOURCE_NOT_EXIST); - throw new ServiceException(String.format(e.getMessage() + " Resource path: %s", fullName)); - } - - if (resource == null) { - log.error("Resource does not exist, resource full name:{}.", fullName); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // recursively delete a folder - List allChildren = - storageOperate.listFilesStatusRecursively(fullName, baseDir, resTenantCode, resource.getType()) - .stream().map(storageEntity -> storageEntity.getFullName()).collect(Collectors.toList()); - - String[] allChildrenFullNameArray = allChildren.stream().toArray(String[]::new); - - // if resource type is UDF,need 
check whether it is bound by UDF function - if (resource.getType() == (ResourceType.UDF)) { - List udfFuncs = udfFunctionMapper.listUdfByResourceFullName(allChildrenFullNameArray); - if (CollectionUtils.isNotEmpty(udfFuncs)) { - log.warn("Resource can not be deleted because it is bound by UDF functions, udfFuncIds:{}", udfFuncs); - putMsg(result, Status.UDF_RESOURCE_IS_BOUND, udfFuncs.get(0).getFuncName()); - return result; - } + storageOperator.upload(srcLocalTmpFileAbsolutePath, updateFileDto.getFileAbsolutePath(), true, true); + ApiServerMetrics.recordApiResourceUploadSize(updateFileDto.getFile().getSize()); + log.info("Success upload resource file: {} complete.", updateFileDto.getFileAbsolutePath()); + } catch (Exception ex) { + // If exception, clear the tmp path + FileUtils.deleteFile(srcLocalTmpFileAbsolutePath); + throw ex; } - - // delete file on hdfs,S3 - storageOperate.delete(fullName, allChildren, true); - - putMsg(result, Status.SUCCESS); - - return result; } - /** - * verify resource by name and type - * - * @param loginUser login user - * @param fullName resource full name - * @param type resource type - * @return true if the resource name not exists, otherwise return false - */ @Override - public Result verifyResourceName(String fullName, ResourceType type, User loginUser) { - Result result = new Result<>(); - putMsg(result, Status.SUCCESS); - if (checkResourceExists(fullName)) { - log.error("Resource with same name exists so can not create again, resourceType:{}, resourceName:{}.", type, - RegexUtils.escapeNRT(fullName)); - putMsg(result, Status.RESOURCE_EXIST); - } - - return result; + public PageInfo pagingResourceItem(PagingResourceItemRequest pagingResourceItemRequest) { + + QueryResourceDto queryResourceDto = pagingResourceItemRequestTransformer.transform(pagingResourceItemRequest); + List resourceAbsolutePaths = queryResourceDto.getResourceAbsolutePaths(); + if (CollectionUtils.isEmpty(resourceAbsolutePaths)) { + return new 
PageInfo<>(pagingResourceItemRequest.getPageNo(), pagingResourceItemRequest.getPageSize()); + } + + for (String resourceAbsolutePath : resourceAbsolutePaths) { + createDirectoryDtoValidator.exceptionResourceAbsolutePathInvalidated(resourceAbsolutePath); + createDirectoryDtoValidator.exceptionUserNoResourcePermission(pagingResourceItemRequest.getLoginUser(), + resourceAbsolutePath); + } + + Integer pageNo = pagingResourceItemRequest.getPageNo(); + Integer pageSize = pagingResourceItemRequest.getPageSize(); + + List storageEntities = resourceAbsolutePaths.stream() + .flatMap(resourceAbsolutePath -> storageOperator.listStorageEntity(resourceAbsolutePath).stream()) + .collect(Collectors.toList()); + + List result = storageEntities + .stream() + .filter(storageEntity -> storageEntity.getFileName() + .contains(pagingResourceItemRequest.getResourceNameKeyWord())) + .skip((long) (pageNo - 1) * pageSize) + .limit(pageSize) + .map(ResourceItemVO::new) + .collect(Collectors.toList()); + + return PageInfo.builder() + .pageNo(pagingResourceItemRequest.getPageNo()) + .pageSize(pagingResourceItemRequest.getPageSize()) + .total(storageEntities.size()) + .totalList(result) + .build(); } - /** - * verify resource by full name or pid and type - * - * @param fileName resource file name - * @param type resource type - * @param resTenantCode tenantCode in the request field "resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. 
- * @return true if the resource full name or pid not exists, otherwise return false - */ @Override - public Result queryResourceByFileName(User loginUser, String fileName, ResourceType type, - String resTenantCode) { - Result result = new Result<>(); - if (StringUtils.isBlank(fileName)) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - return result; - } - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - String defaultPath = storageOperate.getDir(type, resTenantCode); - StorageEntity file; - try { - file = storageOperate.getFileStatus(defaultPath + fileName, defaultPath, resTenantCode, type); - } catch (Exception e) { - log.error(e.getMessage() + " Resource path: {}", defaultPath + fileName, e); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - putMsg(result, Status.SUCCESS); - result.setData(file); - return result; + public List queryResourceFiles(User loginUser, ResourceType resourceType) { + Tenant tenant = tenantDao.queryOptionalById(loginUser.getTenantId()) + .orElseThrow(() -> new ServiceException(Status.TENANT_NOT_EXIST, loginUser.getTenantId())); + String storageBaseDirectory = storageOperator.getStorageBaseDirectory(tenant.getTenantCode(), resourceType); + List allResourceFiles = storageOperator.listFileStorageEntityRecursively(storageBaseDirectory); + + Visitor visitor = new ResourceTreeVisitor(allResourceFiles); + return visitor.visit("").getChildren(); } - /** - * view resource file online - * - * @param fullName resource fullName - * @param resTenantCode owner's tenant code of the resource - * @param 
skipLineNum skip line number - * @param limit limit - * @return resource content - */ @Override - public Result readResource(User loginUser, String fullName, String resTenantCode, int skipLineNum, - int limit) { - Result result = new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); - - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - // check preview or not by file suffix - String nameSuffix = Files.getFileExtension(fullName); - String resourceViewSuffixes = FileUtils.getResourceViewSuffixes(); - if (StringUtils.isNotEmpty(resourceViewSuffixes)) { - List strList = Arrays.asList(resourceViewSuffixes.split(",")); - if (!strList.contains(nameSuffix)) { - log.error("Resource suffix does not support view,resourceFullName:{}, suffix:{}.", fullName, - nameSuffix); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - List content; - try { - if (storageOperate.exists(fullName)) { - content = storageOperate.vimFile(tenantCode, fullName, skipLineNum, limit); - long size = content.stream().mapToLong(String::length).sum(); - ApiServerMetrics.recordApiResourceDownloadSize(size); - } else { - log.error("read file {} not exist in storage", fullName); - putMsg(result, Status.RESOURCE_FILE_NOT_EXIST, fullName); - return result; - } - - } catch (Exception e) { - log.error("Resource {} read failed", fullName, e); - putMsg(result, Status.HDFS_OPERATION_ERROR); - return result; - } - - putMsg(result, Status.SUCCESS); - Map map = new HashMap<>(); - map.put(ALIAS, fullName); - map.put(CONTENT, String.join("\n", content)); - 
result.setData(map); - - return result; + public void delete(DeleteResourceRequest deleteResourceRequest) { + DeleteResourceDto deleteResourceDto = DeleteResourceDto.builder() + .loginUser(deleteResourceRequest.getLoginUser()) + .resourceAbsolutePath(deleteResourceRequest.getResourceAbsolutePath()) + .build(); + deleteResourceDtoValidator.validate(deleteResourceDto); + storageOperator.delete(deleteResourceDto.getResourceAbsolutePath(), true); } - /** - * create resource file online - * - * @param loginUser login user - * @param type resource type - * @param fileName file name - * @param fileSuffix file suffix - * @param content content - * @param currentDir current directory - * @return create result code - */ @Override - @Transactional - public Result createResourceFile(User loginUser, ResourceType type, String fileName, String fileSuffix, - String content, String currentDir) { - Result result = new Result<>(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - - if (FileUtils.directoryTraversal(fileName)) { - log.warn("File name verify failed, fileName:{}.", RegexUtils.escapeNRT(fileName)); - putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); - return result; - } - - // check file suffix - String nameSuffix = fileSuffix.trim(); - String resourceViewSuffixes = FileUtils.getResourceViewSuffixes(); - if (StringUtils.isNotEmpty(resourceViewSuffixes)) { - List strList = Arrays.asList(resourceViewSuffixes.split(",")); - if (!strList.contains(nameSuffix)) { - log.warn("Resource suffix does not support view, suffix:{}.", nameSuffix); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - String name = fileName.trim() + "." 
+ nameSuffix; - - String userResRootPath = storageOperate.getResDir(tenantCode); - String fullName = currentDir.contains(userResRootPath) ? currentDir + name : userResRootPath + name; - - result = verifyResourceName(fullName, type, loginUser); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - return result; - } - - result = uploadContentToStorage(fullName, tenantCode, content); - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new ServiceException(result.getMsg()); - } - return result; + public FetchFileContentResponse fetchResourceFileContent(FetchFileContentRequest fetchFileContentRequest) { + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(fetchFileContentRequest.getLoginUser()) + .resourceFileAbsolutePath(fetchFileContentRequest.getResourceFileAbsolutePath()) + .skipLineNum(fetchFileContentRequest.getSkipLineNum()) + .limit(fetchFileContentRequest.getLimit()) + .build(); + fetchFileContentDtoValidator.validate(fetchFileContentDto); + + String content = storageOperator + .fetchFileContent( + fetchFileContentRequest.getResourceFileAbsolutePath(), + fetchFileContentRequest.getSkipLineNum(), + fetchFileContentRequest.getLimit()) + .stream() + .collect(Collectors.joining("\n")); + + ApiServerMetrics.recordApiResourceDownloadSize(content.length()); + + return FetchFileContentResponse.builder() + .content(content) + .build(); } @Override - @Transactional - public StorageEntity createOrUpdateResource(String userName, String filepath, - String resourceContent) throws Exception { - User user = userMapper.queryByUserNameAccurately(userName); - int suffixLabelIndex = filepath.indexOf(PERIOD); - if (suffixLabelIndex == -1) { - throw new IllegalArgumentException(String - .format("Not allow create or update resources without extension name, filepath: %s", filepath)); - } - - String defaultPath = storageOperate.getResDir(user.getTenantCode()); - String fullName = defaultPath + filepath; + public void 
updateFileFromContent(UpdateFileFromContentRequest updateFileContentRequest) { + UpdateFileFromContentDto updateFileFromContentDto = + updateFileFromContentRequestTransformer.transform(updateFileContentRequest); + updateFileFromContentDtoValidator.validate(updateFileFromContentDto); - Result result = uploadContentToStorage(fullName, user.getTenantCode(), resourceContent); - if (result.getCode() != Status.SUCCESS.getCode()) { - throw new ServiceException(result.getMsg()); + String srcLocalTmpFileAbsolutePath = copyFileToLocal(updateFileFromContentDto.getFileContent()); + try { + storageOperator.upload(srcLocalTmpFileAbsolutePath, updateFileFromContentDto.getFileAbsolutePath(), true, + true); + ApiServerMetrics.recordApiResourceUploadSize(updateFileFromContentDto.getFileContent().length()); + log.info("Success upload resource file: {} complete.", updateFileFromContentDto.getFileAbsolutePath()); + } catch (Exception ex) { + // If exception, clear the tmp path + FileUtils.deleteFile(srcLocalTmpFileAbsolutePath); + throw new ServiceException("Update the resource file from content: " + + updateFileFromContentDto.getFileAbsolutePath() + " failed", ex); } - return storageOperate.getFileStatus(fullName, defaultPath, user.getTenantCode(), ResourceType.FILE); } - /** - * updateProcessInstance resource - * - * @param fullName resource full name - * @param resTenantCode tenantCode in the request field "resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. 
- * @param content content - * @return update result cod - */ @Override - @Transactional - public Result updateResourceContent(User loginUser, String fullName, String resTenantCode, String content) { - Result result = new Result<>(); - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } + public void downloadResource(HttpServletResponse response, DownloadFileRequest downloadFileRequest) { + DownloadFileDto downloadFileDto = DownloadFileDto.builder() + .loginUser(downloadFileRequest.getLoginUser()) + .fileAbsolutePath(downloadFileRequest.getFileAbsolutePath()) + .build(); + downloadFileDtoValidator.validate(downloadFileDto); - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); - - if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - log.error("current user does not have permission"); - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } + String fileName = new File(downloadFileDto.getFileAbsolutePath()).getName(); + String localTmpFileAbsolutePath = FileUtils.getDownloadFilename(fileName); - StorageEntity resource; try { - resource = storageOperate.getFileStatus(fullName, "", resTenantCode, ResourceType.FILE); + storageOperator.download(downloadFileRequest.getFileAbsolutePath(), localTmpFileAbsolutePath, true); + int length = (int) new File(localTmpFileAbsolutePath).length(); + ApiServerMetrics.recordApiResourceDownloadSize(length); + + response.reset(); + response.setContentType("application/octet-stream"); + response.setCharacterEncoding("utf-8"); + response.setContentLength(length); + response.setHeader("Content-Disposition", "attachment;filename=" + fileName); + Files.copy(Paths.get(localTmpFileAbsolutePath), response.getOutputStream()); } catch (Exception e) { - log.error("error occurred when fetching resource information , 
resource full name {}", fullName); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - if (resource == null) { - log.error("Resource does not exist, resource full name:{}.", fullName); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // check can edit by file suffix - String nameSuffix = Files.getFileExtension(resource.getAlias()); - String resourceViewSuffixes = FileUtils.getResourceViewSuffixes(); - if (StringUtils.isNotEmpty(resourceViewSuffixes)) { - List strList = Arrays.asList(resourceViewSuffixes.split(",")); - if (!strList.contains(nameSuffix)) { - log.warn("Resource suffix does not support view, resource full name:{}, suffix:{}.", fullName, - nameSuffix); - putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); - return result; - } - } - - result = uploadContentToStorage(resource.getFullName(), resTenantCode, content); - - if (!result.getCode().equals(Status.SUCCESS.getCode())) { - throw new ServiceException(result.getMsg()); - } else - log.info("Update resource content complete, resource full name:{}.", fullName); - return result; - } - - /** - * @param fullName resource full name - * @param tenantCode tenant code - * @param content content - * @return result - */ - private Result uploadContentToStorage(String fullName, String tenantCode, String content) { - Result result = new Result<>(); - String localFilename = ""; - try { - localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); - - if (!FileUtils.writeContent2File(content, localFilename)) { - // write file fail - log.error("Write file error, fileName:{}, content:{}.", localFilename, RegexUtils.escapeNRT(content)); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // get resource file path - String resourcePath = storageOperate.getResDir(tenantCode); - log.info("resource path is {}, resource dir is {}", fullName, resourcePath); - - if (!storageOperate.exists(resourcePath)) { - // create if tenant dir not exists 
- storageOperate.createTenantDirIfNotExists(tenantCode); - log.info("Create tenant dir because path {} does not exist, tenantCode:{}.", resourcePath, tenantCode); - } - if (storageOperate.exists(fullName)) { - storageOperate.delete(fullName, false); - } - - storageOperate.upload(tenantCode, localFilename, fullName, true, true); - } catch (Exception e) { - log.error("Upload content to storage error, tenantCode:{}, destFileName:{}.", tenantCode, localFilename, e); - result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); - result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, fullName)); - return result; + throw new ServiceException( + "Download the resource file: " + downloadFileRequest.getFileAbsolutePath() + " failed", e); } finally { - FileUtils.deleteFile(localFilename); + FileUtils.deleteFile(localTmpFileAbsolutePath); } - log.info("Upload content to storage complete, tenantCode:{}, destFileName:{}.", tenantCode, localFilename); - putMsg(result, Status.SUCCESS); - return result; } - /** - * download file - * - * @return resource content - */ @Override - public org.springframework.core.io.Resource downloadResource(User loginUser, String fullName) { - if (fullName.endsWith("/")) { - log.error("resource id {} is directory,can't download it", fullName); - throw new ServiceException("can't download directory"); - } - - int userId = loginUser.getId(); - User user = userMapper.selectById(userId); - if (user == null) { - log.error("User does not exits, userId:{}.", userId); - throw new ServiceException(String.format("Resource owner id %d does not exist", userId)); - } - - String tenantCode = getTenantCode(user); - checkFullName(tenantCode, fullName); - - String[] aliasArr = fullName.split("/"); - String alias = aliasArr[aliasArr.length - 1]; - String localFileName = FileUtils.getDownloadFilename(alias); - log.info("Resource path is {}, download local filename is {}", alias, localFileName); - - try { - storageOperate.download(fullName, localFileName, 
true); - ApiServerMetrics.recordApiResourceDownloadSize(java.nio.file.Files.size(Paths.get(localFileName))); - return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); - } catch (IOException e) { - log.error("Download resource error, the path is {}, and local filename is {}, the error message is {}", - fullName, localFileName, e.getMessage()); - throw new ServiceException("Download the resource file failed ,it may be related to your storage"); - } - } - - @Override - public StorageEntity queryFileStatus(String userName, String fileName) throws Exception { - // TODO: It is used in PythonGateway, should be revised - User user = userMapper.queryByUserNameAccurately(userName); - - String defaultPath = storageOperate.getResDir(user.getTenantCode()); - return storageOperate.getFileStatus(defaultPath + fileName, defaultPath, user.getTenantCode(), - ResourceType.FILE); + public StorageEntity queryFileStatus(String userName, String fileAbsolutePath) { + return storageOperator.getStorageEntity(fileAbsolutePath); } @Override - public DeleteDataTransferResponse deleteDataTransferData(User loginUser, Integer days) { - DeleteDataTransferResponse result = new DeleteDataTransferResponse(); - - User user = userMapper.selectById(loginUser.getId()); - if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; - } - - String tenantCode = getTenantCode(user); - - String baseFolder = storageOperate.getResourceFullName(tenantCode, "DATA_TRANSFER"); - - LocalDateTime now = LocalDateTime.now(); - now = now.minus(days, ChronoUnit.DAYS); - String deleteDate = now.toLocalDate().toString().replace("-", ""); - List storageEntities; - try { - storageEntities = new ArrayList<>( - storageOperate.listFilesStatus(baseFolder, baseFolder, tenantCode, ResourceType.FILE)); - } catch (Exception e) { - log.error("delete data transfer data error", e); - putMsg(result, 
Status.DELETE_RESOURCE_ERROR); - return result; - } - - List successList = new ArrayList<>(); - List failList = new ArrayList<>(); - - for (StorageEntity storageEntity : storageEntities) { - File path = new File(storageEntity.getFullName()); - String date = path.getName(); - if (date.compareTo(deleteDate) <= 0) { - try { - storageOperate.delete(storageEntity.getFullName(), true); - successList.add(storageEntity.getFullName()); - } catch (Exception ex) { - log.error("delete data transfer data {} error, please delete it manually", date, ex); - failList.add(storageEntity.getFullName()); - } - } - } - - result.setSuccessList(successList); - result.setFailedList(failList); - putMsg(result, Status.SUCCESS); - return result; - } + public String queryResourceBaseDir(User loginUser, ResourceType type) { - /** - * get resource base dir - * - * @param loginUser login user - * @param type resource type - * @return - */ - @Override - public Result queryResourceBaseDir(User loginUser, ResourceType type) { - Result result = new Result<>(); - if (storageOperate == null) { - putMsg(result, Status.SUCCESS); - result.setData(EMPTY_STRING); - return result; - } User user = userMapper.selectById(loginUser.getId()); if (user == null) { - log.error("user {} not exists", loginUser.getId()); - putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); - return result; + throw new ServiceException(Status.USER_NOT_EXIST); } - String tenantCode = getTenantCode(user); - - String baseDir = isAdmin(loginUser) ? 
storageOperate.getDir(ResourceType.ALL, tenantCode) - : storageOperate.getDir(type, tenantCode); - - putMsg(result, Status.SUCCESS); - result.setData(baseDir); - - return result; + Tenant tenant = tenantDao.queryOptionalById(user.getTenantId()) + .orElseThrow(() -> new ServiceException(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST)); + return storageOperator.getStorageBaseDirectory(tenant.getTenantCode(), type); } - /** - * check permission by comparing login user's tenantCode with tenantCode in the request - * - * @param isAdmin is the login user admin - * @param userTenantCode loginUser's tenantCode - * @param resTenantCode tenantCode in the request field "resTenantCode" for tenant code owning the resource, - * can be different from the login user in the case of logging in as admin users. - * @return isValid - */ - private boolean isUserTenantValid(boolean isAdmin, String userTenantCode, - String resTenantCode) throws ServiceException { - if (isAdmin) { - return true; - } - if (StringUtils.isEmpty(resTenantCode)) { - // TODO: resource tenant code will be empty when query resources list, need to be optimized - return true; - } - return resTenantCode.equals(userTenantCode); + // Copy the file to the local file system and return the local file absolute path + @SneakyThrows + private String copyFileToLocal(MultipartFile multipartFile) { + String localTmpFileAbsolutePath = FileUtils.getUploadFileLocalTmpAbsolutePath(); + FileUtils.copyInputStreamToFile(multipartFile.getInputStream(), localTmpFileAbsolutePath); + return localTmpFileAbsolutePath; } - private String getTenantCode(User user) { - Tenant tenant = tenantMapper.queryById(user.getTenantId()); - if (tenant == null) { - throw new ServiceException(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); - } - return tenant.getTenantCode(); + // Copy the file to the local file system and return the local file absolute path + private String copyFileToLocal(String fileContent) { + String localTmpFileAbsolutePath = 
FileUtils.getUploadFileLocalTmpAbsolutePath(); + FileUtils.writeContent2File(fileContent, localTmpFileAbsolutePath); + return localTmpFileAbsolutePath; } - private void checkFullName(String userTenantCode, String fullName) { - String baseDir = storageOperate.getDir(ResourceType.ALL, userTenantCode); - if (StringUtils.isNotBlank(fullName) && !StringUtils.startsWith(fullName, baseDir)) { - throw new ServiceException("Resource file: " + fullName + " is illegal"); - } - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java index 80e59670f7f9..6967222b7f35 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java @@ -808,6 +808,7 @@ private void updateSchedule(Map result, Schedule schedule, Proce log.info("Schedule update complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), schedule.getId()); + result.put(Constants.DATA_LIST, schedule); putMsg(result, Status.SUCCESS); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java index 4a0c3f8b68d7..ae3410cb7747 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java @@ -24,6 +24,7 @@ import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_VERSION_VIEW; import static 
org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_DEFINITION; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_SWITCH_TO_THIS_VERSION; +import static org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager.checkTaskParameters; import org.apache.dolphinscheduler.api.dto.task.TaskCreateRequest; import org.apache.dolphinscheduler.api.dto.task.TaskFilterRequest; @@ -67,15 +68,12 @@ import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.repository.ProcessTaskRelationLogDao; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionDao; -import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; -import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -105,8 +103,6 @@ @Slf4j public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDefinitionService { - private static final String RELEASESTATE = "releaseState"; - @Autowired private ProjectMapper projectMapper; @@ -137,9 +133,6 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe @Autowired private ProcessService processService; - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private ProcessDefinitionService processDefinitionService; @@ -149,8 +142,8 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe /** * create task definition * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param taskDefinitionJson 
task definition json */ @Transactional @@ -173,11 +166,7 @@ public Map createTaskDefinition(User loginUser, return result; } for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinitionLog.getTaskType()) - .taskParams(taskDefinitionLog.getTaskParams()) - .dependence(taskDefinitionLog.getDependence()) - .build())) { + if (!checkTaskParameters(taskDefinitionLog.getTaskType(), taskDefinitionLog.getTaskParams())) { log.warn("Task definition {} parameters are invalid.", taskDefinitionLog.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; @@ -214,11 +203,7 @@ private void checkTaskDefinitionValid(User user, TaskDefinition taskDefinition, Project project = projectMapper.queryByCode(taskDefinition.getProjectCode()); projectService.checkProjectAndAuthThrowException(user, project, permissions); - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinition.getTaskType()) - .taskParams(taskDefinition.getTaskParams()) - .dependence(taskDefinition.getDependence()) - .build())) { + if (!checkTaskParameters(taskDefinition.getTaskType(), taskDefinition.getTaskParams())) { throw new ServiceException(Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinition.getName()); } } @@ -244,7 +229,7 @@ private ProcessDefinition updateWorkflowLocation(User user, ProcessDefinition pr /** * Create resource task definition * - * @param loginUser login user + * @param loginUser login user * @param taskCreateRequest task definition json * @return new TaskDefinition have created */ @@ -265,7 +250,7 @@ public TaskDefinition createTaskDefinitionV2(User loginUser, long taskDefinitionCode; try { - taskDefinitionCode = CodeGenerateUtils.getInstance().genCode(); + taskDefinitionCode = CodeGenerateUtils.genCode(); } catch (CodeGenerateException e) { throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); } 
@@ -288,11 +273,11 @@ public TaskDefinition createTaskDefinitionV2(User loginUser, /** * create single task definition that binds the workflow * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param processDefinitionCode process definition code * @param taskDefinitionJsonObj task definition json object - * @param upstreamCodes upstream task codes, sep comma + * @param upstreamCodes upstream task codes, sep comma * @return create result code */ @Transactional @@ -327,18 +312,13 @@ public Map createTaskBindsWorkFlow(User loginUser, putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj); return result; } - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinition.getTaskType()) - .taskParams(taskDefinition.getTaskParams()) - .dependence(taskDefinition.getDependence()) - .build())) { - log.error("Task definition {} parameters are invalid", taskDefinition.getName()); + if (!checkTaskParameters(taskDefinition.getTaskType(), taskDefinition.getTaskParams())) { putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinition.getName()); return result; } long taskCode = taskDefinition.getCode(); if (taskCode == 0) { - taskDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); + taskDefinition.setCode(CodeGenerateUtils.genCode()); } List processTaskRelationLogList = processTaskRelationMapper.queryByProcessCode(processDefinitionCode) @@ -414,10 +394,10 @@ public Map createTaskBindsWorkFlow(User loginUser, /** * query task definition * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code * @param processCode process code - * @param taskName task name + * @param taskName task name */ @Override public Map queryTaskDefinitionByName(User loginUser, long projectCode, long processCode, @@ -475,12 +455,12 @@ private void taskCanDeleteValid(User user, TaskDefinition taskDefinition, User l /** 
* Delete resource task definition by code - * + *

* Only task release state offline and no downstream tasks can be deleted, will also remove the exists * task relation [upstreamTaskCode, taskCode] * * @param loginUser login user - * @param taskCode task code + * @param taskCode task code */ @Transactional @Override @@ -548,9 +528,9 @@ public void updateDag(User loginUser, long processDefinitionCode, /** * update task definition * - * @param loginUser login user - * @param projectCode project code - * @param taskCode task code + * @param loginUser login user + * @param projectCode project code + * @param taskCode task code * @param taskDefinitionJsonObj task definition json object */ @Transactional @@ -606,8 +586,8 @@ private void TaskDefinitionUpdateValid(TaskDefinition taskDefinitionOriginal, Ta /** * update task definition * - * @param loginUser login user - * @param taskCode task code + * @param loginUser login user + * @param taskCode task code * @param taskUpdateRequest task definition json object * @return new TaskDefinition have updated */ @@ -621,13 +601,8 @@ public TaskDefinition updateTaskDefinitionV2(User loginUser, throw new ServiceException(Status.TASK_DEFINITION_NOT_EXISTS, taskCode); } - TaskDefinition taskDefinitionUpdate; - try { - taskDefinitionUpdate = taskUpdateRequest.mergeIntoTaskDefinition(taskDefinitionOriginal); - } catch (InvocationTargetException | IllegalAccessException | InstantiationException - | NoSuchMethodException e) { - throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, taskUpdateRequest.toString()); - } + TaskDefinition taskDefinitionUpdate = taskUpdateRequest.mergeIntoTaskDefinition(taskDefinitionOriginal); + this.checkTaskDefinitionValid(loginUser, taskDefinitionUpdate, TASK_DEFINITION_UPDATE); this.TaskDefinitionUpdateValid(taskDefinitionOriginal, taskDefinitionUpdate); @@ -658,7 +633,7 @@ public TaskDefinition updateTaskDefinitionV2(User loginUser, * Get resource task definition by code * * @param loginUser login user - * @param taskCode task code + * @param 
taskCode task code * @return TaskDefinition */ @Override @@ -676,7 +651,7 @@ public TaskDefinition getTaskDefinition(User loginUser, /** * Get resource task definition according to query parameter * - * @param loginUser login user + * @param loginUser login user * @param taskFilterRequest taskFilterRequest object you want to filter the resource task definitions * @return TaskDefinitions of page */ @@ -743,13 +718,7 @@ private TaskDefinitionLog updateTask(User loginUser, long projectCode, long task putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj); return null; } - if (!taskPluginManager.checkTaskParameters(ParametersNode.builder() - .taskType(taskDefinitionToUpdate.getTaskType()) - .taskParams(taskDefinitionToUpdate.getTaskParams()) - .dependence(taskDefinitionToUpdate.getDependence()) - .build())) { - log.warn("Task definition parameters are invalid, taskDefinitionName:{}.", - taskDefinitionToUpdate.getName()); + if (!checkTaskParameters(taskDefinitionToUpdate.getTaskType(), taskDefinitionToUpdate.getTaskParams())) { putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionToUpdate.getName()); return null; } @@ -848,11 +817,11 @@ private TaskDefinitionLog updateTask(User loginUser, long projectCode, long task /** * update task definition and upstream * - * @param loginUser login user - * @param projectCode project code - * @param taskCode task definition code + * @param loginUser login user + * @param projectCode project code + * @param taskCode task definition code * @param taskDefinitionJsonObj task definition json object - * @param upstreamCodes upstream task codes, sep comma + * @param upstreamCodes upstream task codes, sep comma * @return update result code */ @Override @@ -1021,10 +990,10 @@ private ProcessTaskRelationLog createProcessTaskRelationLog(User loginUser, /** * switch task definition * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code - * @param taskCode task code - * 
@param version the version user want to switch + * @param taskCode task code + * @param version the version user want to switch */ @Transactional @Override @@ -1264,7 +1233,7 @@ public Map genTaskCodeList(Integer genNum) { List taskCodes = new ArrayList<>(); try { for (int i = 0; i < genNum; i++) { - taskCodes.add(CodeGenerateUtils.getInstance().genCode()); + taskCodes.add(CodeGenerateUtils.genCode()); } } catch (CodeGenerateException e) { log.error("Generate task definition code error.", e); @@ -1279,9 +1248,9 @@ public Map genTaskCodeList(Integer genNum) { /** * release task definition * - * @param loginUser login user - * @param projectCode project code - * @param code task definition code + * @param loginUser login user + * @param projectCode project code + * @param code task definition code * @param releaseState releaseState * @return update result code */ @@ -1297,7 +1266,7 @@ public Map releaseTaskDefinition(User loginUser, long projectCod return result; } if (null == releaseState) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.RELEASE_STATE); return result; } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(code); @@ -1337,7 +1306,7 @@ public Map releaseTaskDefinition(User loginUser, long projectCod break; default: log.warn("Parameter releaseState is invalid."); - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.RELEASE_STATE); return result; } int update = taskDefinitionMapper.updateById(taskDefinition); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java index a1d9b4acf820..379c629ea6a0 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java @@ -129,6 +129,7 @@ public Map createTaskGroup(User loginUser, Long projectCode, Str if (taskGroupMapper.insert(taskGroup) > 0) { log.info("Create task group complete, taskGroupName:{}.", taskGroup.getName()); + result.put(Constants.DATA_LIST, taskGroup); putMsg(result, Status.SUCCESS); } else { log.error("Create task group error, taskGroupName:{}.", taskGroup.getName()); @@ -194,6 +195,7 @@ public Map updateTaskGroup(User loginUser, int id, String name, int i = taskGroupMapper.updateById(taskGroup); if (i > 0) { log.info("Update task group complete, taskGroupId:{}.", id); + result.put(Constants.DATA_LIST, taskGroup); putMsg(result, Status.SUCCESS); } else { log.error("Update task group error, taskGroupId:{}.", id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java index f06f8115a924..49e42da56157 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.api.dto.taskInstance.TaskInstanceRemoveCacheResponse; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.TaskGroupQueueService; import org.apache.dolphinscheduler.api.service.TaskInstanceService; @@ -33,14 +34,15 @@ import org.apache.dolphinscheduler.common.enums.TaskExecuteType; import 
org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.repository.DqExecuteResultDao; +import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.dao.utils.TaskCacheUtils; import org.apache.dolphinscheduler.extract.base.client.SingletonJdkDynamicRpcClientProxyFactory; @@ -107,6 +109,9 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst @Autowired private TaskGroupQueueService taskGroupQueueService; + @Autowired + private ProcessInstanceDao workflowInstanceDao; + /** * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging * @@ -216,58 +221,39 @@ public Result queryTaskListPaging(User loginUser, */ @Transactional @Override - public Result forceTaskSuccess(User loginUser, long projectCode, Integer taskInstanceId) { - Result result = new Result(); - Project project = projectMapper.queryByCode(projectCode); + public void forceTaskSuccess(User loginUser, long projectCode, Integer taskInstanceId) { // check user access for project - Map checkResult = - projectService.checkProjectAndAuth(loginUser, project, projectCode, FORCED_SUCCESS); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - putMsg(result, status); - return result; - } + 
projectService.checkProjectAndAuthThrowException(loginUser, projectCode, FORCED_SUCCESS); - // check whether the task instance can be found - TaskInstance task = taskInstanceMapper.selectById(taskInstanceId); - if (task == null) { - log.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, - taskInstanceId); - putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); - return result; + TaskInstance task = taskInstanceDao.queryOptionalById(taskInstanceId) + .orElseThrow(() -> new ServiceException(Status.TASK_INSTANCE_NOT_FOUND)); + + if (task.getProjectCode() != projectCode) { + throw new ServiceException("The task instance is not under the project: " + projectCode); } - TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode()); - if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { - log.error("Task definition can not be found, projectCode:{}, taskDefinitionCode:{}.", projectCode, - task.getTaskCode()); - putMsg(result, Status.TASK_INSTANCE_NOT_FOUND, taskInstanceId); - return result; + ProcessInstance processInstance = workflowInstanceDao.queryOptionalById(task.getProcessInstanceId()) + .orElseThrow( + () -> new ServiceException(Status.PROCESS_INSTANCE_NOT_EXIST, task.getProcessInstanceId())); + if (!processInstance.getState().isFinished()) { + throw new ServiceException("The workflow instance is not finished: " + processInstance.getState() + + " cannot force start task instance"); } // check whether the task instance state type is failure or cancel if (!task.getState().isFailure() && !task.getState().isKill()) { - log.warn("{} type task instance can not perform force success, projectCode:{}, taskInstanceId:{}.", - task.getState().getDesc(), projectCode, taskInstanceId); - putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString()); - return result; + throw new ServiceException(Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, 
task.getState()); } // change the state of the task instance task.setState(TaskExecutionStatus.FORCED_SUCCESS); task.setEndTime(new Date()); int changedNum = taskInstanceMapper.updateById(task); - if (changedNum > 0) { - processService.forceProcessInstanceSuccessByTaskInstanceId(taskInstanceId); - log.info("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode, - taskInstanceId); - putMsg(result, Status.SUCCESS); - } else { - log.error("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", - projectCode, taskInstanceId); - putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR); + if (changedNum <= 0) { + throw new ServiceException(Status.FORCE_TASK_SUCCESS_ERROR); } - return result; + processService.forceProcessInstanceSuccessByTaskInstanceId(task); + log.info("Force success task instance:{} success", taskInstanceId); } @Override @@ -381,10 +367,15 @@ public void deleteByWorkflowInstanceId(Integer workflowInstanceId) { } for (TaskInstance taskInstance : needToDeleteTaskInstances) { if (StringUtils.isNotBlank(taskInstance.getLogPath())) { - ILogService iLogService = - SingletonJdkDynamicRpcClientProxyFactory.getProxyClient(taskInstance.getHost(), - ILogService.class); - iLogService.removeTaskInstanceLog(taskInstance.getLogPath()); + try { + // Remove task instance log failed will not affect the deletion of task instance + ILogService iLogService = + SingletonJdkDynamicRpcClientProxyFactory.getProxyClient(taskInstance.getHost(), + ILogService.class); + iLogService.removeTaskInstanceLog(taskInstance.getLogPath()); + } catch (Exception ex) { + log.error("Remove task instance log error", ex); + } } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index e77baed1d3d1..7bec4d67801f 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -39,7 +39,7 @@ import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -84,7 +84,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService private QueueService queueService; @Autowired(required = false) - private StorageOperate storageOperate; + private StorageOperator storageOperator; /** * Check the tenant new object valid or not @@ -136,14 +136,13 @@ && checkTenantExists(updateTenant.getTenantCode())) { * @param queueId queue id * @param desc description * @return create result code - * @throws Exception exception */ @Override @Transactional(rollbackFor = Exception.class) public Tenant createTenant(User loginUser, String tenantCode, int queueId, - String desc) throws Exception { + String desc) { if (!canOperatorPermissions(loginUser, null, AuthorizationType.TENANT, TENANT_CREATE)) { throw new ServiceException(Status.USER_NO_OPERATION_PERM); } @@ -154,7 +153,6 @@ public Tenant createTenant(User loginUser, createTenantValid(tenant); tenantMapper.insert(tenant); - storageOperate.createTenantDirIfNotExists(tenantCode); return tenant; } @@ -209,11 +207,6 @@ public void updateTenant(User loginUser, updateTenantValid(existsTenant, updateTenant); updateTenant.setCreateTime(existsTenant.getCreateTime()); - // updateProcessInstance tenant - // if the tenant code is modified, the original resource needs to be copied to the new tenant. 
- if (!Objects.equals(existsTenant.getTenantCode(), updateTenant.getTenantCode())) { - storageOperate.createTenantDirIfNotExists(tenantCode); - } int update = tenantMapper.updateById(updateTenant); if (update <= 0) { throw new ServiceException(Status.UPDATE_TENANT_ERROR); @@ -262,7 +255,6 @@ public void deleteTenantById(User loginUser, int id) throws Exception { } processInstanceMapper.updateProcessInstanceByTenantCode(tenant.getTenantCode(), Constants.DEFAULT); - storageOperate.deleteTenant(tenant.getTenantCode()); } private List getProcessInstancesByTenant(Tenant tenant) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java deleted file mode 100644 index 1bf7d23a6b42..000000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java +++ /dev/null @@ -1,405 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.api.service.impl; - -import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.UdfFuncService; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.AuthorizationType; -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; - -import org.apache.commons.lang3.StringUtils; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Set; - -import lombok.extern.slf4j.Slf4j; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; - -/** - * udf func service impl - */ -@Service -@Slf4j -public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService { - - @Autowired - private UdfFuncMapper udfFuncMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - - @Autowired(required = false) - private StorageOperate storageOperate; - - /** - * create udf function - * - * @param loginUser login user - * @param type udf type - * @param funcName function name - * @param argTypes argument types - * @param database database - * @param desc description - * @param className class name - * @return create result code - */ - @Override - @Transactional - 
public Result createUdfFunction(User loginUser, - String funcName, - String className, - String fullName, - String argTypes, - String database, - String desc, - UdfType type) { - Result result = new Result<>(); - - boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_CREATE); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - if (checkDescriptionLength(desc)) { - log.warn("Parameter description is too long."); - putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); - return result; - } - - // verify udf func name exist - if (checkUdfFuncNameExists(funcName)) { - log.warn("Udf function with the same name already exists."); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - return result; - } - - Boolean existResource = false; - try { - existResource = storageOperate.exists(fullName); - } catch (IOException e) { - log.error("Check resource error: {}", fullName, e); - } - - if (!existResource) { - log.error("resource full name {} is not exist", fullName); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - - // save data - UdfFunc udf = new UdfFunc(); - Date now = new Date(); - udf.setUserId(loginUser.getId()); - udf.setFuncName(funcName); - udf.setClassName(className); - if (!StringUtils.isEmpty(argTypes)) { - udf.setArgTypes(argTypes); - } - if (!StringUtils.isEmpty(database)) { - udf.setDatabase(database); - } - udf.setDescription(desc); - // set resourceId to -1 because we do not store resource to db anymore, instead we use fullName - udf.setResourceId(-1); - udf.setResourceName(fullName); - udf.setType(type); - - udf.setCreateTime(now); - udf.setUpdateTime(now); - - udfFuncMapper.insert(udf); - log.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName()); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * - * @param name name - * @return check result code - */ - private 
boolean checkUdfFuncNameExists(String name) { - List resource = udfFuncMapper.queryUdfByIdStr(null, name); - return resource != null && !resource.isEmpty(); - } - - /** - * query udf function - * - * @param id udf function id - * @return udf function detail - */ - @Override - public Result queryUdfFuncDetail(User loginUser, int id) { - Result result = new Result<>(); - boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{id}, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - UdfFunc udfFunc = udfFuncMapper.selectById(id); - if (udfFunc == null) { - log.error("Resource does not exist, udf func id:{}.", id); - putMsg(result, Status.RESOURCE_NOT_EXIST); - return result; - } - result.setData(udfFunc); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * updateProcessInstance udf function - * - * @param udfFuncId udf function id - * @param type resource type - * @param funcName function name - * @param argTypes argument types - * @param database data base - * @param desc description - * @param fullName resource full name - * @param className class name - * @return update result code - */ - @Override - public Result updateUdfFunc(User loginUser, - int udfFuncId, - String funcName, - String className, - String argTypes, - String database, - String desc, - UdfType type, - String fullName) { - Result result = new Result<>(); - - boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{udfFuncId}, - AuthorizationType.UDF, ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - if (checkDescriptionLength(desc)) { - log.warn("Parameter description is too long."); - putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); - return result; - } - // verify udfFunc is exist - 
UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId); - - if (udf == null) { - log.error("UDF function does not exist, udfFuncId:{}.", udfFuncId); - result.setCode(Status.UDF_FUNCTION_NOT_EXIST.getCode()); - result.setMsg(Status.UDF_FUNCTION_NOT_EXIST.getMsg()); - return result; - } - - // verify udfFuncName is exist - if (!funcName.equals(udf.getFuncName())) { - if (checkUdfFuncNameExists(funcName)) { - log.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName); - result.setCode(Status.UDF_FUNCTION_EXISTS.getCode()); - result.setMsg(Status.UDF_FUNCTION_EXISTS.getMsg()); - return result; - } - } - - Boolean doesResExist = false; - try { - doesResExist = storageOperate.exists(fullName); - } catch (Exception e) { - log.error("udf resource :{} checking error", fullName, e); - result.setCode(Status.RESOURCE_NOT_EXIST.getCode()); - result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg()); - return result; - } - - if (!doesResExist) { - log.error("resource full name {} is not exist", fullName); - result.setCode(Status.RESOURCE_NOT_EXIST.getCode()); - result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg()); - return result; - } - - Date now = new Date(); - udf.setFuncName(funcName); - udf.setClassName(className); - udf.setArgTypes(argTypes); - if (!StringUtils.isEmpty(database)) { - udf.setDatabase(database); - } - udf.setDescription(desc); - // set resourceId to -1 because we do not store resource to db anymore, instead we use fullName - udf.setResourceId(-1); - udf.setResourceName(fullName); - udf.setType(type); - - udf.setUpdateTime(now); - - udfFuncMapper.updateById(udf); - log.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * query udf function list paging - * - * @param loginUser login user - * @param pageNo page number - * @param pageSize page size - * @param searchVal search value - * @return udf function list page - */ - @Override - public Result 
queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Result result = new Result(); - boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - IPage udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo); - pageInfo.setTotal((int) udfFuncList.getTotal()); - pageInfo.setTotalList(udfFuncList.getRecords()); - result.setData(pageInfo); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * get udf functions - * - * @param loginUser login user - * @param searchVal search value - * @param pageSize page size - * @param pageNo page number - * @return udf function list page - */ - private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { - Set udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, - loginUser.getId(), log); - Page page = new Page<>(pageNo, pageSize); - if (udfFuncIds.isEmpty()) { - return page; - } - return udfFuncMapper.queryUdfFuncPaging(page, new ArrayList<>(udfFuncIds), searchVal); - } - - /** - * query udf list - * - * @param loginUser login user - * @param type udf type - * @return udf func list - */ - @Override - public Result queryUdfFuncList(User loginUser, Integer type) { - Result result = new Result<>(); - - boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - Set udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, - loginUser.getId(), log); - if (udfFuncIds.isEmpty()) { - 
result.setData(Collections.emptyList()); - putMsg(result, Status.SUCCESS); - return result; - } - List udfFuncList = udfFuncMapper.getUdfFuncByType(new ArrayList<>(udfFuncIds), type); - - result.setData(udfFuncList); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * delete udf function - * - * @param id udf function id - * @return delete result code - */ - @Override - @Transactional - public Result delete(User loginUser, int id) { - Result result = new Result<>(); - - boolean canOperatorPermissions = canOperatorPermissions(loginUser, new Object[]{id}, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_DELETE); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - udfFuncMapper.deleteById(id); - udfUserMapper.deleteByUdfFuncId(id); - log.info("UDF function delete complete, udfFuncId:{}.", id); - putMsg(result, Status.SUCCESS); - return result; - } - - /** - * verify udf function by name - * - * @param name name - * @return true if the name can user, otherwise return false - */ - @Override - public Result verifyUdfFuncByName(User loginUser, String name) { - Result result = new Result<>(); - boolean canOperatorPermissions = canOperatorPermissions(loginUser, null, AuthorizationType.UDF, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW); - if (!canOperatorPermissions) { - putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - if (checkUdfFuncNameExists(name)) { - log.warn("Udf function with the same already exists."); - putMsg(result, Status.UDF_FUNCTION_EXISTS); - } else { - putMsg(result, Status.SUCCESS); - } - return result; - } -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java index 7b9746921c82..9b8e56c7b368 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java @@ -19,7 +19,6 @@ import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.USER_MANAGER; -import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.MetricsCleanUpService; @@ -39,7 +38,6 @@ import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.UDFUser; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; @@ -48,9 +46,8 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -100,9 +97,6 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { @Autowired private DataSourceUserMapper datasourceUserMapper; - @Autowired - private UDFUserMapper udfUserMapper; - @Autowired private AlertGroupMapper alertGroupMapper; @@ -110,7 +104,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { private ProjectMapper projectMapper; 
@Autowired(required = false) - private StorageOperate storageOperate; + private StorageOperator storageOperator; @Autowired private K8sNamespaceUserMapper k8sNamespaceUserMapper; @@ -171,9 +165,6 @@ public Map createUser(User loginUser, User user = createUser(userName, userPassword, email, tenantId, phone, queue, state); - Tenant tenant = tenantMapper.queryById(tenantId); - storageOperate.createTenantDirIfNotExists(tenant.getTenantCode()); - log.info("User is created and id is {}.", user.getId()); result.put(Constants.DATA_LIST, user); putMsg(result, Status.SUCCESS); @@ -496,9 +487,10 @@ public Map deleteUserById(User loginUser, int id) throws IOExcep /** * revoke the project permission for specified user by id - * @param loginUser Login user - * @param userId User id - * @param projectIds project id array + * + * @param loginUser Login user + * @param userId User id + * @param projectIds project id array * @return */ @Override @@ -541,8 +533,8 @@ public Map revokeProjectById(User loginUser, int userId, String /** * grant project with read permission * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectIds project id array * @return grant result code */ @@ -751,62 +743,6 @@ public Map revokeProject(User loginUser, int userId, long projec return result; } - /** - * grant udf function - * - * @param loginUser login user - * @param userId user id - * @param udfIds udf id array - * @return grant result code - */ - @Override - @Transactional - public Map grantUDFFunction(User loginUser, int userId, String udfIds) { - Map result = new HashMap<>(); - - if (resourcePermissionCheckService.functionDisabled()) { - putMsg(result, Status.FUNCTION_DISABLED); - return result; - } - User user = userMapper.selectById(userId); - if (user == null) { - log.error("User does not exist, userId:{}.", userId); - putMsg(result, Status.USER_NOT_EXIST, userId); - return result; - } - - if (!isAdmin(loginUser)) { 
- putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); - return result; - } - - udfUserMapper.deleteByUserId(userId); - - if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) { - log.warn("Parameter udfIds is empty."); - return result; - } - - String[] resourcesIdArr = udfIds.split(","); - - for (String udfId : resourcesIdArr) { - Date now = new Date(); - UDFUser udfUser = new UDFUser(); - udfUser.setUserId(userId); - udfUser.setUdfId(Integer.parseInt(udfId)); - udfUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); - udfUser.setCreateTime(now); - udfUser.setUpdateTime(now); - udfUserMapper.insert(udfUser); - } - - log.info("User is granted permission for UDF, userName:{}.", user.getUserName()); - - putMsg(result, Status.SUCCESS); - - return result; - } - /** * grant namespace * @@ -1128,54 +1064,6 @@ private String checkUserParams(String userName, String password, String email, S return msg; } - /** - * copy resource files - * xxx unchecked - * - * @param resourceComponent resource component - * @param srcBasePath src base path - * @param dstBasePath dst base path - * @throws IOException io exception - */ - private void copyResourceFiles(String oldTenantCode, String newTenantCode, ResourceComponent resourceComponent, - String srcBasePath, String dstBasePath) { - List components = resourceComponent.getChildren(); - - try { - if (CollectionUtils.isNotEmpty(components)) { - for (ResourceComponent component : components) { - // verify whether exist - if (!storageOperate.exists( - String.format(Constants.FORMAT_S_S, srcBasePath, component.getFullName()))) { - log.error("Resource file: {} does not exist, copy error.", component.getFullName()); - throw new ServiceException(Status.RESOURCE_NOT_EXIST); - } - - if (!component.isDirctory()) { - // copy it to dst - storageOperate.copy(String.format(Constants.FORMAT_S_S, srcBasePath, component.getFullName()), - String.format(Constants.FORMAT_S_S, dstBasePath, component.getFullName()), false, true); - continue; - } - 
- if (CollectionUtils.isEmpty(component.getChildren())) { - // if not exist,need create it - if (!storageOperate - .exists(String.format(Constants.FORMAT_S_S, dstBasePath, component.getFullName()))) { - storageOperate.mkdir(newTenantCode, - String.format(Constants.FORMAT_S_S, dstBasePath, component.getFullName())); - } - } else { - copyResourceFiles(oldTenantCode, newTenantCode, component, srcBasePath, dstBasePath); - } - } - - } - } catch (IOException e) { - log.error("copy the resources failed,the error message is {}", e.getMessage()); - } - } - /** * registry user, default state is 0, default tenant_id is 1, no phone, no queue * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java index 014d22af5724..9763f21b0f86 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java @@ -17,8 +17,6 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DEPENDENT; - import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.WorkFlowLineageService; @@ -39,6 +37,7 @@ import org.apache.dolphinscheduler.plugin.task.api.model.DependentItem; import org.apache.dolphinscheduler.plugin.task.api.model.DependentTaskModel; import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.commons.lang3.StringUtils; @@ -147,7 +146,7 @@ private void getProcessDefinitionCodeByDependentDefinitionList(List processDefinitionCodes) { 
for (DependentProcessDefinition dependentProcessDefinition : dependentDefinitionList) { for (DependentTaskModel dependentTaskModel : dependentProcessDefinition.getDependentParameters() - .getDependTaskList()) { + .getDependence().getDependTaskList()) { for (DependentItem dependentItem : dependentTaskModel.getDependItemList()) { if (!processDefinitionCodes.contains(dependentItem.getDefinitionCode())) { processDefinitionCodes.add(dependentItem.getDefinitionCode()); @@ -220,12 +219,12 @@ private Set querySourceWorkFlowCodes(long projectCode, long workFlowCode, List taskDefinitionLogs = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitionList); for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { if (taskDefinitionLog.getProjectCode() == projectCode) { - if (taskDefinitionLog.getTaskType().equals(TASK_TYPE_DEPENDENT)) { + if (TaskTypeUtils.isDependentTask(taskDefinitionLog.getTaskType())) { DependentParameters dependentParameters = JSONUtils.parseObject(taskDefinitionLog.getDependence(), DependentParameters.class); if (dependentParameters != null) { List dependTaskList = - dependentParameters.getDependTaskList(); + dependentParameters.getDependence().getDependTaskList(); if (!CollectionUtils.isEmpty(dependTaskList)) { for (DependentTaskModel taskModel : dependTaskList) { List dependItemList = taskModel.getDependItemList(); @@ -247,9 +246,9 @@ private Set querySourceWorkFlowCodes(long projectCode, long workFlowCode, /** * Query and return tasks dependence with string format, is a wrapper of queryTaskDepOnTask and task query method. 
* - * @param projectCode Project code want to query tasks dependence + * @param projectCode Project code want to query tasks dependence * @param processDefinitionCode Process definition code want to query tasks dependence - * @param taskCode Task code want to query tasks dependence + * @param taskCode Task code want to query tasks dependence * @return Optional of formatter message */ @Override @@ -271,7 +270,7 @@ public Optional taskDepOnTaskMsg(long projectCode, long processDefinitio /** * Query tasks depend on process definition, include upstream or downstream * - * @param projectCode Project code want to query tasks dependence + * @param projectCode Project code want to query tasks dependence * @param processDefinitionCode Process definition code want to query tasks dependence * @return Set of TaskMainInfo */ @@ -291,7 +290,7 @@ public Set queryTaskDepOnProcess(long projectCode, long processDef * Query downstream tasks depend on a process definition or a task * * @param processDefinitionCode Process definition code want to query tasks dependence - * @param taskCode Task code want to query tasks dependence + * @param taskCode Task code want to query tasks dependence * @return downstream dependent tasks */ @Override diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java index 88cd3247ae28..465234798749 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java @@ -32,7 +32,6 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; 
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.EnvironmentWorkerGroupRelationMapper; @@ -41,6 +40,7 @@ import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -157,6 +157,7 @@ public Map saveWorkerGroup(User loginUser, int id, String name, handleDefaultWorkGroup(workerGroupMapper, workerGroup, loginUser, otherParamsJson); log.info("Worker group save complete, workerGroupName:{}.", workerGroup.getName()); putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, workerGroup); return result; } @@ -356,12 +357,12 @@ private List getWorkerGroups(List ids) { workerGroups = workerGroupMapper.queryAllWorkerGroup(); } boolean containDefaultWorkerGroups = workerGroups.stream() - .anyMatch(workerGroup -> Constants.DEFAULT_WORKER_GROUP.equals(workerGroup.getName())); + .anyMatch(workerGroup -> WorkerGroupUtils.isWorkerGroupEmpty(workerGroup.getName())); if (!containDefaultWorkerGroups) { // there doesn't exist a default WorkerGroup, we will add all worker to the default worker group. 
Set activeWorkerNodes = registryClient.getServerNodeSet(RegistryNodeType.WORKER); WorkerGroup defaultWorkerGroup = new WorkerGroup(); - defaultWorkerGroup.setName(Constants.DEFAULT_WORKER_GROUP); + defaultWorkerGroup.setName(WorkerGroupUtils.getDefaultWorkerGroup()); defaultWorkerGroup.setAddrList(String.join(Constants.COMMA, activeWorkerNodes)); defaultWorkerGroup.setCreateTime(new Date()); defaultWorkerGroup.setUpdateTime(new Date()); @@ -430,27 +431,6 @@ public Map getWorkerAddressList() { return result; } - @Override - public String getTaskWorkerGroup(TaskInstance taskInstance) { - if (taskInstance == null) { - return null; - } - - String workerGroup = taskInstance.getWorkerGroup(); - - if (StringUtils.isNotEmpty(workerGroup)) { - return workerGroup; - } - int processInstanceId = taskInstance.getProcessInstanceId(); - ProcessInstance processInstance = processService.findProcessInstanceById(processInstanceId); - - if (processInstance != null) { - return processInstance.getWorkerGroup(); - } - log.info("task : {} will use default worker group", taskInstance.getId()); - return Constants.DEFAULT_WORKER_GROUP; - } - @Override public Map queryWorkerGroupByProcessDefinitionCodes(List processDefinitionCodeList) { List processDefinitionScheduleList = diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java index 2cfdd8f840cb..85e47c96b954 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java @@ -20,12 +20,16 @@ import java.util.Collections; import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; import lombok.Setter; import com.baomidou.mybatisplus.core.metadata.IPage; @Data +@Builder +@AllArgsConstructor public class PageInfo { /** diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriber.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/ITransformer.java similarity index 80% rename from dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriber.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/ITransformer.java index 07202237b23a..8aedad3a9077 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditSubscriber.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/ITransformer.java @@ -15,14 +15,10 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.api.audit; +package org.apache.dolphinscheduler.api.validator; -public interface AuditSubscriber { +public interface ITransformer { + + R transform(T t); - /** - * process the audit message - * - * @param message - */ - void execute(AuditMessage message); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/IValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/IValidator.java new file mode 100644 index 000000000000..7570fa67d5ca --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/IValidator.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator; + +public interface IValidator { + + void validate(T t); + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceTransformer.java new file mode 100644 index 000000000000..4f721188dc8e --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceTransformer.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.validator.ITransformer; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import org.apache.commons.lang3.StringUtils; + +import lombok.AllArgsConstructor; + +@AllArgsConstructor +public abstract class AbstractResourceTransformer implements ITransformer { + + protected TenantDao tenantDao; + + protected StorageOperator storageOperator; + + protected String getParentDirectoryAbsolutePath(User loginUser, String parentAbsoluteDirectory, ResourceType type) { + String tenantCode = tenantDao.queryOptionalById(loginUser.getTenantId()) + .orElseThrow(() -> new ServiceException(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST)) + .getTenantCode(); + String userResRootPath = storageOperator.getStorageBaseDirectory(tenantCode, type); + // If the parent directory is / then will transform to userResRootPath + // This only happens when the front-end go into the resource page first + // todo: we need to change the front-end logic to avoid this + if (parentAbsoluteDirectory.equals("/")) { + return userResRootPath; + } + + if (!StringUtils.startsWith(parentAbsoluteDirectory, userResRootPath)) { + throw new ServiceException(Status.ILLEGAL_RESOURCE_PATH, parentAbsoluteDirectory); + } + return parentAbsoluteDirectory; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceValidator.java new file mode 100644 index 000000000000..35656b4d828d --- /dev/null +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/AbstractResourceValidator.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.AbstractResourceDto; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.validator.IValidator; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.apache.commons.lang3.StringUtils; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.web.multipart.MultipartFile; + +import com.google.common.io.Files; + +public abstract class AbstractResourceValidator implements 
IValidator { + + private static final Set FILE_SUFFIXES_WHICH_CAN_FETCH_CONTENT = new HashSet<>(Arrays.asList( + StringUtils.defaultIfBlank(FileUtils.getResourceViewSuffixes(), "").split(","))); + + protected final StorageOperator storageOperator; + + private final TenantDao tenantDao; + + public AbstractResourceValidator(StorageOperator storageOperator, TenantDao tenantDao) { + this.storageOperator = storageOperator; + this.tenantDao = tenantDao; + } + + public void exceptionResourceAbsolutePathInvalidated(String resourceAbsolutePath) { + if (StringUtils.isBlank(resourceAbsolutePath)) { + throw new ServiceException("The resource path is null"); + } + if (!resourceAbsolutePath.startsWith(storageOperator.getStorageBaseDirectory())) { + throw new ServiceException("Invalidated resource path: " + resourceAbsolutePath); + } + if (resourceAbsolutePath.contains("..")) { + throw new ServiceException("Invalidated resource path: " + resourceAbsolutePath); + } + } + + public void exceptionFileInvalidated(MultipartFile file) { + if (file == null) { + throw new ServiceException("The file is null"); + } + } + + public void exceptionFileContentInvalidated(String fileContent) { + if (StringUtils.isEmpty(fileContent)) { + throw new ServiceException("The file content is null"); + } + } + + public void exceptionFileContentCannotFetch(String fileAbsolutePath) { + String fileExtension = Files.getFileExtension(fileAbsolutePath); + if (!FILE_SUFFIXES_WHICH_CAN_FETCH_CONTENT.contains(fileExtension)) { + throw new ServiceException("The file type: " + fileExtension + " cannot be fetched"); + } + } + + public void exceptionResourceNotExists(String resourceAbsolutePath) { + if (!storageOperator.exists(resourceAbsolutePath)) { + throw new ServiceException("Thr resource is not exists: " + resourceAbsolutePath); + } + } + + public void exceptionResourceExists(String resourceAbsolutePath) { + if (storageOperator.exists(resourceAbsolutePath)) { + throw new ServiceException("The resource is already 
exist: " + resourceAbsolutePath); + } + } + + public void exceptionResourceIsNotDirectory(String resourceAbsolutePath) { + if (StringUtils.isNotEmpty(Files.getFileExtension(resourceAbsolutePath))) { + throw new ServiceException("The path is not a directory: " + resourceAbsolutePath); + } + } + + public void exceptionResourceIsNotFile(String fileAbsolutePath) { + if (StringUtils.isEmpty(Files.getFileExtension(fileAbsolutePath))) { + throw new ServiceException("The path is not a file: " + fileAbsolutePath); + } + } + + public void exceptionUserNoResourcePermission(User user, AbstractResourceDto resourceDto) { + exceptionUserNoResourcePermission(user, resourceDto.getResourceAbsolutePath()); + } + + public void exceptionUserNoResourcePermission(User user, String resourceAbsolutePath) { + if (user.getUserType() == UserType.ADMIN_USER) { + return; + } + // check if the user have resource tenant permission + // Parse the resource path to get the tenant code + ResourceMetadata resourceMetaData = storageOperator.getResourceMetaData(resourceAbsolutePath); + + if (!resourceAbsolutePath.startsWith(resourceMetaData.getResourceBaseDirectory())) { + throw new ServiceException("Invalidated resource path: " + resourceAbsolutePath); + } + + // todo: inject the tenant when login + Tenant tenant = tenantDao.queryOptionalById(user.getTenantId()) + .orElseThrow(() -> new ServiceException(Status.TENANT_NOT_EXIST, user.getTenantId())); + String userTenant = tenant.getTenantCode(); + if (!userTenant.equals(resourceMetaData.getTenant())) { + throw new ServiceException( + "The user's tenant is " + userTenant + " have no permission to access the resource: " + + resourceAbsolutePath); + } + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidator.java new file mode 100644 index 
000000000000..248f7fbff346 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidator.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryDto; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.stereotype.Component; + +import com.google.common.io.Files; + +@Component +public class CreateDirectoryDtoValidator extends AbstractResourceValidator { + + public CreateDirectoryDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(CreateDirectoryDto createDirectoryDto) { + String directoryAbsolutePath = createDirectoryDto.getDirectoryAbsolutePath(); + + exceptionResourceAbsolutePathInvalidated(directoryAbsolutePath); + exceptionResourceExists(directoryAbsolutePath); + 
exceptionUserNoResourcePermission(createDirectoryDto.getLoginUser(), directoryAbsolutePath); + exceptionResourceIsNotDirectory(directoryAbsolutePath); + if (StringUtils.isNotEmpty(Files.getFileExtension(directoryAbsolutePath))) { + throw new ServiceException("The path is not a directory: " + directoryAbsolutePath); + } + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryRequestTransformer.java new file mode 100644 index 000000000000..75985477f33c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryRequestTransformer.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static com.google.common.base.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryRequest; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.validator.ITransformer; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class CreateDirectoryRequestTransformer implements ITransformer { + + @Autowired + private TenantDao tenantDao; + + @Autowired + private StorageOperator storageOperator; + + @Override + public CreateDirectoryDto transform(CreateDirectoryRequest createDirectoryRequest) { + validateCreateDirectoryRequest(createDirectoryRequest); + return doTransform(createDirectoryRequest); + } + + private CreateDirectoryDto doTransform(CreateDirectoryRequest createDirectoryRequest) { + String directoryAbsolutePath = getDirectoryAbsolutePath(createDirectoryRequest); + return CreateDirectoryDto.builder() + .loginUser(createDirectoryRequest.getLoginUser()) + .directoryAbsolutePath(directoryAbsolutePath) + .build(); + } + + private void validateCreateDirectoryRequest(CreateDirectoryRequest createDirectoryRequest) { + checkNotNull(createDirectoryRequest.getLoginUser(), "loginUser is null"); + checkNotNull(createDirectoryRequest.getType(), "resource type is null"); + checkNotNull(createDirectoryRequest.getDirectoryName(), "directory name is null"); + checkNotNull(createDirectoryRequest.getParentAbsoluteDirectory(), "parent directory is null"); + + 
} + + private String getDirectoryAbsolutePath(CreateDirectoryRequest createDirectoryRequest) { + String tenantCode = tenantDao.queryOptionalById(createDirectoryRequest.getLoginUser().getTenantId()) + .orElseThrow(() -> new ServiceException(Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST)) + .getTenantCode(); + String userResRootPath = storageOperator.getStorageBaseDirectory(tenantCode, createDirectoryRequest.getType()); + String parentDirectoryName = createDirectoryRequest.getParentAbsoluteDirectory(); + String directoryName = createDirectoryRequest.getDirectoryName(); + + // If the parent directory is / then will transform to userResRootPath + // This only happens when the front-end go into the resource page first + // todo: we need to change the front-end logic to avoid this + if (parentDirectoryName.equals("/")) { + return FileUtils.concatFilePath(userResRootPath, directoryName); + } + + if (!StringUtils.startsWith(parentDirectoryName, userResRootPath)) { + throw new ServiceException(Status.ILLEGAL_RESOURCE_PATH, parentDirectoryName); + } + return FileUtils.concatFilePath(parentDirectoryName, directoryName); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileDtoValidator.java new file mode 100644 index 000000000000..d2c91387a68b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileDtoValidator.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.CreateFileDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; +import org.springframework.web.multipart.MultipartFile; + +@Component +public class CreateFileDtoValidator extends AbstractResourceValidator { + + public CreateFileDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(CreateFileDto createFileDto) { + String fileAbsolutePath = createFileDto.getFileAbsolutePath(); + User loginUser = createFileDto.getLoginUser(); + MultipartFile file = createFileDto.getFile(); + + exceptionResourceAbsolutePathInvalidated(fileAbsolutePath); + exceptionResourceExists(fileAbsolutePath); + exceptionFileInvalidated(file); + exceptionUserNoResourcePermission(loginUser, fileAbsolutePath); + exceptionResourceIsNotFile(fileAbsolutePath); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidator.java new file mode 100644 index 000000000000..ae61f47041ab --- /dev/null +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidator.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class CreateFileFromContentDtoValidator extends AbstractResourceValidator { + + public CreateFileFromContentDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(CreateFileFromContentDto createFileFromContentDto) { + String fileAbsolutePath = createFileFromContentDto.getFileAbsolutePath(); + User loginUser = createFileFromContentDto.getLoginUser(); + String fileContent = createFileFromContentDto.getFileContent(); + + exceptionResourceAbsolutePathInvalidated(fileAbsolutePath); + exceptionResourceIsNotFile(fileAbsolutePath); + 
exceptionResourceExists(fileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, fileAbsolutePath); + exceptionFileContentInvalidated(fileContent); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DeleteResourceDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DeleteResourceDtoValidator.java new file mode 100644 index 000000000000..e337d9e07c90 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DeleteResourceDtoValidator.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.DeleteResourceDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class DeleteResourceDtoValidator extends AbstractResourceValidator { + + public DeleteResourceDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(DeleteResourceDto deleteResourceDto) { + String resourceAbsolutePath = deleteResourceDto.getResourceAbsolutePath(); + User loginUser = deleteResourceDto.getLoginUser(); + + exceptionResourceAbsolutePathInvalidated(resourceAbsolutePath); + exceptionResourceNotExists(resourceAbsolutePath); + exceptionUserNoResourcePermission(loginUser, resourceAbsolutePath); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DownloadFileDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DownloadFileDtoValidator.java new file mode 100644 index 000000000000..ac15d279acfa --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/DownloadFileDtoValidator.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.DownloadFileDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class DownloadFileDtoValidator extends AbstractResourceValidator { + + public DownloadFileDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(DownloadFileDto downloadFileDto) { + String fileAbsolutePath = downloadFileDto.getFileAbsolutePath(); + User loginUser = downloadFileDto.getLoginUser(); + + exceptionResourceNotExists(fileAbsolutePath); + exceptionResourceAbsolutePathInvalidated(fileAbsolutePath); + exceptionResourceIsNotFile(fileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, fileAbsolutePath); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidator.java new file mode 100644 index 000000000000..b69ec68a7cca --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidator.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentDto; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class FetchFileContentDtoValidator extends AbstractResourceValidator { + + public FetchFileContentDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(FetchFileContentDto fetchFileContentDto) { + if (fetchFileContentDto.getSkipLineNum() < 0) { + throw new ServiceException("skipLineNum must be greater than or equal to 0"); + } + String resourceFileAbsolutePath = fetchFileContentDto.getResourceFileAbsolutePath(); + User loginUser = fetchFileContentDto.getLoginUser(); + + exceptionResourceAbsolutePathInvalidated(resourceFileAbsolutePath); + exceptionResourceIsNotFile(resourceFileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, resourceFileAbsolutePath); + 
exceptionFileContentCannotFetch(resourceFileAbsolutePath); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileFromContentRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileFromContentRequestTransformer.java new file mode 100644 index 000000000000..7f29646e3694 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileFromContentRequestTransformer.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static com.google.common.base.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentRequest; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class FileFromContentRequestTransformer + extends + AbstractResourceTransformer { + + public FileFromContentRequestTransformer(TenantDao tenantDao, StorageOperator storageOperator) { + super(tenantDao, storageOperator); + } + + @Override + public CreateFileFromContentDto transform(CreateFileFromContentRequest createFileFromContentRequest) { + validateCreateFileRequest(createFileFromContentRequest); + return doTransform(createFileFromContentRequest); + } + + private void validateCreateFileRequest(CreateFileFromContentRequest createFileFromContentRequest) { + checkNotNull(createFileFromContentRequest.getLoginUser(), "loginUser is null"); + checkNotNull(createFileFromContentRequest.getType(), "resource type is null"); + checkNotNull(createFileFromContentRequest.getFileName(), "file name is null"); + checkNotNull(createFileFromContentRequest.getParentAbsoluteDirectory(), "parent directory is null"); + checkNotNull(createFileFromContentRequest.getFileContent(), "file content is null"); + } + + private CreateFileFromContentDto doTransform(CreateFileFromContentRequest createFileFromContentRequest) { + String fileAbsolutePath = getFileAbsolutePath(createFileFromContentRequest); + return CreateFileFromContentDto.builder() + .loginUser(createFileFromContentRequest.getLoginUser()) + .fileAbsolutePath(fileAbsolutePath) + .fileContent(createFileFromContentRequest.getFileContent()) + .build(); + + } + + private String 
getFileAbsolutePath(CreateFileFromContentRequest createFileFromContentRequest) { + String parentDirectoryAbsolutePath = getParentDirectoryAbsolutePath( + createFileFromContentRequest.getLoginUser(), + createFileFromContentRequest.getParentAbsoluteDirectory(), + createFileFromContentRequest.getType()); + return FileUtils.concatFilePath(parentDirectoryAbsolutePath, createFileFromContentRequest.getFileName()); + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileRequestTransformer.java new file mode 100644 index 000000000000..c2f007dbf7df --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/FileRequestTransformer.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static com.google.common.base.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.api.dto.resources.CreateFileDto; +import org.apache.dolphinscheduler.api.dto.resources.CreateFileRequest; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class FileRequestTransformer extends AbstractResourceTransformer { + + public FileRequestTransformer(TenantDao tenantDao, StorageOperator storageOperator) { + super(tenantDao, storageOperator); + } + + @Override + public CreateFileDto transform(CreateFileRequest createFileRequest) { + validateCreateFileRequest(createFileRequest); + return doTransform(createFileRequest); + } + + private void validateCreateFileRequest(CreateFileRequest createFileRequest) { + checkNotNull(createFileRequest.getLoginUser(), "loginUser is null"); + checkNotNull(createFileRequest.getType(), "resource type is null"); + checkNotNull(createFileRequest.getFileName(), "file name is null"); + checkNotNull(createFileRequest.getParentAbsoluteDirectory(), "parent directory is null"); + checkNotNull(createFileRequest.getFile(), "file is null"); + } + + private CreateFileDto doTransform(CreateFileRequest createFileRequest) { + String fileAbsolutePath = getFileAbsolutePath(createFileRequest); + return CreateFileDto.builder() + .loginUser(createFileRequest.getLoginUser()) + .file(createFileRequest.getFile()) + .fileAbsolutePath(fileAbsolutePath) + .build(); + + } + + private String getFileAbsolutePath(CreateFileRequest createFileRequest) { + String parentDirectoryAbsolutePath = getParentDirectoryAbsolutePath( + createFileRequest.getLoginUser(), + createFileRequest.getParentAbsoluteDirectory(), + createFileRequest.getType()); + return 
FileUtils.concatFilePath(parentDirectoryAbsolutePath, createFileRequest.getFileName()); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/PagingResourceItemRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/PagingResourceItemRequestTransformer.java new file mode 100644 index 000000000000..9be51e413eea --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/PagingResourceItemRequestTransformer.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.api.validator.resource;

import static com.google.common.base.Preconditions.checkNotNull;

import org.apache.dolphinscheduler.api.dto.resources.PagingResourceItemRequest;
import org.apache.dolphinscheduler.api.dto.resources.QueryResourceDto;
import org.apache.dolphinscheduler.api.validator.ITransformer;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.repository.TenantDao;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;
import org.apache.dolphinscheduler.spi.enums.ResourceType;

import org.apache.commons.lang3.StringUtils;

import java.util.List;
import java.util.stream.Collectors;

import lombok.AllArgsConstructor;

import org.springframework.stereotype.Component;

import com.google.common.collect.Lists;

/**
 * Resolves a {@link PagingResourceItemRequest} into the {@link QueryResourceDto} holding the
 * storage directories the paging query should scan.
 */
@Component
@AllArgsConstructor
public class PagingResourceItemRequestTransformer
        implements
            ITransformer<PagingResourceItemRequest, QueryResourceDto> {

    private final StorageOperator storageOperator;

    private final TenantDao tenantDao;

    /**
     * Determines the query roots:
     * <ul>
     *   <li>an explicit path in the request wins and is used as-is;</li>
     *   <li>admin users get the resource base directory of every tenant;</li>
     *   <li>other users get only their own tenant's base directory.</li>
     * </ul>
     *
     * @param pagingResourceItemRequest request carrying the login user, resource type and optional path
     * @return DTO listing the absolute paths to query
     */
    @Override
    public QueryResourceDto transform(PagingResourceItemRequest pagingResourceItemRequest) {
        validatePagingResourceItemRequest(pagingResourceItemRequest);

        if (StringUtils.isNotEmpty(pagingResourceItemRequest.getResourceAbsolutePath())) {
            // An explicit path was supplied: query from that path only.
            return QueryResourceDto.builder()
                    .resourceAbsolutePaths(Lists.newArrayList(pagingResourceItemRequest.getResourceAbsolutePath()))
                    .build();
        }

        ResourceType resourceType = pagingResourceItemRequest.getResourceType();
        User loginUser = pagingResourceItemRequest.getLoginUser();
        if (loginUser.getUserType() == UserType.ADMIN_USER) {
            // If the current user is admin then query all tenants' resources.
            List<String> resourceAbsolutePaths = tenantDao.queryAll()
                    .stream()
                    .map(tenant -> storageOperator.getStorageBaseDirectory(tenant.getTenantCode(), resourceType))
                    .collect(Collectors.toList());
            return QueryResourceDto.builder()
                    .resourceAbsolutePaths(resourceAbsolutePaths)
                    .build();
        } else {
            // todo: inject the tenantCode when login
            Tenant tenant = tenantDao.queryById(loginUser.getTenantId());
            String storageBaseDirectory = storageOperator.getStorageBaseDirectory(tenant.getTenantCode(), resourceType);
            return QueryResourceDto.builder()
                    .resourceAbsolutePaths(Lists.newArrayList(storageBaseDirectory))
                    .build();
        }
    }

    // loginUser and resourceType are mandatory for every paging query.
    private void validatePagingResourceItemRequest(PagingResourceItemRequest pagingResourceItemRequest) {
        checkNotNull(pagingResourceItemRequest.getLoginUser(), "loginUser is null");
        checkNotNull(pagingResourceItemRequest.getResourceType(), "resourceType is null");
    }

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class RenameDirectoryDtoValidator extends AbstractResourceValidator { + + public RenameDirectoryDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(RenameDirectoryDto renameDirectoryDto) { + String originDirectoryAbsolutePath = renameDirectoryDto.getOriginDirectoryAbsolutePath(); + User loginUser = renameDirectoryDto.getLoginUser(); + String targetDirectoryAbsolutePath = renameDirectoryDto.getTargetDirectoryAbsolutePath(); + + exceptionResourceAbsolutePathInvalidated(originDirectoryAbsolutePath); + exceptionResourceIsNotDirectory(originDirectoryAbsolutePath); + exceptionResourceNotExists(originDirectoryAbsolutePath); + exceptionUserNoResourcePermission(loginUser, originDirectoryAbsolutePath); + + exceptionResourceAbsolutePathInvalidated(targetDirectoryAbsolutePath); + exceptionResourceIsNotDirectory(targetDirectoryAbsolutePath); + exceptionResourceExists(targetDirectoryAbsolutePath); + exceptionUserNoResourcePermission(loginUser, targetDirectoryAbsolutePath); + } +} diff --git 
package org.apache.dolphinscheduler.api.validator.resource;

import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryDto;
import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryRequest;
import org.apache.dolphinscheduler.api.validator.ITransformer;

import org.apache.commons.lang3.StringUtils;

import java.io.File;

import lombok.extern.slf4j.Slf4j;

import org.springframework.stereotype.Component;

/**
 * Transforms a {@link RenameDirectoryRequest} into a {@link RenameDirectoryDto},
 * deriving the target path by replacing the last path segment of the origin
 * directory with the new directory name.
 */
@Slf4j
@Component
public class RenameDirectoryRequestTransformer implements ITransformer<RenameDirectoryRequest, RenameDirectoryDto> {

    @Override
    public RenameDirectoryDto transform(RenameDirectoryRequest renameDirectoryRequest) {
        String originDirectoryAbsolutePath = renameDirectoryRequest.getDirectoryAbsolutePath();
        String targetDirectoryName = renameDirectoryRequest.getNewDirectoryName();

        String targetDirectoryAbsolutePath =
                getTargetDirectoryAbsolutePath(originDirectoryAbsolutePath, targetDirectoryName);

        return RenameDirectoryDto.builder()
                .loginUser(renameDirectoryRequest.getLoginUser())
                .originDirectoryAbsolutePath(originDirectoryAbsolutePath)
                .targetDirectoryAbsolutePath(targetDirectoryAbsolutePath)
                .build();
    }

    // Replace the last segment of the origin path with the new directory name.
    // NOTE(review): this splits/joins on the platform-dependent File.separator, while sibling
    // transformers build storage paths via FileUtils.concatFilePath — confirm the storage layer
    // uses the platform separator, otherwise this breaks on Windows-hosted API servers.
    private String getTargetDirectoryAbsolutePath(String originDirectoryAbsolutePath, String targetDirectoryName) {
        String originDirectoryParentAbsolutePath = StringUtils.substringBeforeLast(
                originDirectoryAbsolutePath, File.separator);
        return originDirectoryParentAbsolutePath + File.separator + targetDirectoryName;
    }
}
(ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.RenameFileDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class RenameFileDtoValidator extends AbstractResourceValidator { + + public RenameFileDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(RenameFileDto renameFileDto) { + String originFileAbsolutePath = renameFileDto.getOriginFileAbsolutePath(); + User loginUser = renameFileDto.getLoginUser(); + String targetFileAbsolutePath = renameFileDto.getTargetFileAbsolutePath(); + + exceptionResourceAbsolutePathInvalidated(originFileAbsolutePath); + exceptionResourceNotExists(originFileAbsolutePath); + exceptionResourceIsNotFile(originFileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, originFileAbsolutePath); + + exceptionResourceAbsolutePathInvalidated(targetFileAbsolutePath); + exceptionResourceExists(targetFileAbsolutePath); + 
exceptionResourceIsNotFile(targetFileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, targetFileAbsolutePath); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileRequestTransformer.java new file mode 100644 index 000000000000..43cb55c461d9 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileRequestTransformer.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.RenameFileDto; +import org.apache.dolphinscheduler.api.dto.resources.RenameFileRequest; +import org.apache.dolphinscheduler.api.validator.ITransformer; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class RenameFileRequestTransformer implements ITransformer { + + @Autowired + private StorageOperator storageOperator; + + @Override + public RenameFileDto transform(RenameFileRequest renameFileRequest) { + ResourceMetadata resourceMetaData = + storageOperator.getResourceMetaData(renameFileRequest.getFileAbsolutePath()); + return RenameFileDto.builder() + .loginUser(renameFileRequest.getLoginUser()) + .originFileAbsolutePath(renameFileRequest.getFileAbsolutePath()) + .targetFileAbsolutePath(FileUtils.concatFilePath(resourceMetaData.getResourceParentAbsolutePath(), + renameFileRequest.getNewFileName())) + .build(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileDtoValidator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileDtoValidator.java new file mode 100644 index 000000000000..468c6510878d --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileDtoValidator.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
package org.apache.dolphinscheduler.api.validator.resource;

import org.apache.dolphinscheduler.api.dto.resources.UpdateFileDto;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.repository.TenantDao;
import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator;

import java.util.Objects;

import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import com.google.common.io.Files;

/**
 * Validates an {@link UpdateFileDto} before an existing resource file is overwritten
 * with newly uploaded content.
 */
@Component
public class UpdateFileDtoValidator extends AbstractResourceValidator<UpdateFileDto> {

    public UpdateFileDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) {
        super(storageOperator, tenantDao);
    }

    /**
     * Rejects the update when the uploaded file's extension differs from the target
     * resource's extension, or when the target path is invalid, missing, not a regular
     * file, inaccessible to the user, or the upload itself is invalid.
     *
     * @param updateFileDto update request carrying the user, target path and uploaded file
     * @throws ServiceException when the upload would change the file extension
     */
    @Override
    public void validate(UpdateFileDto updateFileDto) {
        String fileAbsolutePath = updateFileDto.getFileAbsolutePath();
        User loginUser = updateFileDto.getLoginUser();
        MultipartFile file = updateFileDto.getFile();

        // Use getOriginalFilename(): MultipartFile#getName() returns the multipart form-field
        // name, not the uploaded file's name, so comparing its extension was always wrong.
        // getOriginalFilename() may be null depending on the client; skip the check then.
        String uploadedFileName = file.getOriginalFilename();
        if (uploadedFileName != null
                && !Objects.equals(Files.getFileExtension(uploadedFileName),
                        Files.getFileExtension(fileAbsolutePath))) {
            throw new ServiceException("file extension cannot be changed");
        }

        exceptionResourceAbsolutePathInvalidated(fileAbsolutePath);
        exceptionResourceNotExists(fileAbsolutePath);
        exceptionResourceIsNotFile(fileAbsolutePath);
        exceptionUserNoResourcePermission(loginUser, fileAbsolutePath);
        exceptionFileInvalidated(file);
    }
}
+ */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentDto; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import org.springframework.stereotype.Component; + +@Component +public class UpdateFileFromContentDtoValidator extends AbstractResourceValidator { + + public UpdateFileFromContentDtoValidator(StorageOperator storageOperator, TenantDao tenantDao) { + super(storageOperator, tenantDao); + } + + @Override + public void validate(UpdateFileFromContentDto updateFileFromContentDto) { + String fileAbsolutePath = updateFileFromContentDto.getFileAbsolutePath(); + User loginUser = updateFileFromContentDto.getLoginUser(); + String fileContent = updateFileFromContentDto.getFileContent(); + + exceptionResourceAbsolutePathInvalidated(fileAbsolutePath); + exceptionResourceNotExists(fileAbsolutePath); + exceptionResourceIsNotFile(fileAbsolutePath); + exceptionUserNoResourcePermission(loginUser, fileAbsolutePath); + exceptionFileContentCannotFetch(fileAbsolutePath); + exceptionFileContentInvalidated(fileContent); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileFromContentRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileFromContentRequestTransformer.java new file mode 100644 index 000000000000..961d0e604984 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileFromContentRequestTransformer.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentDto; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileFromContentRequest; +import org.apache.dolphinscheduler.api.validator.ITransformer; + +import org.springframework.stereotype.Component; + +@Component +public class UpdateFileFromContentRequestTransformer + implements + ITransformer { + + @Override + public UpdateFileFromContentDto transform(UpdateFileFromContentRequest updateFileContentRequest) { + return UpdateFileFromContentDto.builder() + .loginUser(updateFileContentRequest.getLoginUser()) + .fileAbsolutePath(updateFileContentRequest.getFileAbsolutePath()) + .fileContent(updateFileContentRequest.getFileContent()) + .build(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileRequestTransformer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileRequestTransformer.java new file mode 100644 index 000000000000..5c7646fbf912 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/validator/resource/UpdateFileRequestTransformer.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileDto; +import org.apache.dolphinscheduler.api.dto.resources.UpdateFileRequest; +import org.apache.dolphinscheduler.api.validator.ITransformer; + +import org.springframework.stereotype.Component; + +@Component +public class UpdateFileRequestTransformer implements ITransformer { + + @Override + public UpdateFileDto transform(UpdateFileRequest updateFileRequest) { + return UpdateFileDto.builder() + .loginUser(updateFileRequest.getLoginUser()) + .fileAbsolutePath(updateFileRequest.getFileAbsolutePath()) + .file(updateFileRequest.getFile()) + .build(); + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ResourceItemVO.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ResourceItemVO.java new file mode 100644 index 000000000000..9470ded4ba61 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/vo/ResourceItemVO.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
package org.apache.dolphinscheduler.api.vo;

import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity;
import org.apache.dolphinscheduler.spi.enums.ResourceType;

import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.util.Date;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * View object describing one resource item (file or directory) returned to the UI.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ResourceItemVO {

    // todo: remove this field, directly use fileName
    private String alias;

    // todo: use tenantName instead of userName
    private String userName;

    private String fileName;

    private String fullName;

    private boolean isDirectory;

    private ResourceType type;

    private long size;

    private Date createTime;

    private Date updateTime;

    /**
     * Builds the view object from a storage entity; directory names are exposed
     * without their trailing path separator.
     */
    public ResourceItemVO(StorageEntity storageEntity) {
        String name = storageEntity.getFileName();
        String absoluteName = storageEntity.getFullName();

        this.isDirectory = storageEntity.isDirectory();
        if (this.isDirectory) {
            // Strip the trailing separator so directory names render like file names.
            name = StringUtils.removeEndIgnoreCase(name, File.separator);
            absoluteName = StringUtils.removeEndIgnoreCase(absoluteName, File.separator);
        }

        // alias mirrors fileName until the todo above removes it.
        this.alias = name;
        this.fileName = name;
        this.fullName = absoluteName;
        this.type = storageEntity.getType();
        this.size = storageEntity.getSize();
        this.createTime = storageEntity.getCreateTime();
        this.updateTime = storageEntity.getUpdateTime();
    }

}
+ */ + +package org.apache.dolphinscheduler.api.vo.resources; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class FetchFileContentResponse { + + private String content; + +} diff --git a/dolphinscheduler-api/src/main/resources/application.yaml b/dolphinscheduler-api/src/main/resources/application.yaml index c93fd99a590e..9b0e94d64451 100644 --- a/dolphinscheduler-api/src/main/resources/application.yaml +++ b/dolphinscheduler-api/src/main/resources/application.yaml @@ -51,15 +51,7 @@ spring: password: root hikari: connection-test-query: select 1 - minimum-idle: 5 - auto-commit: true - validation-timeout: 3000 pool-name: DolphinScheduler - maximum-pool-size: 50 - connection-timeout: 30000 - idle-timeout: 600000 - leak-detection-threshold: 0 - initialization-fail-timeout: 1 quartz: auto-startup: false job-store-type: jdbc @@ -83,6 +75,8 @@ spring: pathmatch: matching-strategy: ANT_PATH_MATCHER static-path-pattern: /static/** + cloud.discovery.client.composite-indicator.enabled: false + springdoc: swagger-ui: path: /swagger-ui.html @@ -126,8 +120,8 @@ registry: namespace: dolphinscheduler connect-string: localhost:2181 retry-policy: - base-sleep-time: 60ms - max-sleep: 300ms + base-sleep-time: 1s + max-sleep: 3s max-retries: 5 session-timeout: 60s connection-timeout: 15s @@ -149,8 +143,8 @@ api: #tenant1: 11 #tenant2: 20 python-gateway: - # Weather enable python gateway server or not. The default value is true. - enabled: true + # Weather enable python gateway server or not. The default value is false. + enabled: false # Authentication token for connection from python api to python gateway server. Should be changed the default value # when you deploy in public network. 
auth-token: jwUDzpLsNKEFER4*a8gruBH_GsAurNxU7A@Xc diff --git a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml index a9a2d55ff8ad..74a2504d668e 100644 --- a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml +++ b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml @@ -28,5 +28,3 @@ dynamic-task: - {name: DATA_QUALITY,icon: shell-icon.png,hover: shell-hover.png} machineLearning: - {name: JUPYTER,icon: shell-icon.png,hover: shell-hover.png} - other: - - {name: PIGEON,icon: shell-icon.png,hover: shell-hover.png} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index 0a5e6b0ca0d1..dc287b87c11f 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -117,21 +117,9 @@ SUFFIX=resource file suffix CONTENT=resource file content UPDATE_RESOURCE_NOTES=edit resource file online DOWNLOAD_RESOURCE_NOTES=download resource file -CREATE_UDF_FUNCTION_NOTES=create udf function -UDF_TYPE=UDF type FUNC_NAME=function name CLASS_NAME=package and class name ARG_TYPES=arguments -UDF_DESC=udf desc -VIEW_UDF_FUNCTION_NOTES=view udf function -UPDATE_UDF_FUNCTION_NOTES=update udf function -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging -VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name -DELETE_UDF_FUNCTION_NOTES=delete udf function -AUTHORIZED_FILE_NOTES=authorized file -UNAUTHORIZED_FILE_NOTES=unauthorized file -AUTHORIZED_UDF_FUNC_NOTES=authorized udf func -UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func VERIFY_QUEUE_NOTES=verify queue TENANT_TAG=tenant related operation CREATE_TENANT_NOTES=create tenant @@ -259,8 +247,6 @@ UNAUTHORIZED_USER_NOTES=cancel authorization ALERT_GROUP_ID=alert group id 
ALERT_INSTANCE_IDS=alert instance ids(string format, multiple instances separated by ",") AUTHORIZED_USER_NOTES=authorized user -GRANT_UDF_FUNC_NOTES=grant udf function -UDF_IDS=udf ids(string format, multiple udf functions separated by ",") GRANT_DATASOURCE_NOTES=grant datasource DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id @@ -454,4 +440,4 @@ UPDATE_PROJECT_PREFERENCE_NOTES=update project preference UPDATE_PROJECT_PREFERENCE_STATE_NOTES=update the state of the project preference PROJECT_PREFERENCES_STATE= the state of the project preference PROJECT_PREFERENCES=project preferences -QUERY_PROJECT_PREFERENCE_NOTES=query project preference \ No newline at end of file +QUERY_PROJECT_PREFERENCE_NOTES=query project preference diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index c4ac93620cfc..032da3583844 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -122,21 +122,11 @@ SUFFIX=resource file suffix CONTENT=resource file content UPDATE_RESOURCE_NOTES=edit resource file online DOWNLOAD_RESOURCE_NOTES=download resource file -CREATE_UDF_FUNCTION_NOTES=create udf function -UDF_TYPE=UDF type FUNC_NAME=function name CLASS_NAME=package and class name ARG_TYPES=arguments -UDF_DESC=udf desc -VIEW_UDF_FUNCTION_NOTES=view udf function -UPDATE_UDF_FUNCTION_NOTES=update udf function -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging -VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name -DELETE_UDF_FUNCTION_NOTES=delete udf function AUTHORIZED_FILE_NOTES=authorized file UNAUTHORIZED_FILE_NOTES=unauthorized file -AUTHORIZED_UDF_FUNC_NOTES=authorized udf func -UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func 
VERIFY_QUEUE_NOTES=verify queue TENANT_TAG=tenant related operation CREATE_TENANT_NOTES=create tenant @@ -251,7 +241,6 @@ QUERY_WORKER_ADDRESS_LIST_NOTES=query worker address list QUERY_WORKFLOW_LINEAGE_BY_IDS_NOTES=query workflow lineage by ids QUERY_WORKFLOW_LINEAGE_BY_NAME_NOTES=query workflow lineage by name VIEW_TREE_NOTES=view tree -UDF_ID=udf id GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id GET_NODE_LIST_BY_DEFINITION_CODE_NOTES=get node list by definition code QUERY_PROCESS_DEFINITION_BY_NAME_NOTES=query process definition by name @@ -301,10 +290,7 @@ QUERY_RESOURCE_LIST_PAGING_NOTES=query resource list paging RESOURCE_PID=parent directory ID of the current resource RESOURCE_FULL_NAME=resource full name QUERY_BY_RESOURCE_NAME=query by resource name -QUERY_UDF_FUNC_LIST_NOTES=query udf funciton list VERIFY_RESOURCE_NAME_NOTES=verify resource name -GRANT_UDF_FUNC_NOTES=grant udf function -UDF_IDS=udf ids(string format, multiple udf functions separated by ",") GRANT_DATASOURCE_NOTES=grant datasource DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id @@ -486,4 +472,4 @@ UPDATE_PROJECT_PARAMETER_NOTES=update project parameter PROJECT_PARAMETER_CODE=project parameter code DELETE_PROJECT_PARAMETER_NOTES=delete project parameter QUERY_PROJECT_PARAMETER_LIST_PAGING_NOTES=query project parameter list paging -QUERY_PROJECT_PARAMETER_NOTES=query project parameter \ No newline at end of file +QUERY_PROJECT_PARAMETER_NOTES=query project parameter diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index e5d10aaf7d1e..f1aabee824dc 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -102,7 +102,6 @@ 
RESOURCE_FILE=资源文件 RESOURCE_ID=资源ID QUERY_RESOURCE_LIST_NOTES=查询资源列表 QUERY_BY_RESOURCE_NAME=通过资源名称查询 -QUERY_UDF_FUNC_LIST_NOTES=查询UDF函数列表 VERIFY_RESOURCE_NAME_NOTES=验证资源名称 DELETE_RESOURCE_BY_ID_NOTES=通过ID删除资源 VIEW_RESOURCE_BY_ID_NOTES=通过ID浏览资源 @@ -111,21 +110,11 @@ SUFFIX=资源文件后缀 CONTENT=资源文件内容 UPDATE_RESOURCE_NOTES=在线更新资源文件 DOWNLOAD_RESOURCE_NOTES=下载资源文件 -CREATE_UDF_FUNCTION_NOTES=创建UDF函数 -UDF_TYPE=UDF类型 FUNC_NAME=函数名称 CLASS_NAME=包名类名 ARG_TYPES=参数 -UDF_DESC=udf描述,使用说明 -VIEW_UDF_FUNCTION_NOTES=查看udf函数 -UPDATE_UDF_FUNCTION_NOTES=更新udf函数 -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表 -VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名 -DELETE_UDF_FUNCTION_NOTES=删除UDF函数 AUTHORIZED_FILE_NOTES=授权文件 UNAUTHORIZED_FILE_NOTES=取消授权文件 -AUTHORIZED_UDF_FUNC_NOTES=授权udf函数 -UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权 VERIFY_QUEUE_NOTES=验证队列 TENANT_TAG=租户相关操作 CREATE_TENANT_NOTES=创建租户 @@ -231,7 +220,6 @@ PLUGIN_ID=插件ID USER_ID=用户ID PAGE_SIZE=页大小 LIMIT=显示多少条 -UDF_ID=udf ID AUTHORIZE_RESOURCE_TREE_NOTES=授权资源树 RESOURCE_CURRENTDIR=当前资源目录 RESOURCE_PID=资源父目录ID @@ -285,8 +273,6 @@ UNAUTHORIZED_USER_NOTES=取消授权 ALERT_GROUP_ID=告警组ID ALERT_INSTANCE_IDS=告警实例ID列表(字符串格式,多个告警实例ID以","分割) AUTHORIZED_USER_NOTES=授权用户 -GRANT_UDF_FUNC_NOTES=授权udf函数 -UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) GRANT_DATASOURCE_NOTES=授权数据源 DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=通过任务实例ID查询子流程实例 @@ -483,4 +469,4 @@ UPDATE_PROJECT_PARAMETER_NOTES=更新项目参数 PROJECT_PARAMETER_CODE=项目参数code DELETE_PROJECT_PARAMETER_NOTES=删除项目参数 QUERY_PROJECT_PARAMETER_LIST_PAGING_NOTES=分页查询项目参数 -QUERY_PROJECT_PARAMETER_NOTES=查询项目参数 \ No newline at end of file +QUERY_PROJECT_PARAMETER_NOTES=查询项目参数 diff --git a/dolphinscheduler-api/src/main/resources/task-type-config.yaml b/dolphinscheduler-api/src/main/resources/task-type-config.yaml index 9105d5069758..05d1e6290aa4 100644 --- a/dolphinscheduler-api/src/main/resources/task-type-config.yaml +++ b/dolphinscheduler-api/src/main/resources/task-type-config.yaml @@ -56,7 
+56,6 @@ task: - 'PYTORCH' - 'KUBEFLOW' other: - - 'PIGEON' - 'ZEPPELIN' - 'CHUNJUN' - 'DATASYNC' diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/AssertionsHelper.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/AssertionsHelper.java index d2da5bc638c0..21977bd50db7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/AssertionsHelper.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/AssertionsHelper.java @@ -20,16 +20,29 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import java.text.MessageFormat; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.function.Executable; public class AssertionsHelper extends Assertions { + public static void assertThrowServiceException(String message, Executable executable) { + ServiceException exception = Assertions.assertThrows(ServiceException.class, executable); + Assertions.assertEquals(message, exception.getMessage()); + } + public static void assertThrowsServiceException(Status status, Executable executable) { ServiceException exception = Assertions.assertThrows(ServiceException.class, executable); Assertions.assertEquals(status.getCode(), exception.getCode()); } + public static void assertThrowsServiceException(String message, Executable executable) { + ServiceException exception = Assertions.assertThrows(ServiceException.class, executable); + Assertions.assertEquals(MessageFormat.format(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), message), + exception.getMessage()); + } + public static void assertDoesNotThrow(Executable executable) { Assertions.assertDoesNotThrow(executable); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java index 
5d8308760b7c..9114daa04fbd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java @@ -47,7 +47,7 @@ public class AccessTokenControllerTest extends AbstractControllerTest { @Test public void testCreateToken() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userId", "4"); + paramsMap.add("userId", "1"); paramsMap.add("expireTime", "2019-12-18 00:00:00"); paramsMap.add("token", "607f5aeaaa2093dbdff5d5522ce00510"); MvcResult mvcResult = mockMvc.perform(post("/access-tokens") @@ -64,7 +64,7 @@ public void testCreateToken() throws Exception { @Test public void testCreateTokenIfAbsent() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userId", "4"); + paramsMap.add("userId", "1"); paramsMap.add("expireTime", "2019-12-18 00:00:00"); paramsMap.add("token", null); @@ -101,7 +101,7 @@ public void testExceptionHandler() throws Exception { @Test public void testGenerateToken() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userId", "4"); + paramsMap.add("userId", "1"); paramsMap.add("expireTime", "2019-12-28 00:00:00"); MvcResult mvcResult = mockMvc.perform(post("/access-tokens/generate") .header("sessionId", sessionId) @@ -161,7 +161,7 @@ public void testDelAccessTokenById() throws Exception { public void testUpdateToken() throws Exception { testCreateToken(); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userId", "4"); + paramsMap.add("userId", "1"); paramsMap.add("expireTime", "2019-12-20 00:00:00"); paramsMap.add("token", "cxctoken123update"); MvcResult mvcResult = mockMvc.perform(put("/access-tokens/1") @@ -180,7 +180,7 @@ public void testUpdateTokenIfAbsent() throws Exception { this.testCreateTokenIfAbsent(); MultiValueMap paramsMap = new 
LinkedMultiValueMap<>(); - paramsMap.add("userId", "4"); + paramsMap.add("userId", "1"); paramsMap.add("expireTime", "2019-12-20 00:00:00"); paramsMap.add("token", null); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceControllerTest.java index 5ad7b2da526d..649d95bb4cd3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceControllerTest.java @@ -35,8 +35,10 @@ import org.apache.dolphinscheduler.common.enums.AlertPluginInstanceType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; @@ -49,6 +51,7 @@ */ public class AlertPluginInstanceControllerTest extends AbstractControllerTest { + private static AlertPluginInstance alertPluginInstance = new AlertPluginInstance(); private static final int pluginDefineId = 1; private static final String instanceName = "instanceName"; private static final String pluginInstanceParams = "pluginInstanceParams"; @@ -60,6 +63,12 @@ public class AlertPluginInstanceControllerTest extends AbstractControllerTest { @MockBean(name = "alertPluginInstanceServiceImpl") private AlertPluginInstanceService alertPluginInstanceService; + @BeforeAll + public static void initInstance() { + alertPluginInstance.setId(1); + alertPluginInstance.setInstanceName(instanceName); + } + @Test public void testCreateAlertPluginInstance() throws 
Exception { // Given @@ -71,7 +80,7 @@ public void testCreateAlertPluginInstance() throws Exception { paramsMap.add("pluginInstanceParams", pluginInstanceParams); when(alertPluginInstanceService.create(any(User.class), eq(pluginDefineId), eq(instanceName), - eq(pluginInstanceType), eq(warningType), eq(pluginInstanceParams))).thenReturn(null); + eq(pluginInstanceType), eq(warningType), eq(pluginInstanceParams))).thenReturn(alertPluginInstance); // When final MvcResult mvcResult = mockMvc.perform(post("/alert-plugin-instances") @@ -122,7 +131,7 @@ public void testUpdateAlertPluginInstance() throws Exception { paramsMap.add("pluginInstanceParams", pluginInstanceParams); when(alertPluginInstanceService.updateById(any(User.class), eq(pluginDefineId), eq(instanceName), - eq(warningType), eq(pluginInstanceParams))).thenReturn(null); + eq(warningType), eq(pluginInstanceParams))).thenReturn(alertPluginInstance); // When final MvcResult mvcResult = mockMvc.perform(put("/alert-plugin-instances/{id}", pluginDefineId) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java index 2e9961b5d2ce..36622b8b98cf 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataAnalysisControllerTest.java @@ -150,4 +150,40 @@ public void testCountQueueState() throws Exception { assertThat(result.getCode().intValue()).isEqualTo(Status.SUCCESS.getCode()); logger.info(mvcResult.getResponse().getContentAsString()); } + + @Test + public void testListCommand() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("projectCode", "16"); + paramsMap.add("pageNo", "1"); + paramsMap.add("pageSize", "10"); + + MvcResult mvcResult = 
mockMvc.perform(get("/projects/analysis/listCommand") + .header("sessionId", sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + assertThat(result.getCode().intValue()).isEqualTo(Status.SUCCESS.getCode()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void testListErrorCommand() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("projectCode", "16"); + paramsMap.add("pageNo", "1"); + paramsMap.add("pageSize", "10"); + + MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/listErrorCommand") + .header("sessionId", sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + assertThat(result.getCode().intValue()).isEqualTo(Status.SUCCESS.getCode()); + logger.info(mvcResult.getResponse().getContentAsString()); + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecuteFunctionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecuteFunctionControllerTest.java index 7cd97a555144..5b77acc0991c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecuteFunctionControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ExecuteFunctionControllerTest.java @@ -38,8 +38,13 @@ import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; +import 
org.apache.dolphinscheduler.plugin.task.api.enums.Direct; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.jupiter.api.Test; @@ -75,12 +80,13 @@ public class ExecuteFunctionControllerTest extends AbstractControllerTest { final String tenantCode = "root"; final Long environmentCode = 4L; final Integer timeout = 5; - final ImmutableMap startParams = ImmutableMap.of("start", "params"); + final List startParams = + Collections.singletonList(new Property("start", Direct.IN, DataType.VARCHAR, "params")); final Integer expectedParallelismNumber = 6; final int dryRun = 7; final int testFlag = 0; final ComplementDependentMode complementDependentMode = ComplementDependentMode.OFF_MODE; - final Integer version = null; + final Integer version = 1; final boolean allLevelDependent = false; final JsonObject expectResponseContent = gson .fromJson("{\"code\":0,\"msg\":\"success\",\"data\":\"Test Data\",\"success\":true,\"failed\":false}", @@ -115,6 +121,7 @@ public void testStartProcessInstanceWithFullParams() throws Exception { paramsMap.add("dryRun", String.valueOf(dryRun)); paramsMap.add("testFlag", String.valueOf(testFlag)); paramsMap.add("executionOrder", String.valueOf(executionOrder)); + paramsMap.add("version", String.valueOf(version)); when(executorService.execProcessInstance(any(User.class), eq(projectCode), eq(processDefinitionCode), eq(scheduleTime), eq(execType), eq(failureStrategy), eq(startNodeList), eq(taskDependType), @@ -162,6 +169,7 @@ public void testStartProcessInstanceWithoutTimeout() throws Exception { paramsMap.add("dryRun", String.valueOf(dryRun)); paramsMap.add("testFlag", String.valueOf(testFlag)); paramsMap.add("executionOrder", String.valueOf(executionOrder)); + paramsMap.add("version", String.valueOf(version)); when(executorService.execProcessInstance(any(User.class), eq(projectCode), eq(processDefinitionCode), 
eq(scheduleTime), eq(execType), eq(failureStrategy), eq(startNodeList), eq(taskDependType), @@ -209,6 +217,7 @@ public void testStartProcessInstanceWithoutStartParams() throws Exception { paramsMap.add("dryRun", String.valueOf(dryRun)); paramsMap.add("testFlag", String.valueOf(testFlag)); paramsMap.add("executionOrder", String.valueOf(executionOrder)); + paramsMap.add("version", String.valueOf(version)); when(executorService.execProcessInstance(any(User.class), eq(projectCode), eq(processDefinitionCode), eq(scheduleTime), eq(execType), eq(failureStrategy), eq(startNodeList), eq(taskDependType), @@ -241,6 +250,7 @@ public void testStartProcessInstanceWithRequiredParams() throws Exception { paramsMap.add("failureStrategy", String.valueOf(failureStrategy)); paramsMap.add("warningType", String.valueOf(warningType)); paramsMap.add("scheduleTime", scheduleTime); + paramsMap.add("version", String.valueOf(version)); when(executorService.execProcessInstance(any(User.class), eq(projectCode), eq(processDefinitionCode), eq(scheduleTime), eq(null), eq(failureStrategy), eq(null), eq(null), eq(warningType), diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java index 737faa16368a..93dc48b80c57 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java @@ -41,8 +41,7 @@ public class MonitorControllerTest extends AbstractControllerTest { @Test public void testListMaster() throws Exception { - - MvcResult mvcResult = mockMvc.perform(get("/monitor/masters") + MvcResult mvcResult = mockMvc.perform(get("/monitor/MASTER") .header(SESSION_ID, sessionId) /* .param("type", ResourceType.FILE.name()) */) .andExpect(status().isOk()) @@ -59,7 +58,7 @@ public 
void testListMaster() throws Exception { @Test public void testListWorker() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/workers") + MvcResult mvcResult = mockMvc.perform(get("/monitor/WORKER") .header(SESSION_ID, sessionId) /* .param("type", ResourceType.FILE.name()) */) .andExpect(status().isOk()) @@ -74,8 +73,9 @@ public void testListWorker() throws Exception { } @Test - public void testQueryDatabaseState() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/databases") + public void testListAlert() throws Exception { + + MvcResult mvcResult = mockMvc.perform(get("/monitor/ALERT_SERVER") .header(SESSION_ID, sessionId) /* .param("type", ResourceType.FILE.name()) */) .andExpect(status().isOk()) @@ -89,4 +89,19 @@ public void testQueryDatabaseState() throws Exception { logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testQueryDatabaseState() throws Exception { + MvcResult mvcResult = mockMvc.perform(get("/monitor/databases") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + result.getCode().equals(Status.SUCCESS.getCode()); + + Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectParameterControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectParameterControllerTest.java index f1bd20bb2358..ea8bbf67fddb 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectParameterControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectParameterControllerTest.java 
@@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -48,8 +49,9 @@ public void testCreateProjectParameter() { User loginUser = getGeneralUser(); Mockito.when(projectParameterService.createProjectParameter(Mockito.any(), Mockito.anyLong(), Mockito.any(), - Mockito.any())).thenReturn(getSuccessResult()); - Result result = projectParameterController.createProjectParameter(loginUser, 1, "key", "value"); + Mockito.any(), Mockito.any())).thenReturn(getSuccessResult()); + Result result = projectParameterController.createProjectParameter(loginUser, 1, "key", "value", + DataType.VARCHAR.name()); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); } @@ -58,8 +60,9 @@ public void testUpdateProjectParameter() { User loginUser = getGeneralUser(); Mockito.when(projectParameterService.updateProjectParameter(Mockito.any(), Mockito.anyLong(), Mockito.anyLong(), - Mockito.any(), Mockito.any())).thenReturn(getSuccessResult()); - Result result = projectParameterController.updateProjectParameter(loginUser, 1, 1L, "key", "value"); + Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(getSuccessResult()); + Result result = projectParameterController.updateProjectParameter(loginUser, 1, 1L, "key", "value", + DataType.LONG.name()); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); } @@ -88,8 +91,9 @@ public void testQueryProjectParameterListPaging() { User loginUser = getGeneralUser(); Mockito.when(projectParameterService.queryProjectParameterListPaging(Mockito.any(), Mockito.anyLong(), - Mockito.anyInt(), Mockito.anyInt(), Mockito.any())).thenReturn(getSuccessResult()); - Result result = projectParameterController.queryProjectParameterListPaging(loginUser, 1, "1", 1, 10); + 
Mockito.anyInt(), Mockito.anyInt(), Mockito.any(), Mockito.any())).thenReturn(getSuccessResult()); + Result result = projectParameterController.queryProjectParameterListPaging(loginUser, 1, "1", + DataType.VARCHAR.name(), 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java index bfed64f9f621..2d4effd7ade4 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; @@ -26,31 +27,24 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.ResourcesService; -import org.apache.dolphinscheduler.api.service.UdfFuncService; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.api.vo.resources.FetchFileContentResponse; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.spi.enums.ResourceType; -import java.util.HashMap; -import java.util.Map; - import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.boot.test.mock.mockito.MockBean; -import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -/** - * resources controller test - */ +import com.fasterxml.jackson.core.type.TypeReference; + public class ResourcesControllerTest extends AbstractControllerTest { private static final Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class); @@ -58,40 +52,12 @@ public class ResourcesControllerTest extends AbstractControllerTest { @MockBean(name = "resourcesServiceImpl") private ResourcesService resourcesService; - @MockBean(name = "udfFuncServiceImpl") - private UdfFuncService udfFuncService; - - @Test - public void testQuerytResourceList() throws Exception { - Map mockResult = new HashMap<>(); - mockResult.put(Constants.STATUS, Status.SUCCESS); - Mockito.when(resourcesService.queryResourceList(Mockito.any(), Mockito.any(), Mockito.anyString())) - .thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("fullName", "dolphinscheduler/resourcePath"); - paramsMap.add("type", ResourceType.FILE.name()); - MvcResult mvcResult = mockMvc.perform(get("/resources/list") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - @Test public void testQueryResourceListPaging() throws Exception { Result mockResult = new Result<>(); mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(resourcesService.queryResourceListPaging( - Mockito.any(), Mockito.anyString(), 
Mockito.anyString(), Mockito.any(), - Mockito.anyString(), Mockito.anyInt(), Mockito.anyInt())) - .thenReturn(mockResult); + // Mockito.when(resourcesService.pagingResourceItem() + // .thenReturn(mockResult); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("type", String.valueOf(ResourceType.FILE)); @@ -111,41 +77,17 @@ public void testQueryResourceListPaging() throws Exception { Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testVerifyResourceName() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); - Mockito.when(resourcesService.verifyResourceName(Mockito.anyString(), Mockito.any(), Mockito.any())) - .thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("fullName", "list_resources_1.sh"); - paramsMap.add("type", "FILE"); - - MvcResult mvcResult = mockMvc.perform(get("/resources/verify-name") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); + assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testViewResource() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.HDFS_NOT_STARTUP.getCode()); - Mockito.when(resourcesService.readResource(Mockito.any(), - Mockito.anyString(), Mockito.anyString(), Mockito.anyInt(), Mockito.anyInt())) - .thenReturn(mockResult); + FetchFileContentResponse 
fetchFileContentResponse = FetchFileContentResponse.builder() + .content("echo hello") + .build(); + Mockito.when(resourcesService.fetchResourceFileContent(Mockito.any())) + .thenReturn(fetchFileContentResponse); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("skipLineNum", "2"); @@ -160,19 +102,17 @@ public void testViewResource() throws Exception { .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), + new TypeReference>() { + }); - Assertions.assertEquals(Status.HDFS_NOT_STARTUP.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + assertEquals(fetchFileContentResponse, result.getData()); } @Test public void testCreateResourceFile() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); - Mockito.when(resourcesService.createResourceFile(Mockito.any(), Mockito.any(), Mockito.anyString(), - Mockito.anyString(), Mockito.anyString(), Mockito.anyString())) - .thenReturn(mockResult); + Mockito.doNothing().when(resourcesService).createFileFromContent(Mockito.any()); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("type", String.valueOf(ResourceType.FILE)); @@ -190,19 +130,16 @@ public void testCreateResourceFile() throws Exception { .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Result result = + JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference>() { + }); - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); - 
logger.info(mvcResult.getResponse().getContentAsString()); + assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } @Test public void testUpdateResourceContent() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); - Mockito.when(resourcesService.updateResourceContent(Mockito.any(), Mockito.anyString(), - Mockito.anyString(), Mockito.anyString())) - .thenReturn(mockResult); + Mockito.doNothing().when(resourcesService).updateFileFromContent(Mockito.any()); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id", "1"); @@ -217,210 +154,33 @@ public void testUpdateResourceContent() throws Exception { .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Result result = + JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference>() { + }); - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } @Test public void testDownloadResource() throws Exception { - Mockito.when(resourcesService.downloadResource(Mockito.any(), Mockito.anyString())) - .thenReturn(null); + Mockito.doNothing().when(resourcesService).downloadResource(Mockito.any(), Mockito.any()); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("fullName", "dolphinscheduler/resourcePath"); MvcResult mvcResult = mockMvc.perform(get("/resources/download") .params(paramsMap) - .header(SESSION_ID, sessionId)) - .andExpect(status().is(HttpStatus.BAD_REQUEST.value())) - .andReturn(); - - Assertions.assertNotNull(mvcResult); - } - - @Test - public void testCreateUdfFunc() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); - 
Mockito.when(udfFuncService - .createUdfFunction(Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), - Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.any())) - .thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("type", String.valueOf(UdfType.HIVE)); - paramsMap.add("funcName", "test_udf"); - paramsMap.add("className", "com.test.word.contWord"); - paramsMap.add("argTypes", "argTypes"); - paramsMap.add("database", "database"); - paramsMap.add("description", "description"); - paramsMap.add("resourceId", "1"); - paramsMap.add("fullName", "dolphinscheduler/resourcePath"); - - MvcResult mvcResult = mockMvc.perform(post("/resources/udf-func") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testViewUIUdfFunction() throws Exception { - Result mockResult = new Result<>(); - putMsg(mockResult, Status.TENANT_NOT_EXIST); - Mockito.when(udfFuncService - .queryUdfFuncDetail(Mockito.any(), Mockito.anyInt())) - .thenReturn(mockResult); - - MvcResult mvcResult = mockMvc.perform(get("/resources/{id}/udf-func", "123") .header(SESSION_ID, sessionId)) .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testUpdateUdfFunc() throws Exception { - Result mockResult 
= new Result<>(); - mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); - Mockito.when(udfFuncService - .updateUdfFunc(Mockito.any(), Mockito.anyInt(), Mockito.anyString(), Mockito.anyString(), - Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.any(), - Mockito.anyString())) - .thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("id", "1"); - paramsMap.add("type", String.valueOf(UdfType.HIVE)); - paramsMap.add("funcName", "update_duf"); - paramsMap.add("className", "com.test.word.contWord"); - paramsMap.add("argTypes", "argTypes"); - paramsMap.add("database", "database"); - paramsMap.add("description", "description"); - paramsMap.add("resourceId", "1"); - paramsMap.add("fullName", "dolphinscheduler/resourcePath"); - - MvcResult mvcResult = mockMvc.perform(put("/resources/udf-func/{id}", "456") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testQueryUdfFuncList() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(udfFuncService.queryUdfFuncListPaging(Mockito.any(), Mockito.anyString(), Mockito.anyInt(), - Mockito.anyInt())).thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("pageNo", "1"); - paramsMap.add("searchVal", "udf"); - paramsMap.add("pageSize", "1"); - - MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - 
.andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testQueryResourceList() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(udfFuncService.queryUdfFuncList(Mockito.any(), Mockito.anyInt())).thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("type", String.valueOf(UdfType.HIVE)); - - MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func/list") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testVerifyUdfFuncName() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(udfFuncService.verifyUdfFuncByName(Mockito.any(), Mockito.anyString())).thenReturn(mockResult); - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name", "test"); - - MvcResult mvcResult = mockMvc.perform(get("/resources/udf-func/verify-name") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - 
public void testDeleteUdfFunc() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(udfFuncService.delete(Mockito.any(), Mockito.anyInt())).thenReturn(mockResult); - - MvcResult mvcResult = mockMvc.perform(delete("/resources/udf-func/{id}", "123") - .header(SESSION_ID, sessionId)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assertions.assertNotNull(mvcResult); } @Test public void testDeleteResource() throws Exception { - Result mockResult = new Result<>(); - mockResult.setCode(Status.SUCCESS.getCode()); - Mockito.when(resourcesService.delete(Mockito.any(), Mockito.anyString(), - Mockito.anyString())) - .thenReturn(mockResult); + Mockito.doNothing().when(resourcesService).delete(Mockito.any()); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("fullName", "dolphinscheduler/resourcePath"); paramsMap.add("tenantCode", "123"); @@ -431,9 +191,10 @@ public void testDeleteResource() throws Exception { .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Result result = + JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference>() { + }); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java index 5644065a333e..1be311fbaf68 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.WarningType; @@ -38,6 +39,7 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.slf4j.Logger; @@ -48,13 +50,25 @@ import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import com.google.common.collect.ImmutableMap; + public class SchedulerControllerTest extends AbstractControllerTest { + private static Schedule scheduleObj = new Schedule(); + private static final Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); + final ImmutableMap result = + ImmutableMap.of(Constants.STATUS, Status.SUCCESS, Constants.DATA_LIST, scheduleObj); + @MockBean(name = "schedulerService") private SchedulerService schedulerService; + @BeforeAll + public static void initInstance() { + scheduleObj.setId(1); + } + @Test public void testCreateSchedule() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -72,7 +86,7 @@ public void testCreateSchedule() throws Exception { Mockito.when(schedulerService.insertSchedule(isA(User.class), isA(Long.class), 
isA(Long.class), isA(String.class), isA(WarningType.class), isA(int.class), isA(FailureStrategy.class), - isA(Priority.class), isA(String.class), isA(String.class), isA(Long.class))).thenReturn(success()); + isA(Priority.class), isA(String.class), isA(String.class), isA(Long.class))).thenReturn(result); MvcResult mvcResult = mockMvc.perform(post("/projects/{projectCode}/schedules/", 123) .header(SESSION_ID, sessionId) @@ -103,7 +117,7 @@ public void testUpdateSchedule() throws Exception { Mockito.when(schedulerService.updateSchedule(isA(User.class), isA(Long.class), isA(Integer.class), isA(String.class), isA(WarningType.class), isA(Integer.class), isA(FailureStrategy.class), - isA(Priority.class), isA(String.class), isA(String.class), isA(Long.class))).thenReturn(success()); + isA(Priority.class), isA(String.class), isA(String.class), isA(Long.class))).thenReturn(result); MvcResult mvcResult = mockMvc.perform(put("/projects/{projectCode}/schedules/{id}", 123, 37) .header(SESSION_ID, sessionId) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java index 7ebe5bf7576d..b58944537b87 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java @@ -82,9 +82,7 @@ public void testForceTaskSuccess() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("taskInstanceId", "104"); - Result mockResult = new Result(); - putMsg(mockResult, Status.SUCCESS); - when(taskInstanceService.forceTaskSuccess(any(User.class), anyLong(), anyInt())).thenReturn(mockResult); + Mockito.doNothing().when(taskInstanceService).forceTaskSuccess(any(User.class), anyLong(), anyInt()); MvcResult mvcResult = 
mockMvc.perform(post("/projects/{projectName}/task-instance/force-success", "cxc_1113") .header(SESSION_ID, sessionId) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java index 1f6eca0572c7..303d4735bc82 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -145,24 +145,6 @@ public void testRevokeProject() throws Exception { logger.info(mvcResult.getResponse().getContentAsString()); } - @Test - public void testGrantUDFFunc() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("userId", "32"); - paramsMap.add("udfIds", "5"); - - MvcResult mvcResult = mockMvc.perform(post("/users/grant-udf-func") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assertions.assertEquals(Status.USER_NOT_EXIST.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - @Test public void testGrantDataSource() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java index cc70f200e1eb..8e76ec1694f6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/v2/TaskInstanceV2ControllerTest.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.api.controller.AbstractControllerTest; import org.apache.dolphinscheduler.api.dto.taskInstance.TaskInstanceQueryRequest; +import org.apache.dolphinscheduler.api.dto.taskInstance.TaskInstanceSuccessResponse; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.TaskInstanceService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -85,12 +86,9 @@ public void testQueryTaskListPaging() { @Test public void testForceTaskSuccess() { - Result mockResult = new Result(); - putMsg(mockResult, Status.SUCCESS); - - when(taskInstanceService.forceTaskSuccess(any(), Mockito.anyLong(), Mockito.anyInt())).thenReturn(mockResult); + Mockito.doNothing().when(taskInstanceService).forceTaskSuccess(any(), Mockito.anyLong(), Mockito.anyInt()); - Result taskResult = taskInstanceV2Controller.forceTaskSuccess(null, 1L, 1); + TaskInstanceSuccessResponse taskResult = taskInstanceV2Controller.forceTaskSuccess(null, 1L, 1); Assertions.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()), taskResult.getCode()); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/permission/UdfFuncPermissionCheckTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/permission/UdfFuncPermissionCheckTest.java deleted file mode 100644 index 23a01c8f0a8b..000000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/permission/UdfFuncPermissionCheckTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.permission; - -import org.apache.dolphinscheduler.common.enums.AuthorizationType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@ExtendWith(MockitoExtension.class) -public class UdfFuncPermissionCheckTest { - - private static final Logger logger = LoggerFactory.getLogger(UdfFuncPermissionCheckTest.class); - @InjectMocks - private ResourcePermissionCheckServiceImpl.UdfFuncPermissionCheck udfFuncPermissionCheck; - - @Mock - private UdfFuncMapper udfFuncMapper; - - @Test - public void testPermissionCheck() { - User user = getLoginUser(); - Assertions.assertTrue(udfFuncPermissionCheck.permissionCheck(user.getId(), null, logger)); - } - - @Test - public 
void testAuthorizationTypes() { - List authorizationTypes = udfFuncPermissionCheck.authorizationTypes(); - Assertions.assertEquals(Collections.singletonList(AuthorizationType.UDF), authorizationTypes); - } - - @Test - public void testListAuthorizedResourceIds() { - User user = getLoginUser(); - UdfFunc udfFunc = new UdfFunc(); - Set ids = new HashSet(); - ids.add(udfFunc.getId()); - List udfFuncs = Arrays.asList(udfFunc); - - Mockito.when(udfFuncMapper.listAuthorizedUdfByUserId(user.getId())).thenReturn(udfFuncs); - - Assertions.assertEquals(ids, udfFuncPermissionCheck.listAuthorizedResourceIds(user.getId(), logger)); - } - - private User getLoginUser() { - User loginUser = new User(); - loginUser.setUserType(UserType.GENERAL_USER); - loginUser.setUserName("test"); - loginUser.setId(1); - return loginUser; - } - - private Project getProject() { - Project project = new Project(); - project.setCode(1L); - project.setId(1); - project.setName("projectName"); - project.setUserId(1); - return project; - } -} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/python/PythonGatewayTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/python/PythonGatewayTest.java index 173ae988546a..5ae26af82606 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/python/PythonGatewayTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/python/PythonGatewayTest.java @@ -97,19 +97,6 @@ public void testGetDependentInfo() { Assertions.assertEquals((long) result.get("taskDefinitionCode"), taskDefinition.getCode()); } - @Test - public void testCreateResource() { - User user = getTestUser(); - String resourceDir = "/dir1/dir2/"; - String resourceName = "test"; - String resourceSuffix = "py"; - String content = "content"; - String resourceFullName = resourceDir + resourceName + "." 
+ resourceSuffix; - - Assertions.assertDoesNotThrow( - () -> pythonGateway.createOrUpdateResource(user.getUserName(), resourceFullName, content)); - } - @Test public void testQueryResourcesFileInfo() throws Exception { User user = getTestUser(); @@ -118,12 +105,11 @@ public void testQueryResourcesFileInfo() throws Exception { Mockito.when(resourcesService.queryFileStatus(user.getUserName(), storageEntity.getFullName())) .thenReturn(storageEntity); StorageEntity result = pythonGateway.queryResourcesFileInfo(user.getUserName(), storageEntity.getFullName()); - Assertions.assertEquals(result.getId(), storageEntity.getId()); + Assertions.assertEquals(result.getFullName(), storageEntity.getFullName()); } private StorageEntity getTestResource() { StorageEntity storageEntity = new StorageEntity(); - storageEntity.setId(1); storageEntity.setType(ResourceType.FILE); storageEntity.setFullName("/dev/test.py"); return storageEntity; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java index 85cbcbc833bc..6571d8b0f8c1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java @@ -22,6 +22,7 @@ import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_GROUP_CREATE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_GROUP_DELETE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_GROUP_UPDATE; +import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_GROUP_VIEW; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.ArgumentMatchers.any; import static 
org.mockito.ArgumentMatchers.eq; @@ -41,6 +42,7 @@ import org.apache.commons.collections4.CollectionUtils; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import org.junit.jupiter.api.Assertions; @@ -67,6 +69,12 @@ public class AlertGroupServiceTest { private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class); private static final Logger logger = LoggerFactory.getLogger(AlertGroupServiceTest.class); private static final Logger alertGroupServiceLogger = LoggerFactory.getLogger(AlertGroupServiceImpl.class); + private String tooLongDescription = + "this is a toooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo" + + + "ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo" + + + "ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo long description"; @InjectMocks private AlertGroupServiceImpl alertGroupService; @@ -81,10 +89,25 @@ public class AlertGroupServiceTest { @Test public void testQueryAlertGroup() { + User user = getLoginUser(); when(alertGroupMapper.queryAllGroupList()).thenReturn(getList()); - List alertGroups = alertGroupService.queryAllAlertGroup(getLoginUser()); + List alertGroups = alertGroupService.queryAllAlertGroup(user); Assertions.assertEquals(2, alertGroups.size()); + + user.setUserType(UserType.GENERAL_USER); + user.setId(2); + + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, 2, + alertGroupServiceLogger)) + .thenReturn(Collections.emptySet()); + Assertions.assertEquals(alertGroupService.queryAllAlertGroup(user).size(), 0); + + user.setId(3); + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, 3, + alertGroupServiceLogger)) + .thenReturn(Collections.singleton(1)); + 
assertDoesNotThrow(() -> alertGroupService.queryAllAlertGroup(user)); } @Test @@ -95,6 +118,35 @@ public void testQueryNormalAlertGroup() { Assertions.assertEquals(1, alertGroups.size()); } + @Test + public void testQueryAlertGroupById() { + User user = getLoginUser(); + user.setId(2); + user.setUserType(UserType.GENERAL_USER); + + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_GROUP, 2, ALERT_GROUP_VIEW, + baseServiceLogger)) + .thenReturn(false); + + assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, + () -> alertGroupService.queryAlertGroupById(user, 1)); + + user.setId(1); + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_GROUP, 1, ALERT_GROUP_VIEW, + baseServiceLogger)) + .thenReturn(true); + when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ALERT_GROUP, new Object[]{999}, 1, + baseServiceLogger)) + .thenReturn(true); + when(alertGroupMapper.selectById(999)).thenReturn(null); + + assertThrowsServiceException(Status.ALERT_GROUP_NOT_EXIST, + () -> alertGroupService.queryAlertGroupById(user, 999)); + + when(alertGroupMapper.selectById(999)).thenReturn(getEntity()); + assertDoesNotThrow(() -> alertGroupService.queryAlertGroupById(user, 999)); + } + @Test public void testListPaging() { IPage page = new Page<>(1, 10); @@ -114,6 +166,18 @@ public void testListPaging() { alertGroupPageInfo = alertGroupService.listPaging(user, groupName, 1, 10); Assertions.assertTrue(CollectionUtils.isNotEmpty(alertGroupPageInfo.getTotalList())); + user.setUserType(UserType.GENERAL_USER); + user.setId(99); + page.setTotal(1); + page.setRecords(Collections.singletonList(getEntity())); + + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, user.getId(), + alertGroupServiceLogger)) + .thenReturn(Collections.singleton(1)); + when(alertGroupMapper.queryAlertGroupPageByIds(any(Page.class), any(List.class), eq(groupName))) + 
.thenReturn(page); + + alertGroupService.listPaging(user, groupName, 1, 10).getTotal(); } @Test @@ -134,8 +198,19 @@ public void testCreateAlertgroup() { ALERT_GROUP_CREATE, baseServiceLogger)).thenReturn(true); when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ALERT_GROUP, null, user.getId(), baseServiceLogger)).thenReturn(true); + + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> alertGroupService.createAlertGroup(user, groupName, tooLongDescription, null)); AlertGroup alertGroup = alertGroupService.createAlertGroup(user, groupName, groupName, null); assertNotNull(alertGroup); + + when(alertGroupMapper.insert(any(AlertGroup.class))).thenReturn(-1); + assertThrowsServiceException(Status.CREATE_ALERT_GROUP_ERROR, + () -> alertGroupService.createAlertGroup(user, groupName, groupName, null)); + + when(alertGroupMapper.insert(any(AlertGroup.class))).thenThrow(DuplicateKeyException.class); + assertThrowsServiceException(Status.ALERT_GROUP_EXIST, + () -> alertGroupService.createAlertGroup(user, groupName, groupName, null)); } @Test @@ -162,7 +237,7 @@ public void testUpdateAlertgroup() { user.setUserType(UserType.GENERAL_USER); assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, () -> alertGroupService.updateAlertGroupById(user, 1, groupName, groupName, null)); - user.setUserType(UserType.ADMIN_USER); + // not exist user.setUserType(UserType.ADMIN_USER); when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_GROUP, user.getId(), @@ -171,6 +246,10 @@ public void testUpdateAlertgroup() { baseServiceLogger)).thenReturn(true); assertThrowsServiceException(Status.ALERT_GROUP_NOT_EXIST, () -> alertGroupService.updateAlertGroupById(user, 1, groupName, groupName, null)); + + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> alertGroupService.updateAlertGroupById(user, 1, groupName, tooLongDescription, null)); + // success 
when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ALERT_GROUP, new Object[]{3}, user.getId(), baseServiceLogger)).thenReturn(true); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java index 52910858d584..bd8a03995119 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceServiceTest.java @@ -18,9 +18,11 @@ package org.apache.dolphinscheduler.api.service; import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowsServiceException; -import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALART_INSTANCE_CREATE; +import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALARM_INSTANCE_MANAGE; +import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_INSTANCE_CREATE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_PLUGIN_DELETE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ALERT_PLUGIN_UPDATE; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.Mockito.when; @@ -40,7 +42,6 @@ import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper; -import org.apache.dolphinscheduler.extract.alert.request.AlertSendResponse; import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; @@ -60,6 +61,9 @@ 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + /** * alert plugin instance service test */ @@ -90,12 +94,16 @@ public class AlertPluginInstanceServiceTest { private User user; + private User noPermUser; + private final AlertPluginInstanceType normalInstanceType = AlertPluginInstanceType.NORMAL; private final AlertPluginInstanceType globalInstanceType = AlertPluginInstanceType.GLOBAL; private final WarningType warningType = WarningType.ALL; + private final Integer GLOBAL_ALERT_GROUP_ID = 2; + private String uiParams = "[\n" + " {\n" + " \"field\":\"userParams\",\n" @@ -172,21 +180,33 @@ public class AlertPluginInstanceServiceTest { private String paramsMap = "{\"path\":\"/kris/script/path\",\"userParams\":\"userParams\",\"type\":\"0\"}"; + private AlertPluginInstance alertPluginInstance; + @BeforeEach public void before() { user = new User(); user.setUserType(UserType.ADMIN_USER); user.setId(1); - AlertPluginInstance alertPluginInstance = getAlertPluginInstance(1, normalInstanceType, "test"); + + noPermUser = new User(); + noPermUser.setUserType(UserType.GENERAL_USER); + noPermUser.setId(2); + + alertPluginInstance = getAlertPluginInstance(1, normalInstanceType, "test"); alertPluginInstances = new ArrayList<>(); alertPluginInstances.add(alertPluginInstance); } @Test public void testCreate() { + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, + noPermUser.getId(), ALERT_INSTANCE_CREATE, baseServiceLogger)).thenReturn(false); + assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, () -> alertPluginInstanceService.create(noPermUser, + 1, "test", normalInstanceType, warningType, uiParams)); + when(alertPluginInstanceMapper.existInstanceName("test")).thenReturn(true); when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, - 1, 
ALART_INSTANCE_CREATE, baseServiceLogger)).thenReturn(true); + 1, ALERT_INSTANCE_CREATE, baseServiceLogger)).thenReturn(true); when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, null, 0, baseServiceLogger)).thenReturn(true); assertThrowsServiceException(Status.PLUGIN_INSTANCE_ALREADY_EXISTS, @@ -195,6 +215,19 @@ public void testCreate() { AlertPluginInstance alertPluginInstance = alertPluginInstanceService.create(user, 1, "test1", normalInstanceType, warningType, uiParams); assertNotNull(alertPluginInstance); + + when(alertGroupMapper.selectById(GLOBAL_ALERT_GROUP_ID)).thenReturn(getGlobalAlertGroup()); + assertDoesNotThrow(() -> alertPluginInstanceService.create(user, 1, "global_plugin_instance", + AlertPluginInstanceType.GLOBAL, warningType, uiParams)); + + when(alertGroupMapper.selectById(GLOBAL_ALERT_GROUP_ID)).thenReturn(getGlobalAlertGroup("1")); + assertDoesNotThrow(() -> alertPluginInstanceService.create(user, 1, "global_plugin_instance", + AlertPluginInstanceType.GLOBAL, warningType, uiParams)); + + when(alertPluginInstanceMapper.insert(Mockito.any())).thenReturn(-1); + assertThrowsServiceException(Status.SAVE_ERROR, + () -> alertPluginInstanceService.create(user, 1, "test_insert_error", normalInstanceType, warningType, + uiParams)); } @Test @@ -202,11 +235,10 @@ public void testSendAlert() { Mockito.when(registryClient.getServerList(RegistryNodeType.ALERT_SERVER)).thenReturn(new ArrayList<>()); assertThrowsServiceException(Status.ALERT_SERVER_NOT_EXIST, () -> alertPluginInstanceService.testSend(1, uiParams)); - AlertSendResponse.AlertSendResponseResult alertResult = new AlertSendResponse.AlertSendResponseResult(); - alertResult.setSuccess(true); Server server = new Server(); server.setPort(50052); server.setHost("127.0.0.1"); + Mockito.when(registryClient.getServerList(RegistryNodeType.ALERT_SERVER)) .thenReturn(Collections.singletonList(server)); 
assertThrowsServiceException(Status.ALERT_TEST_SENDING_FAILED, @@ -215,6 +247,11 @@ public void testSendAlert() { @Test public void testDelete() { + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, + noPermUser.getId(), ALERT_PLUGIN_DELETE, baseServiceLogger)).thenReturn(false); + assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, + () -> alertPluginInstanceService.deleteById(noPermUser, 1)); + List ids = Arrays.asList("11,2,3", "5,96", null, "98,1"); when(alertGroupMapper.queryInstanceIdsList()).thenReturn(ids); when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, @@ -241,10 +278,18 @@ public void testDelete() { when(alertPluginInstanceMapper.deleteById(5)).thenReturn(1); Assertions.assertDoesNotThrow(() -> alertPluginInstanceService.deleteById(user, 5)); + + when(alertGroupMapper.queryInstanceIdsList()).thenReturn(Collections.emptyList()); + Assertions.assertDoesNotThrow(() -> alertPluginInstanceService.deleteById(user, 9)); } @Test public void testUpdate() { + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, + noPermUser.getId(), ALERT_PLUGIN_UPDATE, baseServiceLogger)).thenReturn(false); + assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, + () -> alertPluginInstanceService.updateById(noPermUser, 1, "test", warningType, uiParams)); + when(alertPluginInstanceMapper.updateById(Mockito.any())).thenReturn(0); when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, 1, ALERT_PLUGIN_UPDATE, baseServiceLogger)).thenReturn(true); @@ -259,8 +304,51 @@ public void testUpdate() { Assertions.assertNotNull(alertPluginInstance); } + @Test + public void testGetById() { + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, + noPermUser.getId(), ALARM_INSTANCE_MANAGE, baseServiceLogger)).thenReturn(false); + 
assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, + () -> alertPluginInstanceService.getById(noPermUser, 1)); + + when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, + user.getId(), ALARM_INSTANCE_MANAGE, baseServiceLogger)).thenReturn(true); + when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ALERT_PLUGIN_INSTANCE, null, 0, + baseServiceLogger)).thenReturn(true); + when(alertPluginInstanceMapper.selectById(1)) + .thenReturn(getAlertPluginInstance(1, AlertPluginInstanceType.NORMAL, "test_get_instance")); + + Assertions.assertEquals(alertPluginInstanceService.getById(user, 1).getId(), 1); + } + + @Test + public void testCheckExistPluginInstanceName() { + when(alertPluginInstanceMapper.existInstanceName(Mockito.any(String.class))).thenReturn(false); + Assertions.assertEquals(false, alertPluginInstanceService.checkExistPluginInstanceName("test")); + } + + @Test + public void testListPaging() { + IPage page = new Page<>(); + page.setRecords(Collections.singletonList(alertPluginInstance)); + page.setTotal(1); + page.setPages(1); + + when(alertPluginInstanceMapper.queryByInstanceNamePage(Mockito.any(Page.class), Mockito.any(String.class))) + .thenReturn(page); + assertDoesNotThrow(() -> alertPluginInstanceService.listPaging(user, "test", 1, 1)); + } + @Test public void testQueryAll() { + when(alertPluginInstanceMapper.queryAllAlertPluginInstanceList()).thenReturn(Collections.emptyList()); + Assertions.assertEquals(0, alertPluginInstanceService.queryAll().size()); + + when(alertPluginInstanceMapper.queryAllAlertPluginInstanceList()) + .thenReturn(Collections.singletonList(alertPluginInstance)); + when(pluginDefineMapper.queryAllPluginDefineList()).thenReturn(Collections.emptyList()); + Assertions.assertEquals(0, alertPluginInstanceService.queryAll().size()); + AlertPluginInstance alertPluginInstance = getAlertPluginInstance(1, normalInstanceType, "test"); PluginDefine pluginDefine = 
new PluginDefine("script", "script", uiParams); pluginDefine.setId(1); @@ -283,4 +371,11 @@ private AlertPluginInstance getAlertPluginInstance(int id, AlertPluginInstanceTy return alertPluginInstance; } + private AlertGroup getGlobalAlertGroup(String... alertPluginInstanceIds) { + AlertGroup globalAlertGroup = new AlertGroup(); + globalAlertGroup.setId(2); + globalAlertGroup.setAlertInstanceIds(String.join(",", alertPluginInstanceIds)); + + return globalAlertGroup; + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AuditServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AuditServiceTest.java index 3bde4a1b8d0c..86238f05980d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AuditServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AuditServiceTest.java @@ -21,9 +21,10 @@ import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.service.impl.AuditServiceImpl; +import org.apache.dolphinscheduler.common.enums.AuditModelType; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.AuditLog; -import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AuditLogMapper; import java.util.ArrayList; @@ -66,15 +67,16 @@ public void testQueryLogListPaging() { page.setRecords(getLists()); page.setTotal(1L); when(auditLogMapper.queryAuditLog(Mockito.any(Page.class), Mockito.any(), Mockito.any(), Mockito.eq(""), + Mockito.eq(""), eq(start), eq(end))).thenReturn(page); Assertions.assertDoesNotThrow(() -> { auditService.queryLogListPaging( - new User(), - null, - null, + "", + "", "2020-11-01 00:00:00", "2020-11-02 00:00:00", "", + "", 1, 10); }); @@ -89,8 +91,8 @@ private List getLists() { private AuditLog getAuditLog() { AuditLog auditLog = 
new AuditLog(); auditLog.setUserName("testName"); - auditLog.setOperation(0); - auditLog.setResourceType(0); + auditLog.setOperationType(AuditOperationType.CREATE.getName()); + auditLog.setModelType(AuditModelType.PROJECT.getName()); return auditLog; } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index 32033cc2afa1..a2d655f47f55 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -32,12 +32,15 @@ import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService; import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl; +import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.vo.TaskInstanceCountVO; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; @@ -70,6 +73,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + /** * data analysis service test */ @@ -110,13 +116,17 @@ public class DataAnalysisServiceTest { 
private User user; + private Project project; + @BeforeEach public void setUp() { user = new User(); + user.setUserType(UserType.ADMIN_USER); user.setId(1); - Project project = new Project(); + project = new Project(); project.setId(1); + project.setCode(1); project.setName("test"); resultMap = new HashMap<>(); @@ -285,6 +295,66 @@ public void testCountQueueState() { .allMatch(count -> count.equals(0)); } + @Test + public void testListPendingCommands() { + IPage page = new Page<>(1, 10); + page.setTotal(2L); + page.setRecords(getList()); + when(commandMapper.queryCommandPage(any())).thenReturn(page); + PageInfo list1 = dataAnalysisServiceImpl.listPendingCommands(user, 1L, 1, 10); + assertThat(list1.getTotal()).isEqualTo(2); + user.setUserType(UserType.GENERAL_USER); + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, 1, + serviceLogger)) + .thenReturn(projectIds()); + PageInfo list2 = dataAnalysisServiceImpl.listPendingCommands(user, 1L, 1, 10); + assertThat(list2.getTotal()).isEqualTo(0); + when(projectMapper.selectBatchIds(any())).thenReturn(Collections.singletonList(project)); + when(processDefinitionMapper.queryDefinitionCodeListByProjectCodes(any())) + .thenReturn(Collections.singletonList(1L)); + when(commandMapper.queryCommandPageByIds(any(), any())).thenReturn(page); + PageInfo list3 = dataAnalysisServiceImpl.listPendingCommands(user, 1L, 1, 10); + assertThat(list3.getTotal()).isEqualTo(2); + } + + @Test + public void testListErrorCommand() { + IPage page = new Page<>(1, 10); + page.setTotal(2L); + page.setRecords(getErrorList()); + when(errorCommandMapper.queryErrorCommandPage(any())).thenReturn(page); + PageInfo list1 = dataAnalysisServiceImpl.listErrorCommand(user, 1L, 1, 10); + assertThat(list1.getTotal()).isEqualTo(2); + user.setUserType(UserType.GENERAL_USER); + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, 1, + serviceLogger)) + .thenReturn(projectIds()); 
+ PageInfo list2 = dataAnalysisServiceImpl.listErrorCommand(user, 1L, 1, 10); + assertThat(list2.getTotal()).isEqualTo(0); + when(projectMapper.selectBatchIds(any())).thenReturn(Collections.singletonList(project)); + when(processDefinitionMapper.queryDefinitionCodeListByProjectCodes(any())) + .thenReturn(Collections.singletonList(1L)); + when(errorCommandMapper.queryErrorCommandPageByIds(any(), any())).thenReturn(page); + PageInfo list3 = dataAnalysisServiceImpl.listErrorCommand(user, 1L, 1, 10); + assertThat(list3.getTotal()).isEqualTo(2); + } + + private List getList() { + List commandList = new ArrayList<>(); + Command command = new Command(); + command.setId(1); + commandList.add(command); + return commandList; + } + + private List getErrorList() { + List commandList = new ArrayList<>(); + ErrorCommand command = new ErrorCommand(); + command.setId(1); + commandList.add(command); + return commandList; + } + private Set projectIds() { Set projectIds = new HashSet<>(); projectIds.add(1); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java index 68d4db0ff2ad..b4e7aae4b8d3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java @@ -52,15 +52,15 @@ import org.apache.commons.collections4.CollectionUtils; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; +import java.util.Random; import java.util.concurrent.ExecutionException; import org.junit.jupiter.api.Assertions; @@ -73,6 +73,9 @@ import 
org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.dao.DuplicateKeyException; + +import com.baomidou.mybatisplus.core.metadata.IPage; /** * data source service test @@ -96,6 +99,15 @@ public class DataSourceServiceTest { @Mock private ResourcePermissionCheckService resourcePermissionCheckService; + @Mock + private IPage dataSourceList; + + private String randomStringWithLengthN(int n) { + byte[] bitArray = new byte[n]; + new Random().nextBytes(bitArray); + return new String(bitArray, StandardCharsets.UTF_8); + } + private void passResourcePermissionCheckService() { when(resourcePermissionCheckService.operationPermissionCheck(Mockito.any(), Mockito.anyInt(), Mockito.anyString(), Mockito.any())).thenReturn(true); @@ -131,6 +143,7 @@ public void createDataSourceTest() throws ExecutionException { when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList); passResourcePermissionCheckService(); + // DATASOURCE_EXIST assertThrowsServiceException(Status.DATASOURCE_EXIST, () -> dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam)); @@ -140,13 +153,24 @@ public void createDataSourceTest() throws ExecutionException { when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null); + // DESCRIPTION TOO LONG + postgreSqlDatasourceParam.setNote(randomStringWithLengthN(512)); + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam)); + postgreSqlDatasourceParam.setNote(dataSourceDesc); + // SUCCESS assertDoesNotThrow(() -> dataSourceService.createDataSource(loginUser, postgreSqlDatasourceParam)); + + // Duplicated Key Exception + when(dataSourceMapper.insert(Mockito.any(DataSource.class))).thenThrow(DuplicateKeyException.class); + assertThrowsServiceException(Status.DATASOURCE_EXIST, + () -> dataSourceService.createDataSource(loginUser, 
postgreSqlDatasourceParam)); } } @Test - public void updateDataSourceTest() throws ExecutionException { + public void updateDataSourceTest() { User loginUser = getAdminUser(); int dataSourceId = 12; @@ -200,32 +224,74 @@ public void updateDataSourceTest() throws ExecutionException { // DATASOURCE_CONNECT_FAILED when(dataSourceMapper.queryDataSourceByName(postgreSqlDatasourceParam.getName())).thenReturn(null); + // DESCRIPTION TOO LONG + postgreSqlDatasourceParam.setNote(randomStringWithLengthN(512)); + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> dataSourceService.updateDataSource(loginUser, postgreSqlDatasourceParam)); + postgreSqlDatasourceParam.setNote(dataSourceDesc); + // SUCCESS assertDoesNotThrow(() -> dataSourceService.updateDataSource(loginUser, postgreSqlDatasourceParam)); + + // Duplicated Key Exception + when(dataSourceMapper.updateById(Mockito.any(DataSource.class))).thenThrow(DuplicateKeyException.class); + assertThrowsServiceException(Status.DATASOURCE_EXIST, + () -> dataSourceService.updateDataSource(loginUser, postgreSqlDatasourceParam)); } } @Test - public void queryDataSourceListPagingTest() { - User loginUser = getAdminUser(); + public void testQueryDataSourceListPaging() { + + User adminUser = getAdminUser(); + User generalUser = getGeneralUser(); String searchVal = ""; int pageNo = 1; int pageSize = 10; PageInfo pageInfo = - dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize); + dataSourceService.queryDataSourceListPaging(adminUser, searchVal, pageNo, pageSize); Assertions.assertNotNull(pageInfo); + + // test query datasource as general user with no datasource authed + when(dataSourceList.getRecords()).thenReturn(getSingleDataSourceList()); + when(dataSourceMapper.selectPagingByIds(Mockito.any(), Mockito.any(), Mockito.any())) + .thenReturn(dataSourceList); + assertDoesNotThrow(() -> dataSourceService.queryDataSourceListPaging(generalUser, searchVal, pageNo, pageSize)); + + // test 
query datasource as general user with datasource authed + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, + generalUser.getId(), dataSourceServiceLogger)).thenReturn(Collections.singleton(1)); + + assertDoesNotThrow(() -> dataSourceService.queryDataSourceListPaging(generalUser, searchVal, pageNo, pageSize)); } @Test - public void connectionTest() { + public void testConnectionTest() { int dataSourceId = -1; when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null); assertThrowsServiceException(Status.RESOURCE_NOT_EXIST, () -> dataSourceService.connectionTest(dataSourceId)); + + try ( + MockedStatic ignored = + Mockito.mockStatic(DataSourceUtils.class)) { + DataSource dataSource = getOracleDataSource(999); + when(dataSourceMapper.selectById(dataSource.getId())).thenReturn(dataSource); + DataSourceProcessor dataSourceProcessor = Mockito.mock(DataSourceProcessor.class); + + when(DataSourceUtils.getDatasourceProcessor(Mockito.any())).thenReturn(dataSourceProcessor); + when(dataSourceProcessor.checkDataSourceConnectivity(Mockito.any())).thenReturn(true); + assertDoesNotThrow(() -> dataSourceService.connectionTest(dataSource.getId())); + + when(dataSourceProcessor.checkDataSourceConnectivity(Mockito.any())).thenReturn(false); + assertThrowsServiceException(Status.CONNECTION_TEST_FAILURE, + () -> dataSourceService.connectionTest(dataSource.getId())); + } + } @Test - public void deleteTest() { + public void testDelete() { User loginUser = getAdminUser(); int dataSourceId = 1; // resource not exist @@ -252,7 +318,7 @@ public void deleteTest() { } @Test - public void unauthDatasourceTest() { + public void testUnAuthDatasource() { User loginUser = getAdminUser(); loginUser.setId(1); loginUser.setUserType(UserType.ADMIN_USER); @@ -279,7 +345,7 @@ public void unauthDatasourceTest() { } @Test - public void authedDatasourceTest() { + public void testAuthedDatasource() { User loginUser = getAdminUser(); loginUser.setId(1); 
loginUser.setUserType(UserType.ADMIN_USER); @@ -300,19 +366,28 @@ public void authedDatasourceTest() { } @Test - public void queryDataSourceListTest() { - User loginUser = new User(); - loginUser.setUserType(UserType.GENERAL_USER); - Set dataSourceIds = new HashSet<>(); - dataSourceIds.add(1); + public void testQueryDataSourceList() { + User adminUser = getAdminUser(); + assertDoesNotThrow(() -> dataSourceService.queryDataSourceList(adminUser, DbType.MYSQL.ordinal())); + + User generalUser = getGeneralUser(); + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, - loginUser.getId(), dataSourceServiceLogger)).thenReturn(dataSourceIds); + generalUser.getId(), dataSourceServiceLogger)).thenReturn(Collections.emptySet()); + List emptyList = dataSourceService.queryDataSourceList(generalUser, DbType.MYSQL.ordinal()); + Assertions.assertEquals(emptyList.size(), 0); + + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, + generalUser.getId(), dataSourceServiceLogger)).thenReturn(Collections.singleton(1)); DataSource dataSource = new DataSource(); + dataSource.setId(1); dataSource.setType(DbType.MYSQL); - when(dataSourceMapper.selectBatchIds(dataSourceIds)).thenReturn(Collections.singletonList(dataSource)); + when(dataSourceMapper.selectBatchIds(Collections.singleton(1))) + .thenReturn(Collections.singletonList(dataSource)); + List list = - dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal()); + dataSourceService.queryDataSourceList(generalUser, DbType.MYSQL.ordinal()); Assertions.assertNotNull(list); } @@ -327,21 +402,28 @@ public void verifyDataSourceNameTest() { } @Test - public void queryDataSourceTest() { - when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null); + public void testQueryDataSource() { + // datasource not exists + when(dataSourceMapper.selectById(999)).thenReturn(null); User loginUser = new User(); 
loginUser.setUserType(UserType.GENERAL_USER); loginUser.setId(2); - try { - dataSourceService.queryDataSource(Mockito.anyInt(), loginUser); - } catch (Exception e) { - Assertions.assertTrue(e.getMessage().contains(Status.RESOURCE_NOT_EXIST.getMsg())); - } + + assertThrowsServiceException(Status.RESOURCE_NOT_EXIST, + () -> dataSourceService.queryDataSource(999, loginUser)); DataSource dataSource = getOracleDataSource(1); - when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(dataSource); + when(dataSourceMapper.selectById(dataSource.getId())).thenReturn(dataSource); when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.DATASOURCE, loginUser.getId(), DATASOURCE, baseServiceLogger)).thenReturn(true); + + // no perm + when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATASOURCE, + new Object[]{dataSource.getId()}, loginUser.getId(), baseServiceLogger)).thenReturn(false); + assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, + () -> dataSourceService.queryDataSource(dataSource.getId(), loginUser)); + + // success when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATASOURCE, new Object[]{dataSource.getId()}, loginUser.getId(), baseServiceLogger)).thenReturn(true); BaseDataSourceParamDTO paramDTO = dataSourceService.queryDataSource(dataSource.getId(), loginUser); @@ -472,8 +554,16 @@ public void buildParameterWithDecodePassword() { */ private User getAdminUser() { User loginUser = new User(); - loginUser.setId(-1); + loginUser.setId(1); loginUser.setUserName("admin"); + loginUser.setUserType(UserType.ADMIN_USER); + return loginUser; + } + + private User getGeneralUser() { + User loginUser = new User(); + loginUser.setId(2); + loginUser.setUserName("user"); loginUser.setUserType(UserType.GENERAL_USER); return loginUser; } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecuteFunctionServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecuteFunctionServiceTest.java index 23085cb3f595..04dabf5eadcb 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecuteFunctionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecuteFunctionServiceTest.java @@ -67,6 +67,8 @@ import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; +import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import org.apache.dolphinscheduler.service.command.CommandService; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.process.TriggerRelationService; @@ -242,7 +244,7 @@ public void init() { Mockito.when(processService.getTenantForProcess(tenantCode, userId)).thenReturn(tenantCode); doReturn(1).when(commandService).createCommand(argThat(c -> c.getId() == null)); doReturn(0).when(commandService).createCommand(argThat(c -> c.getId() != null)); - Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(getMasterServersList()); + Mockito.when(monitorService.listServer(RegistryNodeType.MASTER)).thenReturn(getMasterServersList()); Mockito.when(processService.findProcessInstanceDetailById(processInstanceId)) .thenReturn(Optional.ofNullable(processInstance)); Mockito.when(processService.findProcessDefinition(1L, 1)).thenReturn(this.processDefinition); @@ -270,7 +272,7 @@ public void testNoComplement() { null, null, null, null, null, RunMode.RUN_MODE_SERIAL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 10, null, null, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 10, null, null, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, 
ComplementDependentMode.OFF_MODE, null, @@ -297,7 +299,7 @@ public void testComplementWithStartNodeList() { null, "123456789,987654321", null, null, null, RunMode.RUN_MODE_SERIAL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, null, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, null, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, @@ -322,7 +324,7 @@ public void testComplementWithOldStartNodeList() { null, "1123456789,987654321", null, null, null, RunMode.RUN_MODE_SERIAL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, 0, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, 0, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, @@ -353,14 +355,14 @@ public void testComplementWithDependentMode() { dependentProcessDefinition.setProcessDefinitionCode(2); dependentProcessDefinition.setProcessDefinitionVersion(1); dependentProcessDefinition.setTaskDefinitionCode(1); - dependentProcessDefinition.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + dependentProcessDefinition.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); dependentProcessDefinition.setTaskParams( "{\"localParams\":[],\"resourceList\":[],\"dependence\":{\"relation\":\"AND\",\"dependTaskList\":[{\"relation\":\"AND\",\"dependItemList\":[{\"depTaskCode\":2,\"status\":\"SUCCESS\"}]}]},\"conditionResult\":{\"successNode\":[1],\"failedNode\":[1]}}"); Mockito.when(processService.queryDependentProcessDefinitionByProcessDefinitionCode(processDefinitionCode)) .thenReturn(Lists.newArrayList(dependentProcessDefinition)); Map processDefinitionWorkerGroupMap = new HashMap<>(); - processDefinitionWorkerGroupMap.put(1L, Constants.DEFAULT_WORKER_GROUP); + processDefinitionWorkerGroupMap.put(1L, WorkerGroupUtils.getDefaultWorkerGroup()); 
Mockito.when(workerGroupService.queryWorkerGroupByProcessDefinitionCodes(Lists.newArrayList(1L))) .thenReturn(processDefinitionWorkerGroupMap); @@ -369,7 +371,7 @@ public void testComplementWithDependentMode() { command.setCommandType(CommandType.COMPLEMENT_DATA); command.setCommandParam( "{\"StartNodeList\":\"1\",\"complementStartDate\":\"2020-01-01 00:00:00\",\"complementEndDate\":\"2020-01-31 23:00:00\"}"); - command.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + command.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); command.setProcessDefinitionCode(processDefinitionCode); command.setExecutorId(1); @@ -382,7 +384,7 @@ public void testComplementWithDependentMode() { childDependent.setProcessDefinitionCode(3); childDependent.setProcessDefinitionVersion(1); childDependent.setTaskDefinitionCode(4); - childDependent.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + childDependent.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); childDependent.setTaskParams( "{\"localParams\":[],\"resourceList\":[],\"dependence\":{\"relation\":\"AND\",\"dependTaskList\":[{\"relation\":\"AND\",\"dependItemList\":[{\"depTaskCode\":3,\"status\":\"SUCCESS\"}]}]},\"conditionResult\":{\"successNode\":[1],\"failedNode\":[1]}}"); Mockito.when(processService.queryDependentProcessDefinitionByProcessDefinitionCode( @@ -408,7 +410,8 @@ public void testDateError() { null, null, null, null, null, RunMode.RUN_MODE_SERIAL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, 2, Constants.DRY_RUN_FLAG_NO, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, 2, + Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, false, @@ -433,7 +436,7 @@ public void testSerial() { null, null, null, null, null, RunMode.RUN_MODE_SERIAL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, null, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, 
null, null, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, @@ -459,7 +462,8 @@ public void testParallelWithOutSchedule() { null, null, null, null, null, RunMode.RUN_MODE_PARALLEL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, 2, Constants.DRY_RUN_FLAG_NO, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, 2, + Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, false, @@ -485,7 +489,7 @@ public void testParallelWithSchedule() { null, null, null, null, null, RunMode.RUN_MODE_PARALLEL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, 15, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, 15, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_NO, ComplementDependentMode.OFF_MODE, null, @@ -498,7 +502,7 @@ public void testParallelWithSchedule() { @Test public void testNoMasterServers() { - Mockito.when(monitorService.getServerListFromRegistry(true)).thenReturn(new ArrayList<>()); + Mockito.when(monitorService.listServer(RegistryNodeType.MASTER)).thenReturn(new ArrayList<>()); Assertions.assertThrows(ServiceException.class, () -> executorService.execProcessInstance( loginUser, @@ -513,7 +517,7 @@ public void testNoMasterServers() { null, RunMode.RUN_MODE_PARALLEL, Priority.LOW, - Constants.DEFAULT_WORKER_GROUP, + WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, @@ -552,7 +556,7 @@ public void testOfTestRun() { null, null, null, null, 0, RunMode.RUN_MODE_PARALLEL, - Priority.LOW, Constants.DEFAULT_WORKER_GROUP, tenantCode, 100L, 110, null, 15, + Priority.LOW, WorkerGroupUtils.getDefaultWorkerGroup(), tenantCode, 100L, 110, null, 15, Constants.DRY_RUN_FLAG_NO, Constants.TEST_FLAG_YES, ComplementDependentMode.OFF_MODE, null, diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index 2c4de2ab7ef4..972092602fe6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -18,8 +18,10 @@ package org.apache.dolphinscheduler.api.service; import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowsServiceException; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.DOWNLOAD_LOG; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.VIEW_LOG; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.when; @@ -38,7 +40,6 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServer; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import org.apache.dolphinscheduler.extract.base.server.SpringServerMethodInvokerDiscovery; import org.apache.dolphinscheduler.extract.common.ILogService; @@ -89,7 +90,7 @@ public class LoggerServiceTest { @Mock private TaskDefinitionMapper taskDefinitionMapper; - private NettyRemotingServer nettyRemotingServer; + private SpringServerMethodInvokerDiscovery springServerMethodInvokerDiscovery; private int nettyServerPort = 18080; @@ -101,19 +102,32 @@ public void setUp() { return; } - nettyRemotingServer = new NettyRemotingServer(NettyServerConfig.builder().listenPort(nettyServerPort).build()); - nettyRemotingServer.start(); - 
SpringServerMethodInvokerDiscovery springServerMethodInvokerDiscovery = - new SpringServerMethodInvokerDiscovery(nettyRemotingServer); - springServerMethodInvokerDiscovery.postProcessAfterInitialization(new ILogService() { + springServerMethodInvokerDiscovery = new SpringServerMethodInvokerDiscovery( + NettyServerConfig.builder().serverName("TestLogServer").listenPort(nettyServerPort).build()); + springServerMethodInvokerDiscovery.start(); + springServerMethodInvokerDiscovery.registerServerMethodInvokerProvider(new ILogService() { @Override public TaskInstanceLogFileDownloadResponse getTaskInstanceWholeLogFileBytes(TaskInstanceLogFileDownloadRequest taskInstanceLogFileDownloadRequest) { - return new TaskInstanceLogFileDownloadResponse(new byte[0]); + if (taskInstanceLogFileDownloadRequest.getTaskInstanceId() == 1) { + return new TaskInstanceLogFileDownloadResponse(new byte[0]); + } else if (taskInstanceLogFileDownloadRequest.getTaskInstanceId() == 10) { + return new TaskInstanceLogFileDownloadResponse("log content".getBytes()); + } + + throw new ServiceException("download error"); } @Override public TaskInstanceLogPageQueryResponse pageQueryTaskInstanceLog(TaskInstanceLogPageQueryRequest taskInstanceLogPageQueryRequest) { + if (taskInstanceLogPageQueryRequest.getTaskInstanceId() != null) { + if (taskInstanceLogPageQueryRequest.getTaskInstanceId() == 100) { + throw new ServiceException("query log error"); + } else if (taskInstanceLogPageQueryRequest.getTaskInstanceId() == 10) { + return new TaskInstanceLogPageQueryResponse("log content"); + } + } + return new TaskInstanceLogPageQueryResponse(); } @@ -126,13 +140,14 @@ public GetAppIdResponse getAppId(GetAppIdRequest getAppIdRequest) { public void removeTaskInstanceLog(String taskInstanceLogAbsolutePath) { } - }, "iLogServiceImpl"); + }); + springServerMethodInvokerDiscovery.start(); } @AfterEach public void tearDown() { - if (nettyRemotingServer != null) { - nettyRemotingServer.close(); + if 
(springServerMethodInvokerDiscovery != null) { + springServerMethodInvokerDiscovery.close(); } } @@ -177,6 +192,13 @@ public void testQueryLog() { when(taskInstanceDao.queryById(1)).thenReturn(taskInstance); result = loggerService.queryLog(loginUser, 1, 1, 1); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + + result = loggerService.queryLog(loginUser, 1, 0, 1); + Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + + taskInstance.setLogPath(""); + assertThrowsServiceException(Status.QUERY_TASK_INSTANCE_LOG_ERROR, + () -> loggerService.queryLog(loginUser, 1, 1, 1)); } @Test @@ -237,9 +259,15 @@ public void testQueryLogInSpecifiedProject() { loginUser.setUserType(UserType.GENERAL_USER); TaskInstance taskInstance = new TaskInstance(); when(taskInstanceDao.queryById(1)).thenReturn(taskInstance); + when(taskInstanceDao.queryById(10)).thenReturn(null); + + assertThrowsServiceException(Status.TASK_INSTANCE_NOT_FOUND, + () -> loggerService.queryLog(loginUser, projectCode, 10, 1, 1)); + TaskDefinition taskDefinition = new TaskDefinition(); taskDefinition.setProjectCode(projectCode); taskDefinition.setCode(1L); + // SUCCESS taskInstance.setTaskCode(1L); taskInstance.setId(1); @@ -249,13 +277,27 @@ public void testQueryLogInSpecifiedProject() { when(taskInstanceDao.queryById(1)).thenReturn(taskInstance); when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); assertDoesNotThrow(() -> loggerService.queryLog(loginUser, projectCode, 1, 1, 1)); + + taskDefinition.setProjectCode(10); + assertThrowsServiceException(Status.TASK_INSTANCE_NOT_FOUND, + () -> loggerService.queryLog(loginUser, projectCode, 1, 1, 1)); + + taskDefinition.setProjectCode(1); + taskInstance.setId(10); + when(taskInstanceDao.queryById(10)).thenReturn(taskInstance); + String result = loggerService.queryLog(loginUser, projectCode, 10, 1, 1); + assertEquals("log content", result); + + taskInstance.setId(100); + 
when(taskInstanceDao.queryById(100)).thenReturn(taskInstance); + assertThrowsServiceException(Status.QUERY_TASK_INSTANCE_LOG_ERROR, + () -> loggerService.queryLog(loginUser, projectCode, 10, 1, 1)); } @Test public void testGetLogBytesInSpecifiedProject() { long projectCode = 1L; when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); - Project project = getProject(projectCode); User loginUser = new User(); loginUser.setId(-1); @@ -272,9 +314,24 @@ public void testGetLogBytesInSpecifiedProject() { taskInstance.setHost("127.0.0.1:" + nettyServerPort); taskInstance.setLogPath("/temp/log"); doNothing().when(projectService).checkProjectAndAuthThrowException(loginUser, projectCode, DOWNLOAD_LOG); + + when(taskInstanceDao.queryById(1)).thenReturn(null); + assertThrowsServiceException( + Status.INTERNAL_SERVER_ERROR_ARGS, () -> loggerService.getLogBytes(loginUser, projectCode, 1)); + when(taskInstanceDao.queryById(1)).thenReturn(taskInstance); when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); assertDoesNotThrow(() -> loggerService.getLogBytes(loginUser, projectCode, 1)); + + taskDefinition.setProjectCode(2L); + assertThrowsServiceException(Status.INTERNAL_SERVER_ERROR_ARGS, + () -> loggerService.getLogBytes(loginUser, projectCode, 1)); + + taskDefinition.setProjectCode(1L); + taskInstance.setId(100); + when(taskInstanceDao.queryById(100)).thenReturn(taskInstance); + assertThrowsServiceException(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR, + () -> loggerService.getLogBytes(loginUser, projectCode, 100)); } /** diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java index 6bfbffd0936d..e1318e936dfb 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java @@ -98,13 +98,13 @@ public void testQueryDatabaseState() { public void testQueryMaster() { mockPermissionCheck(ApiFuncIdentificationConstant.MONITOR_MASTER_VIEW, true); Mockito.when(registryClient.getServerList(RegistryNodeType.MASTER)).thenReturn(getServerList()); - assertDoesNotThrow(() -> monitorService.queryMaster(user)); + assertDoesNotThrow(() -> monitorService.listServer(RegistryNodeType.MASTER)); } @Test public void testQueryWorker() { Mockito.when(registryClient.getServerList(RegistryNodeType.WORKER)).thenReturn(getServerList()); - AssertionsHelper.assertDoesNotThrow(() -> monitorService.queryWorker(user)); + AssertionsHelper.assertDoesNotThrow(() -> monitorService.listServer(RegistryNodeType.WORKER)); } @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index ed3a5f639b2c..37af53dbbd56 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -72,6 +72,7 @@ import org.apache.dolphinscheduler.dao.repository.ProcessDefinitionDao; import org.apache.dolphinscheduler.dao.repository.ProcessDefinitionLogDao; import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.service.alert.ListenerEventAlertManager; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.spi.enums.DbType; @@ -1143,7 +1144,7 @@ private Schedule getSchedule() { schedule.setProcessInstancePriority(Priority.MEDIUM); schedule.setWarningType(WarningType.NONE); 
schedule.setWarningGroupId(1); - schedule.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + schedule.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); return schedule; } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index 9fb4d830528d..26bbc9541202 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -64,7 +64,6 @@ import org.apache.dolphinscheduler.dao.repository.ProcessInstanceMapDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.service.expand.CuringParamsService; import org.apache.dolphinscheduler.service.model.TaskNode; @@ -84,6 +83,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; @@ -142,9 +142,6 @@ public class ProcessInstanceServiceTest { @Mock TaskDefinitionMapper taskDefinitionMapper; - @Mock - TaskPluginManager taskPluginManager; - @Mock ScheduleMapper scheduleMapper; @@ -483,23 +480,6 @@ public void testQueryTaskListByProcessId() throws IOException { Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } - @Test - public void testParseLogForDependentResult() throws IOException { - String logString = - "[INFO] 2019-03-19 17:11:08.475 
org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]" - + " - [taskAppId=TASK_223_10739_452334] dependent item complete, :|| dependentKey: 223-ALL-day-last1Day, result: SUCCESS, dependentDate: Wed Mar 19 17:10:36 CST 2019\n" - + "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]" - + " - task : 223_10739_452334 exit status code : 0\n" - + "[root@node2 current]# "; - Map resultMap = - processInstanceService.parseLogForDependentResult(logString); - Assertions.assertEquals(1, resultMap.size()); - - resultMap.clear(); - resultMap = processInstanceService.parseLogForDependentResult(""); - Assertions.assertEquals(0, resultMap.size()); - } - @Test public void testQuerySubProcessInstanceByTaskId() { long projectCode = 1L; @@ -625,21 +605,28 @@ public void testUpdateProcessInstance() { List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); when(processDefinitionService.checkProcessNodeList(taskRelationJson, taskDefinitionLogs)).thenReturn(result); putMsg(result, Status.SUCCESS, projectCode); - when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); - Map processInstanceFinishRes = - processInstanceService.updateProcessInstance(loginUser, projectCode, 1, - taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", true, "", "", 0); - Assertions.assertEquals(Status.SUCCESS, processInstanceFinishRes.get(Constants.STATUS)); - - // success - when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); - putMsg(result, Status.SUCCESS, projectCode); - when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.FALSE, Boolean.FALSE)) - .thenReturn(1); - Map successRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1, - taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", Boolean.FALSE, "", "", 0); - Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + try ( + MockedStatic 
taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + Map processInstanceFinishRes = + processInstanceService.updateProcessInstance(loginUser, projectCode, 1, + taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", true, "", "", 0); + Assertions.assertEquals(Status.SUCCESS, processInstanceFinishRes.get(Constants.STATUS)); + + // success + when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); + putMsg(result, Status.SUCCESS, projectCode); + + when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.FALSE, Boolean.FALSE)) + .thenReturn(1); + Map successRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1, + taskRelationJson, taskDefinitionJson, "2020-02-21 00:00:00", Boolean.FALSE, "", "", 0); + Assertions.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } } @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectParameterServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectParameterServiceTest.java index 4ab22bda21d6..ef8ffd2768a9 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectParameterServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectParameterServiceTest.java @@ -17,27 +17,41 @@ package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.api.utils.ServiceTestUtil.getGeneralUser; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static 
org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.AssertionsHelper; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectParameterServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectParameter; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectParameterMapper; +import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; + +import java.util.Collections; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class ProjectParameterServiceTest { @@ -60,89 +74,200 @@ public class ProjectParameterServiceTest { public void testCreateProjectParameter() { User loginUser = getGeneralUser(); - // PROJECT_PARAMETER_ALREADY_EXISTS - Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); - Mockito.when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(getProjectParameter()); - Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + // PERMISSION DENIED + 
when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectParameterService.createProjectParameter(loginUser, projectCode, "key", "value", + DataType.VARCHAR.name()); + assertNull(result.getData()); + assertNull(result.getCode()); + assertNull(result.getMsg()); + + when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) .thenReturn(true); - Result result = projectParameterService.createProjectParameter(loginUser, projectCode, "key", "value"); - Assertions.assertEquals(Status.PROJECT_PARAMETER_ALREADY_EXISTS.getCode(), result.getCode()); + + // CODE GENERATION ERROR + try (MockedStatic ignored = Mockito.mockStatic(CodeGenerateUtils.class)) { + when(CodeGenerateUtils.genCode()).thenThrow(CodeGenerateUtils.CodeGenerateException.class); + + result = projectParameterService.createProjectParameter(loginUser, projectCode, "key", "value", + DataType.VARCHAR.name()); + assertEquals(Status.CREATE_PROJECT_PARAMETER_ERROR.getCode(), result.getCode()); + } + + // PROJECT_PARAMETER_ALREADY_EXISTS + when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(getProjectParameter()); + result = projectParameterService.createProjectParameter(loginUser, projectCode, "key", "value", + DataType.VARCHAR.name()); + assertEquals(Status.PROJECT_PARAMETER_ALREADY_EXISTS.getCode(), result.getCode()); + + // INSERT DATA ERROR + when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(null); + when(projectParameterMapper.insert(Mockito.any())).thenReturn(-1); + result = projectParameterService.createProjectParameter(loginUser, projectCode, "key1", "value", + DataType.VARCHAR.name()); + assertEquals(Status.CREATE_PROJECT_PARAMETER_ERROR.getCode(), result.getCode()); // SUCCESS - Mockito.when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(null); - 
Mockito.when(projectParameterMapper.insert(Mockito.any())).thenReturn(1); - result = projectParameterService.createProjectParameter(loginUser, projectCode, "key1", "value"); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + when(projectParameterMapper.insert(Mockito.any())).thenReturn(1); + result = projectParameterService.createProjectParameter(loginUser, projectCode, "key1", "value", + DataType.VARCHAR.name()); + assertEquals(Status.SUCCESS.getCode(), result.getCode()); } @Test public void testUpdateProjectParameter() { User loginUser = getGeneralUser(); + // NO PERMISSION + when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key", "value", + DataType.VARCHAR.name()); + assertNull(result.getData()); + assertNull(result.getCode()); + assertNull(result.getMsg()); + // PROJECT_PARAMETER_NOT_EXISTS - Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); - Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) .thenReturn(true); - Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); - Result result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key", "value"); - Assertions.assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); + result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key", "value", + DataType.VARCHAR.name()); + assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); // 
PROJECT_PARAMETER_ALREADY_EXISTS - Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); - Mockito.when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(getProjectParameter()); - result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key", "value"); - Assertions.assertEquals(Status.PROJECT_PARAMETER_ALREADY_EXISTS.getCode(), result.getCode()); + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); + when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(getProjectParameter()); + result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key", "value", + DataType.VARCHAR.name()); + assertEquals(Status.PROJECT_PARAMETER_ALREADY_EXISTS.getCode(), result.getCode()); + + // PROJECT_UPDATE_ERROR + when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(null); + when(projectParameterMapper.updateById(Mockito.any())).thenReturn(-1); + result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key1", "value", + DataType.VARCHAR.name()); + assertEquals(Status.UPDATE_PROJECT_PARAMETER_ERROR.getCode(), result.getCode()); // SUCCESS - Mockito.when(projectParameterMapper.selectOne(Mockito.any())).thenReturn(null); - Mockito.when(projectParameterMapper.updateById(Mockito.any())).thenReturn(1); - result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key1", "value"); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + when(projectParameterMapper.updateById(Mockito.any())).thenReturn(1); + result = projectParameterService.updateProjectParameter(loginUser, projectCode, 1, "key1", "value", + DataType.LONG.name()); + assertEquals(Status.SUCCESS.getCode(), result.getCode()); + ProjectParameter projectParameter = (ProjectParameter) result.getData(); + assertNotNull(projectParameter.getOperator()); + 
assertNotNull(projectParameter.getUpdateTime()); + assertEquals(DataType.LONG.name(), projectParameter.getParamDataType()); } @Test public void testDeleteProjectParametersByCode() { User loginUser = getGeneralUser(); + // NO PERMISSION + when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectParameterService.deleteProjectParametersByCode(loginUser, projectCode, 1); + assertNull(result.getData()); + assertNull(result.getCode()); + assertNull(result.getMsg()); + // PROJECT_PARAMETER_NOT_EXISTS - Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); - Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) .thenReturn(true); - Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); - Result result = projectParameterService.deleteProjectParametersByCode(loginUser, projectCode, 1); - Assertions.assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); + result = projectParameterService.deleteProjectParametersByCode(loginUser, projectCode, 1); + assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); + + // DATABASE OPERATION ERROR + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); + when(projectParameterMapper.deleteById(Mockito.anyInt())).thenReturn(-1); + result = projectParameterService.deleteProjectParametersByCode(loginUser, projectCode, 1); + assertEquals(Status.DELETE_PROJECT_PARAMETER_ERROR.getCode(), result.getCode()); // SUCCESS - 
Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); - Mockito.when(projectParameterMapper.deleteById(Mockito.anyInt())).thenReturn(1); + when(projectParameterMapper.deleteById(Mockito.anyInt())).thenReturn(1); result = projectParameterService.deleteProjectParametersByCode(loginUser, projectCode, 1); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + assertEquals(Status.SUCCESS.getCode(), result.getCode()); } @Test public void testQueryProjectParameterByCode() { User loginUser = getGeneralUser(); + // NO PERMISSION + when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class), + Mockito.any())) + .thenReturn(false); + + Result result = projectParameterService.queryProjectParameterByCode(loginUser, projectCode, 1); + assertNull(result.getData()); + assertNull(result.getCode()); + assertNull(result.getMsg()); + // PROJECT_PARAMETER_NOT_EXISTS - Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); - Mockito.when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class), + when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class), Mockito.any())).thenReturn(true); - Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); - Result result = projectParameterService.queryProjectParameterByCode(loginUser, projectCode, 1); - Assertions.assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(null); + result = projectParameterService.queryProjectParameterByCode(loginUser, projectCode, 1); + assertEquals(Status.PROJECT_PARAMETER_NOT_EXISTS.getCode(), result.getCode()); // SUCCESS - 
Mockito.when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); + when(projectParameterMapper.queryByCode(Mockito.anyLong())).thenReturn(getProjectParameter()); result = projectParameterService.queryProjectParameterByCode(loginUser, projectCode, 1); - Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + assertEquals(Status.SUCCESS.getCode(), result.getCode()); + } + + @Test + public void testQueryProjectParameterListPaging() { + User loginUser = getGeneralUser(); + Integer pageSize = 10; + Integer pageNo = 1; + + // NO PERMISSION + when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class), + Mockito.any())) + .thenReturn(false); + + Result result = + projectParameterService.queryProjectParameterListPaging(loginUser, projectCode, pageSize, pageNo, null, + DataType.VARCHAR.name()); + assertNull(result.getData()); + assertNull(result.getCode()); + assertNull(result.getMsg()); + + // SUCCESS + when(projectService.hasProjectAndPerm(any(), any(), any(Result.class), any())) + .thenReturn(true); + + Page page = new Page<>(pageNo, pageSize); + page.setRecords(Collections.singletonList(getProjectParameter())); + + when(projectParameterMapper.queryProjectParameterListPaging(any(), anyLong(), any(), any(), any())) + .thenReturn(page); + result = projectParameterService.queryProjectParameterListPaging(loginUser, projectCode, pageSize, pageNo, + null, null); + assertEquals(Status.SUCCESS.getCode(), result.getCode()); } - private User getGeneralUser() { - User loginUser = new User(); - loginUser.setUserType(UserType.GENERAL_USER); - loginUser.setUserName("userName"); - loginUser.setId(1); - return loginUser; + @Test + public void testBatchDeleteProjectParametersByCodes() { + User loginUser = getGeneralUser(); + + Result result = projectParameterService.batchDeleteProjectParametersByCodes(loginUser, projectCode, ""); + assertEquals(Status.PROJECT_PARAMETER_CODE_EMPTY.getCode(), 
result.getCode()); + + when(projectParameterMapper.queryByCodes(any())).thenReturn(Collections.singletonList(getProjectParameter())); + + AssertionsHelper.assertThrowsServiceException(Status.PROJECT_PARAMETER_NOT_EXISTS, + () -> projectParameterService.batchDeleteProjectParametersByCodes(loginUser, projectCode, "1,2")); + + projectParameterService.batchDeleteProjectParametersByCodes(loginUser, projectCode, "1"); } private Project getProject(long projectCode) { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectPreferenceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectPreferenceServiceTest.java index 530c15d48e61..7a74a7c26547 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectPreferenceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectPreferenceServiceTest.java @@ -60,28 +60,65 @@ public class ProjectPreferenceServiceTest { public void testUpdateProjectPreference() { User loginUser = getGeneralUser(); + // no permission + Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); + Assertions.assertNull(result.getCode()); + Assertions.assertNull(result.getData()); + Assertions.assertNull(result.getMsg()); + + // when preference exists in project + Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(null); Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + + // success Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) .thenReturn(true); - Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(null); 
Mockito.when(projectPreferenceMapper.insert(Mockito.any())).thenReturn(1); - Result result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); + result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); + Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + + // database operation fail + Mockito.when(projectPreferenceMapper.insert(Mockito.any())).thenReturn(-1); + result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); + Assertions.assertEquals(Status.CREATE_PROJECT_PREFERENCE_ERROR.getCode(), result.getCode()); + + // when preference exists in project + Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(getProjectPreference()); + + // success + Mockito.when(projectPreferenceMapper.updateById(Mockito.any())).thenReturn(1); + result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + + // database operation fail + Mockito.when(projectPreferenceMapper.updateById(Mockito.any())).thenReturn(-1); + result = projectPreferenceService.updateProjectPreference(loginUser, projectCode, "value"); + Assertions.assertEquals(Status.UPDATE_PROJECT_PREFERENCE_ERROR.getCode(), result.getCode()); } @Test public void testQueryProjectPreferenceByProjectCode() { User loginUser = getGeneralUser(); + // no permission + Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectPreferenceService.queryProjectPreferenceByProjectCode(loginUser, projectCode); + Assertions.assertNull(result.getCode()); + Assertions.assertNull(result.getData()); + Assertions.assertNull(result.getMsg()); + // PROJECT_PARAMETER_NOT_EXISTS Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); 
Mockito.when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class), Mockito.any())).thenReturn(true); Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(null); - Result result = projectPreferenceService.queryProjectPreferenceByProjectCode(loginUser, projectCode); + result = projectPreferenceService.queryProjectPreferenceByProjectCode(loginUser, projectCode); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); // SUCCESS @@ -94,14 +131,29 @@ public void testQueryProjectPreferenceByProjectCode() { public void testEnableProjectPreference() { User loginUser = getGeneralUser(); + // no permission + Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) + .thenReturn(false); + Result result = projectPreferenceService.enableProjectPreference(loginUser, projectCode, 1); + Assertions.assertNull(result.getCode()); + Assertions.assertNull(result.getData()); + Assertions.assertNull(result.getMsg()); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); Mockito.when(projectService.hasProjectAndWritePerm(Mockito.any(), Mockito.any(), Mockito.any(Result.class))) .thenReturn(true); + // success Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(getProjectPreference()); - Result result = projectPreferenceService.enableProjectPreference(loginUser, projectCode, 1); + Mockito.when(projectPreferenceMapper.updateById(Mockito.any())).thenReturn(1); + result = projectPreferenceService.enableProjectPreference(loginUser, projectCode, 2); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + // db operation fail + Mockito.when(projectPreferenceMapper.selectOne(Mockito.any())).thenReturn(getProjectPreference()); + Mockito.when(projectPreferenceMapper.updateById(Mockito.any())).thenReturn(-1); + result = projectPreferenceService.enableProjectPreference(loginUser, projectCode, 2); + 
Assertions.assertEquals(Status.UPDATE_PROJECT_PREFERENCE_STATE_ERROR.getCode(), result.getCode()); } private User getGeneralUser() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectWorkerGroupRelationServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectWorkerGroupRelationServiceTest.java index a8e8b8beb2f2..6f6b7ca2d6c1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectWorkerGroupRelationServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectWorkerGroupRelationServiceTest.java @@ -17,13 +17,17 @@ package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.api.utils.ServiceTestUtil.getAdminUser; +import static org.apache.dolphinscheduler.api.utils.ServiceTestUtil.getGeneralUser; + +import org.apache.dolphinscheduler.api.AssertionsHelper; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectWorkerGroupRelationServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectWorkerGroup; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; @@ -33,6 +37,7 @@ import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -77,27 +82,87 @@ public class ProjectWorkerGroupRelationServiceTest { @Test public void testAssignWorkerGroupsToProject() { + User generalUser = getGeneralUser(); 
User loginUser = getAdminUser(); + // no permission + Result result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(generalUser, projectCode, + getWorkerGroups()); + Assertions.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), result.getCode()); + + // project code is null + result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, null, + getWorkerGroups()); + Assertions.assertEquals(Status.PROJECT_NOT_EXIST.getCode(), result.getCode()); + + // worker group is empty + result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + Collections.emptyList()); + Assertions.assertEquals(Status.WORKER_GROUP_TO_PROJECT_IS_EMPTY.getCode(), result.getCode()); + + // project not exists Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(null); - Result result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, getWorkerGroups()); Assertions.assertEquals(Status.PROJECT_NOT_EXIST.getCode(), result.getCode()); + // worker group not exists WorkerGroup workerGroup = new WorkerGroup(); workerGroup.setName("test"); Mockito.when(projectMapper.queryByCode(Mockito.anyLong())).thenReturn(getProject()); - Mockito.when(workerGroupMapper.queryAllWorkerGroup()).thenReturn(Lists.newArrayList(workerGroup)); + Mockito.when(workerGroupMapper.queryAllWorkerGroup()).thenReturn(Collections.singletonList(workerGroup)); + result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + getDiffWorkerGroups()); + Assertions.assertEquals(Status.WORKER_GROUP_NOT_EXIST.getCode(), result.getCode()); + + // db insertion fail + Mockito.when(workerGroupMapper.queryAllWorkerGroup()).thenReturn(Collections.singletonList(workerGroup)); + Mockito.when(projectWorkerGroupMapper.insert(Mockito.any())).thenReturn(-1); + 
AssertionsHelper.assertThrowsServiceException(Status.ASSIGN_WORKER_GROUP_TO_PROJECT_ERROR, + () -> projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + getWorkerGroups())); + + // success Mockito.when(projectWorkerGroupMapper.insert(Mockito.any())).thenReturn(1); result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, getWorkerGroups()); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + + // success when there is diff between current wg and assigned wg + Mockito.when(projectWorkerGroupMapper.selectList(Mockito.any())) + .thenReturn(Collections.singletonList(getDiffProjectWorkerGroup())); + Mockito.when(projectWorkerGroupMapper.delete(Mockito.any())).thenReturn(1); + result = projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + getWorkerGroups()); + Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode()); + + // db deletion fail + Mockito.when(projectWorkerGroupMapper.delete(Mockito.any())).thenReturn(-1); + AssertionsHelper.assertThrowsServiceException(Status.ASSIGN_WORKER_GROUP_TO_PROJECT_ERROR, + () -> projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + getWorkerGroups())); + + // fail when wg is referenced by task definition + Mockito.when(taskDefinitionMapper.queryAllDefinitionList(Mockito.anyLong())) + .thenReturn(Collections.singletonList(getTaskDefinitionWithDiffWorkerGroup())); + AssertionsHelper.assertThrowsServiceException(Status.USED_WORKER_GROUP_EXISTS, + () -> projectWorkerGroupRelationService.assignWorkerGroupsToProject(loginUser, projectCode, + getWorkerGroups())); } @Test public void testQueryWorkerGroupsByProject() { + // no permission + Mockito.when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.anyMap(), Mockito.any())) + .thenReturn(false); + Map result = + projectWorkerGroupRelationService.queryWorkerGroupsByProject(getGeneralUser(), 
projectCode); + + Assertions.assertTrue(result.isEmpty()); + + // success Mockito.when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.anyMap(), Mockito.any())) .thenReturn(true); @@ -113,8 +178,7 @@ public void testQueryWorkerGroupsByProject() { Mockito.when(scheduleMapper.querySchedulerListByProjectName(Mockito.any())) .thenReturn(Lists.newArrayList()); - Map result = - projectWorkerGroupRelationService.queryWorkerGroupsByProject(getGeneralUser(), projectCode); + result = projectWorkerGroupRelationService.queryWorkerGroupsByProject(getGeneralUser(), projectCode); ProjectWorkerGroup[] actualValue = ((List) result.get(Constants.DATA_LIST)).toArray(new ProjectWorkerGroup[0]); @@ -126,20 +190,8 @@ private List getWorkerGroups() { return Lists.newArrayList("default"); } - private User getGeneralUser() { - User loginUser = new User(); - loginUser.setUserType(UserType.GENERAL_USER); - loginUser.setUserName("userName"); - loginUser.setId(1); - return loginUser; - } - - private User getAdminUser() { - User loginUser = new User(); - loginUser.setUserType(UserType.ADMIN_USER); - loginUser.setUserName("userName"); - loginUser.setId(1); - return loginUser; + private List getDiffWorkerGroups() { + return Lists.newArrayList("default", "new"); } private Project getProject() { @@ -158,4 +210,20 @@ private ProjectWorkerGroup getProjectWorkerGroup() { projectWorkerGroup.setWorkerGroup("default"); return projectWorkerGroup; } + + private ProjectWorkerGroup getDiffProjectWorkerGroup() { + ProjectWorkerGroup projectWorkerGroup = new ProjectWorkerGroup(); + projectWorkerGroup.setId(2); + projectWorkerGroup.setProjectCode(projectCode); + projectWorkerGroup.setWorkerGroup("new"); + return projectWorkerGroup; + } + + private TaskDefinition getTaskDefinitionWithDiffWorkerGroup() { + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + taskDefinition.setId(1); + taskDefinition.setWorkerGroup("new"); + return 
taskDefinition; + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java deleted file mode 100644 index 0679b8892dff..000000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java +++ /dev/null @@ -1,698 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.api.service; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; - -import org.apache.dolphinscheduler.api.dto.resources.DeleteDataTransferResponse; -import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.exceptions.ServiceException; -import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService; -import org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import org.apache.commons.collections4.CollectionUtils; - -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; - 
-import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; -import org.mockito.junit.jupiter.MockitoSettings; -import org.mockito.quality.Strictness; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.mock.web.MockMultipartFile; - -import com.google.common.io.Files; - -/** - * resources service test - */ -@ExtendWith(MockitoExtension.class) -@MockitoSettings(strictness = Strictness.LENIENT) -public class ResourcesServiceTest { - - private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); - - private static final String tenantCode = "123"; - - @InjectMocks - private ResourcesServiceImpl resourcesService; - - @Mock - private TenantMapper tenantMapper; - - @Mock - private StorageOperate storageOperate; - - @Mock - private UserMapper userMapper; - - @Mock - private UdfFuncMapper udfFunctionMapper; - - @Mock - private ProcessDefinitionMapper processDefinitionMapper; - - @Mock - private ResourcePermissionCheckService resourcePermissionCheckService; - - private MockedStatic mockedStaticFileUtils; - - private MockedStatic mockedStaticFiles; - - private MockedStatic mockedStaticDolphinschedulerFileUtils; - - private MockedStatic mockedStaticPropertyUtils; - - private MockedStatic mockedStaticPaths; - - private MockedStatic filesMockedStatic; - - private Exception exception; - - @BeforeEach - public void setUp() { - mockedStaticFileUtils = Mockito.mockStatic(FileUtils.class); - mockedStaticFiles = Mockito.mockStatic(Files.class); - mockedStaticDolphinschedulerFileUtils = - Mockito.mockStatic(org.apache.dolphinscheduler.api.utils.FileUtils.class); - - mockedStaticPropertyUtils = 
Mockito.mockStatic(PropertyUtils.class); - mockedStaticPaths = Mockito.mockStatic(Paths.class); - filesMockedStatic = Mockito.mockStatic(java.nio.file.Files.class); - } - - @AfterEach - public void after() { - mockedStaticFileUtils.close(); - mockedStaticFiles.close(); - mockedStaticDolphinschedulerFileUtils.close(); - mockedStaticPropertyUtils.close(); - mockedStaticPaths.close(); - filesMockedStatic.close(); - } - - @Test - public void testCreateResource() { - User user = new User(); - user.setId(1); - user.setUserType(UserType.GENERAL_USER); - - // CURRENT_LOGIN_USER_TENANT_NOT_EXIST - when(userMapper.selectById(user.getId())).thenReturn(getUser()); - when(tenantMapper.queryById(1)).thenReturn(null); - Assertions.assertThrows(ServiceException.class, - () -> resourcesService.uploadResource(user, "ResourcesServiceTest", ResourceType.FILE, - new MockMultipartFile("test.pdf", "test.pdf", "pdf", "test".getBytes()), "/")); - // set tenant for user - user.setTenantId(1); - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - - // RESOURCE_FILE_IS_EMPTY - MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf", "".getBytes()); - Result result = resourcesService.uploadResource(user, "ResourcesServiceTest", ResourceType.FILE, - mockMultipartFile, "/"); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(), result.getMsg()); - - // RESOURCE_SUFFIX_FORBID_CHANGE - mockMultipartFile = new MockMultipartFile("test.pdf", "test.pdf", "pdf", "test".getBytes()); - when(Files.getFileExtension("test.pdf")).thenReturn("pdf"); - when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.uploadResource(user, "ResourcesServiceTest.jar", ResourceType.FILE, mockMultipartFile, - "/"); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(), result.getMsg()); - - // UDF_RESOURCE_SUFFIX_NOT_JAR - mockMultipartFile = - new 
MockMultipartFile("ResourcesServiceTest.pdf", "ResourcesServiceTest.pdf", "pdf", "test".getBytes()); - when(Files.getFileExtension("ResourcesServiceTest.pdf")).thenReturn("pdf"); - result = resourcesService.uploadResource(user, "ResourcesServiceTest.pdf", ResourceType.UDF, mockMultipartFile, - "/"); - logger.info(result.toString()); - assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(), result.getMsg()); - - // FULL_FILE_NAME_TOO_LONG - String tooLongFileName = getRandomStringWithLength(Constants.RESOURCE_FULL_NAME_MAX_LENGTH) + ".pdf"; - mockMultipartFile = new MockMultipartFile(tooLongFileName, tooLongFileName, "pdf", "test".getBytes()); - when(Files.getFileExtension(tooLongFileName)).thenReturn("pdf"); - // '/databasePath/tenantCode/RESOURCE/' - when(storageOperate.getResDir(tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - result = resourcesService.uploadResource(user, tooLongFileName, ResourceType.FILE, mockMultipartFile, "/"); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_FULL_NAME_TOO_LONG_ERROR.getMsg(), result.getMsg()); - } - - @Test - public void testCreateDirecotry() { - User user = new User(); - user.setId(1); - user.setUserType(UserType.GENERAL_USER); - - // RESOURCE_EXIST - user.setId(1); - user.setTenantId(1); - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - when(userMapper.selectById(user.getId())).thenReturn(getUser()); - when(storageOperate.getResDir(tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - try { - when(storageOperate.exists("/dolphinscheduler/123/resources/directoryTest")).thenReturn(true); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } - Result result = resourcesService.createDirectory(user, "directoryTest", ResourceType.FILE, -1, "/"); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); - } - - @Test - public void testUpdateResource() { - User user = new User(); - user.setId(1); - 
user.setUserType(UserType.GENERAL_USER); - user.setTenantId(1); - - when(userMapper.selectById(user.getId())).thenReturn(getUser()); - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - when(storageOperate.getResDir(tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - - // USER_NO_OPERATION_PERM - user.setUserType(UserType.GENERAL_USER); - // tenant who have access to resource is 123, - Tenant tenantWNoPermission = new Tenant(); - tenantWNoPermission.setTenantCode("321"); - when(tenantMapper.queryById(1)).thenReturn(tenantWNoPermission); - Result result = resourcesService.updateResource(user, "/dolphinscheduler/123/resources/ResourcesServiceTest", - tenantCode, "ResourcesServiceTest", ResourceType.FILE, null); - logger.info(result.toString()); - assertEquals(Status.NO_CURRENT_OPERATING_PERMISSION.getMsg(), result.getMsg()); - - // SUCCESS - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - try { - when(storageOperate.exists(Mockito.any())).thenReturn(false); - } catch (IOException e) { - logger.error(e.getMessage(), e); - } - - try { - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources/ResourcesServiceTest", - "/dolphinscheduler/123/resources/", tenantCode, ResourceType.FILE)) - .thenReturn(getStorageEntityResource()); - result = resourcesService.updateResource(user, "/dolphinscheduler/123/resources/ResourcesServiceTest", - tenantCode, "ResourcesServiceTest", ResourceType.FILE, null); - logger.info(result.toString()); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", "/dolphinscheduler/123/resources/ResourcesServiceTest", - e); - } - - // Tests for udf resources. 
- // RESOURCE_EXIST - try { - when(storageOperate.exists("/dolphinscheduler/123/resources/ResourcesServiceTest2.jar")).thenReturn(true); - } catch (IOException e) { - logger.error("error occurred when checking resource: " - + "/dolphinscheduler/123/resources/ResourcesServiceTest2.jar"); - } - - try { - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources/ResourcesServiceTest1.jar", - "/dolphinscheduler/123/resources/", tenantCode, ResourceType.UDF)) - .thenReturn(getStorageEntityUdfResource()); - } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", - "/dolphinscheduler/123/resources/ResourcesServiceTest1.jar", e); - } - result = resourcesService.updateResource(user, "/dolphinscheduler/123/resources/ResourcesServiceTest1.jar", - tenantCode, "ResourcesServiceTest2.jar", ResourceType.UDF, null); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); - - // TENANT_NOT_EXIST - when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null); - Assertions.assertThrows(ServiceException.class, () -> resourcesService.updateResource(user, - "ResourcesServiceTest1.jar", "", "ResourcesServiceTest", ResourceType.UDF, null)); - - // SUCCESS - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - - result = resourcesService.updateResource(user, "/dolphinscheduler/123/resources/ResourcesServiceTest1.jar", - tenantCode, "ResourcesServiceTest1.jar", ResourceType.UDF, null); - logger.info(result.toString()); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - @Test - public void testQueryResourceListPaging() { - User loginUser = new User(); - loginUser.setId(1); - loginUser.setTenantId(1); - loginUser.setTenantCode("tenant1"); - loginUser.setUserType(UserType.ADMIN_USER); - List mockResList = new ArrayList(); - mockResList.add(getStorageEntityResource()); - List mockUserList = new ArrayList(); - mockUserList.add(getUser()); - when(userMapper.selectList(null)).thenReturn(mockUserList); 
- when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(getUser().getTenantId())).thenReturn(getTenant()); - when(storageOperate.getResDir(tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - - try { - when(storageOperate.listFilesStatus("/dolphinscheduler/123/resources/", "/dolphinscheduler/123/resources/", - tenantCode, ResourceType.FILE)).thenReturn(mockResList); - } catch (Exception e) { - logger.error("QueryResourceListPaging Error"); - } - Result result = resourcesService.queryResourceListPaging(loginUser, "", "", ResourceType.FILE, "Test", 1, 10); - logger.info(result.toString()); - assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); - PageInfo pageInfo = (PageInfo) result.getData(); - Assertions.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); - - } - - @Test - public void testQueryResourceList() { - User loginUser = getUser(); - - when(userMapper.selectList(null)).thenReturn(Collections.singletonList(loginUser)); - when(userMapper.selectById(loginUser.getId())).thenReturn(loginUser); - when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(getTenant()); - when(storageOperate.getDir(ResourceType.ALL, tenantCode)).thenReturn("/dolphinscheduler"); - when(storageOperate.getDir(ResourceType.FILE, tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - when(storageOperate.getResDir(tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - when(storageOperate.listFilesStatusRecursively("/dolphinscheduler/123/resources/", - "/dolphinscheduler/123/resources/", tenantCode, ResourceType.FILE)) - .thenReturn(Collections.singletonList(getStorageEntityResource())); - Map result = - resourcesService.queryResourceList(loginUser, ResourceType.FILE, "/dolphinscheduler/123/resources/"); - assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - List resourceList = (List) result.get(Constants.DATA_LIST); - Assertions.assertTrue(CollectionUtils.isNotEmpty(resourceList)); - - 
// test udf - when(storageOperate.getDir(ResourceType.UDF, tenantCode)).thenReturn("/dolphinscheduler/123/udfs/"); - when(storageOperate.getUdfDir(tenantCode)).thenReturn("/dolphinscheduler/123/udfs/"); - when(storageOperate.listFilesStatusRecursively("/dolphinscheduler/123/udfs/", "/dolphinscheduler/123/udfs/", - tenantCode, ResourceType.UDF)).thenReturn(Arrays.asList(getStorageEntityUdfResource())); - loginUser.setUserType(UserType.GENERAL_USER); - result = resourcesService.queryResourceList(loginUser, ResourceType.UDF, "/dolphinscheduler/123/udfs/"); - assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - resourceList = (List) result.get(Constants.DATA_LIST); - Assertions.assertTrue(CollectionUtils.isNotEmpty(resourceList)); - } - - @Test - public void testDelete() throws Exception { - - User loginUser = new User(); - loginUser.setId(0); - loginUser.setUserType(UserType.GENERAL_USER); - - // TENANT_NOT_EXIST - loginUser.setUserType(UserType.ADMIN_USER); - loginUser.setTenantId(2); - when(userMapper.selectById(loginUser.getId())).thenReturn(loginUser); - Assertions.assertThrows(ServiceException.class, () -> resourcesService.delete(loginUser, "", "")); - - // RESOURCE_NOT_EXIST - when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(getTenant()); - when(storageOperate.getDir(ResourceType.ALL, tenantCode)).thenReturn("/dolphinscheduler"); - when(storageOperate.getResDir(getTenant().getTenantCode())).thenReturn("/dolphinscheduler/123/resources/"); - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources/ResourcesServiceTest", - "/dolphinscheduler/123/resources/", tenantCode, null)) - .thenReturn(getStorageEntityResource()); - Result result = resourcesService.delete(loginUser, "/dolphinscheduler/123/resources/ResNotExist", tenantCode); - assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); - - // SUCCESS - loginUser.setTenantId(1); - result = resourcesService.delete(loginUser, 
"/dolphinscheduler/123/resources/ResourcesServiceTest", tenantCode); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - @Test - public void testVerifyResourceName() { - - User user = new User(); - user.setId(1); - user.setUserType(UserType.GENERAL_USER); - try { - when(storageOperate.exists("/ResourcesServiceTest.jar")).thenReturn(true); - } catch (IOException e) { - logger.error("error occurred when checking resource: /ResourcesServiceTest.jar\""); - } - Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user); - logger.info(result.toString()); - assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); - - // RESOURCE_FILE_EXIST - result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user); - logger.info(result.toString()); - Assertions.assertTrue(Status.RESOURCE_EXIST.getCode() == result.getCode()); - - // SUCCESS - result = resourcesService.verifyResourceName("test2", ResourceType.FILE, user); - logger.info(result.toString()); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - - } - - @Test - public void testReadResource() throws IOException { - // RESOURCE_NOT_EXIST - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(getUser().getTenantId())).thenReturn(getTenant()); - Result result = resourcesService.readResource(getUser(), "", "", 1, 10); - assertEquals(Status.RESOURCE_FILE_NOT_EXIST.getCode(), (int) result.getCode()); - - // RESOURCE_SUFFIX_NOT_SUPPORT_VIEW - when(FileUtils.getResourceViewSuffixes()).thenReturn("class"); - result = resourcesService.readResource(getUser(), "", "", 1, 10); - assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg()); - - // USER_NOT_EXIST - when(userMapper.selectById(getUser().getId())).thenReturn(null); - when(FileUtils.getResourceViewSuffixes()).thenReturn("jar"); - 
when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.readResource(getUser(), "", "", 1, 10); - assertEquals(Status.USER_NOT_EXIST.getCode(), (int) result.getCode()); - - // TENANT_NOT_EXIST - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(getUser().getTenantId())).thenReturn(null); - Assertions.assertThrows(ServiceException.class, () -> resourcesService.readResource(getUser(), "", "", 1, 10)); - - // SUCCESS - when(FileUtils.getResourceViewSuffixes()).thenReturn("jar,sh"); - when(storageOperate.getDir(ResourceType.ALL, tenantCode)).thenReturn("/dolphinscheduler"); - when(storageOperate.getResDir(getTenant().getTenantCode())).thenReturn("/dolphinscheduler/123/resources/"); - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(getUser().getTenantId())).thenReturn(getTenant()); - when(storageOperate.exists(Mockito.any())).thenReturn(true); - when(storageOperate.vimFile(Mockito.any(), Mockito.any(), eq(1), eq(10))).thenReturn(getContent()); - when(Files.getFileExtension("/dolphinscheduler/123/resources/test.jar")).thenReturn("jar"); - result = resourcesService.readResource(getUser(), "/dolphinscheduler/123/resources/test.jar", tenantCode, 1, - 10); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - @Test - public void testCreateOrUpdateResource() throws Exception { - User user = getUser(); - when(userMapper.queryByUserNameAccurately(user.getUserName())).thenReturn(getUser()); - - // RESOURCE_SUFFIX_NOT_SUPPORT_VIEW - exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> resourcesService.createOrUpdateResource(user.getUserName(), "filename", "my-content")); - Assertions.assertTrue( - exception.getMessage().contains("Not allow create or update resources without extension name")); - - // SUCCESS - 
when(storageOperate.getResDir(user.getTenantCode())).thenReturn("/dolphinscheduler/123/resources/"); - when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); - when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - when(storageOperate.getFileStatus(Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.any())) - .thenReturn(getStorageEntityResource()); - StorageEntity storageEntity = - resourcesService.createOrUpdateResource(user.getUserName(), "filename.txt", "my-content"); - Assertions.assertNotNull(storageEntity); - assertEquals("/dolphinscheduler/123/resources/ResourcesServiceTest", storageEntity.getFullName()); - } - - @Test - public void testUpdateResourceContent() throws Exception { - // RESOURCE_PATH_ILLEGAL - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - when(storageOperate.getResDir(Mockito.anyString())).thenReturn("/tmp"); - ServiceException serviceException = - Assertions.assertThrows(ServiceException.class, () -> resourcesService.updateResourceContent(getUser(), - "/dolphinscheduler/123/resources/ResourcesServiceTest.jar", tenantCode, "content")); - assertTrue(serviceException.getMessage() - .contains("Resource file: /dolphinscheduler/123/resources/ResourcesServiceTest.jar is illegal")); - - // RESOURCE_NOT_EXIST - when(storageOperate.getDir(ResourceType.ALL, tenantCode)).thenReturn("/dolphinscheduler"); - when(storageOperate.getResDir(Mockito.anyString())).thenReturn("/dolphinscheduler/123/resources"); - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources/ResourcesServiceTest.jar", "", tenantCode, - ResourceType.FILE)).thenReturn(null); - Result result = resourcesService.updateResourceContent(getUser(), - "/dolphinscheduler/123/resources/ResourcesServiceTest.jar", tenantCode, "content"); - assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); - 
- // RESOURCE_SUFFIX_NOT_SUPPORT_VIEW - when(FileUtils.getResourceViewSuffixes()).thenReturn("class"); - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources", "", tenantCode, ResourceType.FILE)) - .thenReturn(getStorageEntityResource()); - - result = resourcesService.updateResourceContent(getUser(), "/dolphinscheduler/123/resources", tenantCode, - "content"); - assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg()); - - // USER_NOT_EXIST - when(userMapper.selectById(getUser().getId())).thenReturn(null); - result = resourcesService.updateResourceContent(getUser(), "/dolphinscheduler/123/resources/123.class", - tenantCode, - "content"); - Assertions.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); - - // TENANT_NOT_EXIST - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(1)).thenReturn(null); - Assertions.assertThrows(ServiceException.class, () -> resourcesService.updateResourceContent(getUser(), - "/dolphinscheduler/123/resources/ResourcesServiceTest.jar", tenantCode, "content")); - - // SUCCESS - when(storageOperate.getFileStatus("/dolphinscheduler/123/resources/ResourcesServiceTest.jar", "", tenantCode, - ResourceType.FILE)).thenReturn(getStorageEntityResource()); - - when(Files.getFileExtension(Mockito.anyString())).thenReturn("jar"); - when(FileUtils.getResourceViewSuffixes()).thenReturn("jar"); - when(userMapper.selectById(getUser().getId())).thenReturn(getUser()); - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); - when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - result = resourcesService.updateResourceContent(getUser(), - "/dolphinscheduler/123/resources/ResourcesServiceTest.jar", tenantCode, "content"); - logger.info(result.toString()); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - 
@Test - public void testDownloadResource() { - when(tenantMapper.queryById(1)).thenReturn(getTenant()); - when(userMapper.selectById(1)).thenReturn(getUser()); - org.springframework.core.io.Resource resourceMock = Mockito.mock(org.springframework.core.io.Resource.class); - Path path = Mockito.mock(Path.class); - when(Paths.get(Mockito.any())).thenReturn(path); - try { - when(java.nio.file.Files.size(Mockito.any())).thenReturn(1L); - // resource null - org.springframework.core.io.Resource resource = resourcesService.downloadResource(getUser(), ""); - Assertions.assertNull(resource); - - when(org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(Mockito.any())).thenReturn(resourceMock); - resource = resourcesService.downloadResource(getUser(), ""); - Assertions.assertNotNull(resource); - } catch (Exception e) { - logger.error("DownloadResource error", e); - Assertions.assertTrue(false); - } - - } - - @Test - public void testDeleteDataTransferData() throws Exception { - User user = getUser(); - when(userMapper.selectById(user.getId())).thenReturn(getUser()); - when(tenantMapper.queryById(user.getTenantId())).thenReturn(getTenant()); - - StorageEntity storageEntity1 = Mockito.mock(StorageEntity.class); - StorageEntity storageEntity2 = Mockito.mock(StorageEntity.class); - StorageEntity storageEntity3 = Mockito.mock(StorageEntity.class); - StorageEntity storageEntity4 = Mockito.mock(StorageEntity.class); - StorageEntity storageEntity5 = Mockito.mock(StorageEntity.class); - - when(storageEntity1.getFullName()).thenReturn("DATA_TRANSFER/20220101"); - when(storageEntity2.getFullName()).thenReturn("DATA_TRANSFER/20220102"); - when(storageEntity3.getFullName()).thenReturn("DATA_TRANSFER/20220103"); - when(storageEntity4.getFullName()).thenReturn("DATA_TRANSFER/20220104"); - when(storageEntity5.getFullName()).thenReturn("DATA_TRANSFER/20220105"); - - List storageEntityList = new ArrayList<>(); - storageEntityList.add(storageEntity1); - 
storageEntityList.add(storageEntity2); - storageEntityList.add(storageEntity3); - storageEntityList.add(storageEntity4); - storageEntityList.add(storageEntity5); - - when(storageOperate.listFilesStatus(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any())) - .thenReturn(storageEntityList); - - LocalDateTime localDateTime = LocalDateTime.of(2022, 1, 5, 0, 0, 0); - try (MockedStatic mockHook = Mockito.mockStatic(LocalDateTime.class)) { - mockHook.when(LocalDateTime::now).thenReturn(localDateTime); - DeleteDataTransferResponse response = resourcesService.deleteDataTransferData(user, 3); - - assertEquals(response.getSuccessList().size(), 2); - assertEquals(response.getSuccessList().get(0), "DATA_TRANSFER/20220101"); - assertEquals(response.getSuccessList().get(1), "DATA_TRANSFER/20220102"); - } - - try (MockedStatic mockHook = Mockito.mockStatic(LocalDateTime.class)) { - mockHook.when(LocalDateTime::now).thenReturn(localDateTime); - DeleteDataTransferResponse response = resourcesService.deleteDataTransferData(user, 0); - assertEquals(response.getSuccessList().size(), 5); - } - - } - - @Test - public void testCatFile() { - // SUCCESS - try { - List list = storageOperate.vimFile(Mockito.any(), Mockito.anyString(), eq(1), eq(10)); - Assertions.assertNotNull(list); - - } catch (IOException e) { - logger.error("hadoop error", e); - } - } - - @Test - void testQueryBaseDir() { - User user = getUser(); - when(userMapper.selectById(user.getId())).thenReturn(getUser()); - when(tenantMapper.queryById(user.getTenantId())).thenReturn(getTenant()); - when(storageOperate.getDir(ResourceType.FILE, tenantCode)).thenReturn("/dolphinscheduler/123/resources/"); - try { - when(storageOperate.getFileStatus(Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), - Mockito.any())).thenReturn(getStorageEntityResource()); - } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", "/dolphinscheduler/123/resources/ResourcesServiceTest", - e); - } - Result result 
= resourcesService.queryResourceBaseDir(user, ResourceType.FILE); - assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - private Tenant getTenant() { - Tenant tenant = new Tenant(); - tenant.setTenantCode(tenantCode); - return tenant; - } - - private User getUser() { - User user = new User(); - user.setId(1); - user.setUserType(UserType.GENERAL_USER); - user.setTenantId(1); - user.setTenantCode(tenantCode); - return user; - } - - private StorageEntity getStorageEntityResource() { - StorageEntity entity = new StorageEntity(); - entity.setAlias("ResourcesServiceTest"); - entity.setFileName("ResourcesServiceTest"); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(ResourceType.FILE); - entity.setFullName("/dolphinscheduler/123/resources/ResourcesServiceTest"); - return entity; - } - - private StorageEntity getStorageEntityUdfResource() { - StorageEntity entity = new StorageEntity(); - entity.setAlias("ResourcesServiceTest1.jar"); - entity.setFileName("ResourcesServiceTest1.jar"); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(ResourceType.UDF); - entity.setFullName("/dolphinscheduler/123/resources/ResourcesServiceTest1.jar"); - - return entity; - } - - private List getContent() { - List contentList = new ArrayList<>(); - contentList.add("test"); - return contentList; - } - - private List> getResources() { - List> resources = new ArrayList<>(); - Map resource = new HashMap<>(); - resource.put("id", 1); - resource.put("resource_ids", "1"); - resources.add(resource); - return resources; - } - - private static String getRandomStringWithLength(int length) { - Random r = new Random(); - StringBuilder sb = new StringBuilder(); - while (sb.length() < length) { - char c = (char) (r.nextInt(26) + 'a'); - sb.append(c); - } - return sb.toString(); - } -} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java index e77eab8029e6..8460c5992468 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java @@ -17,13 +17,19 @@ package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowsServiceException; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION_CREATE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION_DELETE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_DEFINITION_UPDATE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_SWITCH_TO_THIS_VERSION; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isA; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.dto.task.TaskCreateRequest; import org.apache.dolphinscheduler.api.dto.task.TaskUpdateRequest; @@ -58,6 +64,7 @@ import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.repository.ProcessTaskRelationLogDao; import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; +import org.apache.dolphinscheduler.plugin.task.shell.ShellTaskChannelFactory; import org.apache.dolphinscheduler.service.process.ProcessService; import 
org.apache.dolphinscheduler.service.process.ProcessServiceImpl; @@ -75,13 +82,17 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class TaskDefinitionServiceImplTest { @InjectMocks @@ -114,9 +125,6 @@ public class TaskDefinitionServiceImplTest { @Mock private ProcessTaskRelationMapper processTaskRelationMapper; - @Mock - private TaskPluginManager taskPluginManager; - @Mock private ProcessTaskRelationService processTaskRelationService; @@ -155,61 +163,75 @@ public void before() { @Test public void createTaskDefinition() { - Project project = getProject(); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); - - Map result = new HashMap<>(); - Mockito.when(projectService.hasProjectAndWritePerm(user, project, result)) - .thenReturn(true); - Mockito.when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + Project project = getProject(); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + + Map result = new HashMap<>(); + when(projectService.hasProjectAndWritePerm(user, project, result)) + .thenReturn(true); + + String createTaskDefinitionJson = + "[{\"name\":\"detail_up\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":" + + 
"\"{\\\"resourceList\\\":[],\\\"localParams\\\":[{\\\"prop\\\":\\\"datetime\\\",\\\"direct\\\":\\\"IN\\\"," + + "\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"${system.datetime}\\\"}],\\\"rawScript\\\":" + + "\\\"echo ${datetime}\\\",\\\"conditionResult\\\":\\\"{\\\\\\\"successNode\\\\\\\":[\\\\\\\"\\\\\\\"]," + + "\\\\\\\"failedNode\\\\\\\":[\\\\\\\"\\\\\\\"]}\\\",\\\"dependence\\\":{}}\",\"flag\":0,\"taskPriority\":0," + + "\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":0,\"timeoutFlag\":0," + + "\"timeoutNotifyStrategy\":0,\"timeout\":0,\"delayTime\":0,\"resourceIds\":\"\"}]"; + Map relation = taskDefinitionService + .createTaskDefinition(user, PROJECT_CODE, createTaskDefinitionJson); + assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); - String createTaskDefinitionJson = - "[{\"name\":\"detail_up\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":" - + "\"{\\\"resourceList\\\":[],\\\"localParams\\\":[{\\\"prop\\\":\\\"datetime\\\",\\\"direct\\\":\\\"IN\\\"," - + "\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"${system.datetime}\\\"}],\\\"rawScript\\\":" - + "\\\"echo ${datetime}\\\",\\\"conditionResult\\\":\\\"{\\\\\\\"successNode\\\\\\\":[\\\\\\\"\\\\\\\"]," - + "\\\\\\\"failedNode\\\\\\\":[\\\\\\\"\\\\\\\"]}\\\",\\\"dependence\\\":{}}\",\"flag\":0,\"taskPriority\":0," - + "\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":0,\"timeoutFlag\":0," - + "\"timeoutNotifyStrategy\":0,\"timeout\":0,\"delayTime\":0,\"resourceIds\":\"\"}]"; - Map relation = taskDefinitionService - .createTaskDefinition(user, PROJECT_CODE, createTaskDefinitionJson); - Assertions.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); + } } @Test public void updateTaskDefinition() { - String taskDefinitionJson = getTaskDefinitionJson();; - - Project project = getProject(); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); - - Map result = new HashMap<>(); - putMsg(result, Status.SUCCESS, 
PROJECT_CODE); - Mockito.when(projectService.hasProjectAndWritePerm(user, project, new HashMap<>())).thenReturn(true); - - Mockito.when(processService.isTaskOnline(TASK_CODE)).thenReturn(Boolean.FALSE); - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(new TaskDefinition()); - Mockito.when(taskDefinitionMapper.updateById(Mockito.any(TaskDefinitionLog.class))).thenReturn(1); - Mockito.when(taskDefinitionLogMapper.insert(Mockito.any(TaskDefinitionLog.class))).thenReturn(1); - Mockito.when(processTaskRelationLogDao.insert(Mockito.any(ProcessTaskRelationLog.class))).thenReturn(1); - Mockito.when(processDefinitionMapper.queryByCode(2L)).thenReturn(new ProcessDefinition()); - Mockito.when(processDefinitionMapper.updateById(Mockito.any(ProcessDefinition.class))).thenReturn(1); - Mockito.when(processDefinitionLogMapper.insert(Mockito.any(ProcessDefinitionLog.class))).thenReturn(1); - Mockito.when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(1); - Mockito.when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); - Mockito.when(processTaskRelationMapper.queryProcessTaskRelationByTaskCodeAndTaskVersion(TASK_CODE, 0)) - .thenReturn(getProcessTaskRelationList2()); - Mockito.when(processTaskRelationMapper - .updateProcessTaskRelationTaskVersion(Mockito.any(ProcessTaskRelation.class))).thenReturn(1); - result = taskDefinitionService.updateTaskDefinition(user, PROJECT_CODE, TASK_CODE, taskDefinitionJson); - Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - // failure - Mockito.when(processTaskRelationMapper - .updateProcessTaskRelationTaskVersion(Mockito.any(ProcessTaskRelation.class))).thenReturn(2); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinition(user, PROJECT_CODE, TASK_CODE, taskDefinitionJson)); - Assertions.assertEquals(Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR.getCode(), - ((ServiceException) 
exception).getCode()); + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + String taskDefinitionJson = getTaskDefinitionJson(); + + Project project = getProject(); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, PROJECT_CODE); + when(projectService.hasProjectAndWritePerm(user, project, new HashMap<>())).thenReturn(true); + + when(processService.isTaskOnline(TASK_CODE)).thenReturn(Boolean.FALSE); + when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(new TaskDefinition()); + when(taskDefinitionMapper.updateById(Mockito.any(TaskDefinitionLog.class))).thenReturn(1); + when(taskDefinitionLogMapper.insert(Mockito.any(TaskDefinitionLog.class))).thenReturn(1); + when(processTaskRelationLogDao.insert(Mockito.any(ProcessTaskRelationLog.class))).thenReturn(1); + when(processDefinitionMapper.queryByCode(2L)).thenReturn(new ProcessDefinition()); + when(processDefinitionMapper.updateById(Mockito.any(ProcessDefinition.class))).thenReturn(1); + when(processDefinitionLogMapper.insert(Mockito.any(ProcessDefinitionLog.class))).thenReturn(1); + when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(1); + when(processTaskRelationMapper.queryProcessTaskRelationByTaskCodeAndTaskVersion(TASK_CODE, 0)) + .thenReturn(getProcessTaskRelationList2()); + when(processTaskRelationMapper + .updateProcessTaskRelationTaskVersion(Mockito.any(ProcessTaskRelation.class))).thenReturn(1); + result = taskDefinitionService.updateTaskDefinition(user, PROJECT_CODE, TASK_CODE, taskDefinitionJson); + assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + // failure + when(processTaskRelationMapper + .updateProcessTaskRelationTaskVersion(Mockito.any(ProcessTaskRelation.class))).thenReturn(2); + 
exception = Assertions.assertThrows(ServiceException.class, + () -> taskDefinitionService.updateTaskDefinition(user, PROJECT_CODE, TASK_CODE, + taskDefinitionJson)); + assertEquals(Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR.getCode(), + ((ServiceException) exception).getCode()); + } } @@ -217,72 +239,72 @@ public void updateTaskDefinition() { public void queryTaskDefinitionByName() { String taskName = "task"; Project project = getProject(); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, PROJECT_CODE); - Mockito.when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, TASK_DEFINITION)) + when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, TASK_DEFINITION)) .thenReturn(result); - Mockito.when(taskDefinitionMapper.queryByName(project.getCode(), PROCESS_DEFINITION_CODE, taskName)) + when(taskDefinitionMapper.queryByName(project.getCode(), PROCESS_DEFINITION_CODE, taskName)) .thenReturn(new TaskDefinition()); Map relation = taskDefinitionService .queryTaskDefinitionByName(user, PROJECT_CODE, PROCESS_DEFINITION_CODE, taskName); - Assertions.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); + assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); } @Test public void deleteTaskDefinitionByCode() { Project project = getProject(); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); // error task definition not find exception = Assertions.assertThrows(ServiceException.class, () -> taskDefinitionService.deleteTaskDefinitionByCode(user, TASK_CODE)); - Assertions.assertEquals(Status.TASK_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); + assertEquals(Status.TASK_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error delete single 
task definition object - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(getTaskDefinition()); - Mockito.when(taskDefinitionMapper.deleteByCode(TASK_CODE)).thenReturn(0); - Mockito.when(projectService.hasProjectAndWritePerm(user, project, new HashMap<>())).thenReturn(true); + when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(getTaskDefinition()); + when(taskDefinitionMapper.deleteByCode(TASK_CODE)).thenReturn(0); + when(projectService.hasProjectAndWritePerm(user, project, new HashMap<>())).thenReturn(true); exception = Assertions.assertThrows(ServiceException.class, () -> taskDefinitionService.deleteTaskDefinitionByCode(user, TASK_CODE)); - Assertions.assertEquals(Status.DELETE_TASK_DEFINE_BY_CODE_MSG_ERROR.getCode(), + assertEquals(Status.DELETE_TASK_DEFINE_BY_CODE_MSG_ERROR.getCode(), ((ServiceException) exception).getCode()); // success - Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, project, + doNothing().when(projectService).checkProjectAndAuthThrowException(user, project, TASK_DEFINITION_DELETE); - Mockito.when(processTaskRelationMapper.queryDownstreamByTaskCode(TASK_CODE)).thenReturn(new ArrayList<>()); - Mockito.when(taskDefinitionMapper.deleteByCode(TASK_CODE)).thenReturn(1); + when(processTaskRelationMapper.queryDownstreamByTaskCode(TASK_CODE)).thenReturn(new ArrayList<>()); + when(taskDefinitionMapper.deleteByCode(TASK_CODE)).thenReturn(1); Assertions.assertDoesNotThrow(() -> taskDefinitionService.deleteTaskDefinitionByCode(user, TASK_CODE)); } @Test public void switchVersion() { Project project = getProject(); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, PROJECT_CODE); - Mockito.when( + when( projectService.checkProjectAndAuth(user, project, PROJECT_CODE, WORKFLOW_SWITCH_TO_THIS_VERSION)) - .thenReturn(result); + 
.thenReturn(result); - Mockito.when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(TASK_CODE, VERSION)) + when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(TASK_CODE, VERSION)) .thenReturn(new TaskDefinitionLog()); TaskDefinition taskDefinition = new TaskDefinition(); taskDefinition.setProjectCode(PROJECT_CODE); - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)) + when(taskDefinitionMapper.queryByCode(TASK_CODE)) .thenReturn(taskDefinition); - Mockito.when(taskDefinitionMapper.updateById(new TaskDefinitionLog())).thenReturn(1); + when(taskDefinitionMapper.updateById(new TaskDefinitionLog())).thenReturn(1); Map relation = taskDefinitionService .switchVersion(user, PROJECT_CODE, TASK_CODE, VERSION); - Assertions.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); + assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); } private void putMsg(Map result, Status status, Object... statusParams) { @@ -331,7 +353,7 @@ public void checkJson() { @Test public void genTaskCodeList() { Map genTaskCodeList = taskDefinitionService.genTaskCodeList(10); - Assertions.assertEquals(Status.SUCCESS, genTaskCodeList.get(Constants.STATUS)); + assertEquals(Status.SUCCESS, genTaskCodeList.get(Constants.STATUS)); } @Test @@ -348,31 +370,31 @@ public void testQueryTaskDefinitionListPaging() { taskMainInfo.setUpstreamTaskName("4"); taskMainInfoIPage.setRecords(Collections.singletonList(taskMainInfo)); taskMainInfoIPage.setTotal(10L); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); - Mockito.when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, TASK_DEFINITION)) + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(project); + when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, TASK_DEFINITION)) .thenReturn(checkResult); - Mockito.when(taskDefinitionMapper.queryDefineListPaging(Mockito.any(Page.class), Mockito.anyLong(), + 
when(taskDefinitionMapper.queryDefineListPaging(Mockito.any(Page.class), Mockito.anyLong(), Mockito.isNull(), Mockito.anyString(), Mockito.isNull())) - .thenReturn(taskMainInfoIPage); - Mockito.when(taskDefinitionMapper.queryDefineListByCodeList(PROJECT_CODE, Collections.singletonList(3L))) + .thenReturn(taskMainInfoIPage); + when(taskDefinitionMapper.queryDefineListByCodeList(PROJECT_CODE, Collections.singletonList(3L))) .thenReturn(Collections.singletonList(taskMainInfo)); Result result = taskDefinitionService.queryTaskDefinitionListPaging(user, PROJECT_CODE, null, null, null, pageNo, pageSize); - Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); + assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test public void testReleaseTaskDefinition() { - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); Project project = getProject(); // check task dose not exist Map result = new HashMap<>(); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, TASK_CODE); - Mockito.when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, null)).thenReturn(result); + when(projectService.checkProjectAndAuth(user, project, PROJECT_CODE, null)).thenReturn(result); Map map = taskDefinitionService.releaseTaskDefinition(user, PROJECT_CODE, TASK_CODE, ReleaseState.OFFLINE); - Assertions.assertEquals(Status.TASK_DEFINE_NOT_EXIST, map.get(Constants.STATUS)); + assertEquals(Status.TASK_DEFINE_NOT_EXIST, map.get(Constants.STATUS)); // process definition offline putMsg(result, Status.SUCCESS); @@ -384,23 +406,23 @@ public void testReleaseTaskDefinition() { "{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 1\",\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}}"; taskDefinition.setTaskParams(params); taskDefinition.setTaskType("SHELL"); - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(taskDefinition); + 
when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(taskDefinition); TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog(taskDefinition); - Mockito.when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(TASK_CODE, taskDefinition.getVersion())) + when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(TASK_CODE, taskDefinition.getVersion())) .thenReturn(taskDefinitionLog); Map offlineTaskResult = taskDefinitionService.releaseTaskDefinition(user, PROJECT_CODE, TASK_CODE, ReleaseState.OFFLINE); - Assertions.assertEquals(Status.SUCCESS, offlineTaskResult.get(Constants.STATUS)); + assertEquals(Status.SUCCESS, offlineTaskResult.get(Constants.STATUS)); // process definition online, resource exist Map onlineTaskResult = taskDefinitionService.releaseTaskDefinition(user, PROJECT_CODE, TASK_CODE, ReleaseState.ONLINE); - Assertions.assertEquals(Status.SUCCESS, onlineTaskResult.get(Constants.STATUS)); + assertEquals(Status.SUCCESS, onlineTaskResult.get(Constants.STATUS)); // release error code Map failResult = taskDefinitionService.releaseTaskDefinition(user, PROJECT_CODE, TASK_CODE, ReleaseState.getEnum(2)); - Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failResult.get(Constants.STATUS)); + assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failResult.get(Constants.STATUS)); } @Test @@ -410,133 +432,131 @@ public void testCreateTaskDefinitionV2() { taskCreateRequest.setWorkflowCode(PROCESS_DEFINITION_CODE); // error process definition not find - exception = Assertions.assertThrows(ServiceException.class, + assertThrowsServiceException(Status.PROCESS_DEFINE_NOT_EXIST, () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); - Assertions.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error project not find - Mockito.when(processDefinitionMapper.queryByCode(PROCESS_DEFINITION_CODE)).thenReturn(getProcessDefinition()); - 
Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); - Mockito.doThrow(new ServiceException(Status.PROJECT_NOT_EXIST)).when(projectService) + when(processDefinitionMapper.queryByCode(PROCESS_DEFINITION_CODE)).thenReturn(getProcessDefinition()); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); + doThrow(new ServiceException(Status.PROJECT_NOT_EXIST)).when(projectService) .checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION_CREATE); - exception = Assertions.assertThrows(ServiceException.class, + assertThrowsServiceException(Status.PROJECT_NOT_EXIST, () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); - Assertions.assertEquals(Status.PROJECT_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error task definition - taskCreateRequest.setTaskParams(TASK_PARAMETER); - Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(), - TASK_DEFINITION_CREATE); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); - Assertions.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID.getCode(), - ((ServiceException) exception).getCode()); - - // error create task definition object - Mockito.when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); - Mockito.when(taskDefinitionMapper.insert(isA(TaskDefinition.class))).thenReturn(0); - exception = Assertions.assertThrows(ServiceException.class, + taskCreateRequest.setTaskType(ShellTaskChannelFactory.NAME); + taskCreateRequest.setTaskParams(JSONUtils.toJsonString(new HashMap<>())); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION_CREATE); + assertThrowsServiceException(Status.PROCESS_NODE_S_PARAMETER_INVALID, () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); - 
Assertions.assertEquals(Status.CREATE_TASK_DEFINITION_ERROR.getCode(), - ((ServiceException) exception).getCode()); - // error sync to task definition log - Mockito.when(taskDefinitionMapper.insert(isA(TaskDefinition.class))).thenReturn(1); - Mockito.when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(0); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); - Assertions.assertEquals(Status.CREATE_TASK_DEFINITION_LOG_ERROR.getCode(), - ((ServiceException) exception).getCode()); - - // success - Mockito.when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(1); - // we do not test updateUpstreamTaskDefinition, because it should be tested in processTaskRelationService - Mockito.when( - processTaskRelationService.updateUpstreamTaskDefinitionWithSyncDag(isA(User.class), isA(Long.class), - isA(Boolean.class), - isA(TaskRelationUpdateUpstreamRequest.class))) - .thenReturn(getProcessTaskRelationList()); - Mockito.when(processDefinitionService.updateSingleProcessDefinition(isA(User.class), isA(Long.class), - isA(WorkflowUpdateRequest.class))).thenReturn(getProcessDefinition()); - Assertions.assertDoesNotThrow(() -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + + // error create task definition object + when(taskDefinitionMapper.insert(isA(TaskDefinition.class))).thenReturn(0); + assertThrowsServiceException(Status.CREATE_TASK_DEFINITION_ERROR, + () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); + + // error sync to task definition log + when(taskDefinitionMapper.insert(isA(TaskDefinition.class))).thenReturn(1); + 
when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(0); + assertThrowsServiceException(Status.CREATE_TASK_DEFINITION_LOG_ERROR, + () -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); + + // success + when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(1); + // we do not test updateUpstreamTaskDefinition, because it should be tested in processTaskRelationService + when( + processTaskRelationService.updateUpstreamTaskDefinitionWithSyncDag(isA(User.class), isA(Long.class), + isA(Boolean.class), + isA(TaskRelationUpdateUpstreamRequest.class))) + .thenReturn(getProcessTaskRelationList()); + when(processDefinitionService.updateSingleProcessDefinition(isA(User.class), isA(Long.class), + isA(WorkflowUpdateRequest.class))).thenReturn(getProcessDefinition()); + assertDoesNotThrow(() -> taskDefinitionService.createTaskDefinitionV2(user, taskCreateRequest)); + } } @Test public void testUpdateTaskDefinitionV2() { TaskUpdateRequest taskUpdateRequest = new TaskUpdateRequest(); + TaskDefinition taskDefinition = getTaskDefinition(); + Project project = getProject(); // error task definition not exists - exception = Assertions.assertThrows(ServiceException.class, + assertThrowsServiceException(Status.TASK_DEFINITION_NOT_EXISTS, () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.TASK_DEFINITION_NOT_EXISTS.getCode(), ((ServiceException) exception).getCode()); // error project not find - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(getTaskDefinition()); - Mockito.when(projectMapper.queryByCode(isA(Long.class))).thenReturn(getProject()); - Mockito.doThrow(new ServiceException(Status.PROJECT_NOT_EXIST)).when(projectService) - .checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION_UPDATE); - exception = Assertions.assertThrows(ServiceException.class, + 
when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(taskDefinition); + when(projectMapper.queryByCode(isA(Long.class))).thenReturn(project); + doThrow(new ServiceException(Status.PROJECT_NOT_EXIST)).when(projectService) + .checkProjectAndAuthThrowException(user, project, TASK_DEFINITION_UPDATE); + assertThrowsServiceException(Status.PROJECT_NOT_EXIST, () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.PROJECT_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error task definition - Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(), - TASK_DEFINITION_UPDATE); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID.getCode(), - ((ServiceException) exception).getCode()); - - // error task definition already online - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID.getCode(), - ((ServiceException) exception).getCode()); - - // error task definition nothing update - Mockito.when(processService.isTaskOnline(TASK_CODE)).thenReturn(false); - Mockito.when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.TASK_DEFINITION_NOT_CHANGE.getCode(), ((ServiceException) exception).getCode()); - - // error task definition version invalid - taskUpdateRequest.setTaskPriority(String.valueOf(Priority.HIGH)); - exception = Assertions.assertThrows(ServiceException.class, - () -> 
taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.DATA_IS_NOT_VALID.getCode(), ((ServiceException) exception).getCode()); - - // error task definition update effect number - Mockito.when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(VERSION); - Mockito.when(taskDefinitionMapper.updateById(isA(TaskDefinition.class))).thenReturn(0); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.UPDATE_TASK_DEFINITION_ERROR.getCode(), - ((ServiceException) exception).getCode()); - - // error task definition log insert - Mockito.when(taskDefinitionMapper.updateById(isA(TaskDefinition.class))).thenReturn(1); - Mockito.when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(0); - exception = Assertions.assertThrows(ServiceException.class, - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); - Assertions.assertEquals(Status.CREATE_TASK_DEFINITION_LOG_ERROR.getCode(), - ((ServiceException) exception).getCode()); - - // success - Mockito.when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(1); - // we do not test updateUpstreamTaskDefinition, because it should be tested in processTaskRelationService - Mockito.when( - processTaskRelationService.updateUpstreamTaskDefinitionWithSyncDag(isA(User.class), isA(Long.class), - isA(Boolean.class), - isA(TaskRelationUpdateUpstreamRequest.class))) - .thenReturn(getProcessTaskRelationList()); - Assertions.assertDoesNotThrow( - () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, project, TASK_DEFINITION_UPDATE); + + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + 
taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(false); + assertThrowsServiceException(Status.PROCESS_NODE_S_PARAMETER_INVALID, + () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + } - TaskDefinition taskDefinition = - taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest); - Assertions.assertEquals(getTaskDefinition().getVersion() + 1, taskDefinition.getVersion()); + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + // error task definition nothing update + when(processService.isTaskOnline(TASK_CODE)).thenReturn(false); + assertThrowsServiceException(Status.TASK_DEFINITION_NOT_CHANGE, + () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + + // error task definition version invalid + taskUpdateRequest.setTaskPriority(String.valueOf(Priority.HIGH)); + assertThrowsServiceException(Status.DATA_IS_NOT_VALID, + () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + + // error task definition update effect number + when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(VERSION); + when(taskDefinitionMapper.updateById(isA(TaskDefinition.class))).thenReturn(0); + assertThrowsServiceException(Status.UPDATE_TASK_DEFINITION_ERROR, + () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + + // error task definition log insert + when(taskDefinitionMapper.updateById(isA(TaskDefinition.class))).thenReturn(1); + when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(0); + assertThrowsServiceException(Status.CREATE_TASK_DEFINITION_LOG_ERROR, + () -> 
taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + + // success + when(taskDefinitionLogMapper.insert(isA(TaskDefinitionLog.class))).thenReturn(1); + // we do not test updateUpstreamTaskDefinition, because it should be tested in processTaskRelationService + when( + processTaskRelationService.updateUpstreamTaskDefinitionWithSyncDag(isA(User.class), isA(Long.class), + isA(Boolean.class), + isA(TaskRelationUpdateUpstreamRequest.class))) + .thenReturn(getProcessTaskRelationList()); + Assertions.assertDoesNotThrow( + () -> taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest)); + + taskDefinition = + taskDefinitionService.updateTaskDefinitionV2(user, TASK_CODE, taskUpdateRequest); + assertEquals(getTaskDefinition().getVersion() + 1, taskDefinition.getVersion()); + } } @Test @@ -549,28 +569,28 @@ public void testUpdateDag() { ArrayList taskDefinitionLogs = new ArrayList<>(); taskDefinitionLogs.add(taskDefinitionLog); Integer version = 1; - Mockito.when(processDefinitionMapper.queryByCode(isA(long.class))).thenReturn(processDefinition); + when(processDefinitionMapper.queryByCode(isA(long.class))).thenReturn(processDefinition); // saveProcessDefine - Mockito.when(processDefineLogMapper.queryMaxVersionForDefinition(isA(long.class))).thenReturn(version); - Mockito.when(processDefineLogMapper.insert(isA(ProcessDefinitionLog.class))).thenReturn(1); - Mockito.when(processDefinitionMapper.insert(isA(ProcessDefinitionLog.class))).thenReturn(1); + when(processDefineLogMapper.queryMaxVersionForDefinition(isA(long.class))).thenReturn(version); + when(processDefineLogMapper.insert(isA(ProcessDefinitionLog.class))).thenReturn(1); + when(processDefinitionMapper.insert(isA(ProcessDefinitionLog.class))).thenReturn(1); int insertVersion = processServiceImpl.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); - Mockito.when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, 
Boolean.TRUE)) + when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE)) .thenReturn(insertVersion); - Assertions.assertEquals(insertVersion, version + 1); + assertEquals(insertVersion, version + 1); // saveTaskRelation List processTaskRelationLogList = getProcessTaskRelationLogList(); - Mockito.when(processTaskRelationMapper.queryByProcessCode(eq(processDefinition.getCode()))) + when(processTaskRelationMapper.queryByProcessCode(eq(processDefinition.getCode()))) .thenReturn(processTaskRelationList); - Mockito.when(processTaskRelationMapper.batchInsert(isA(List.class))).thenReturn(1); - Mockito.when(processTaskRelationLogMapper.batchInsert(isA(List.class))).thenReturn(1); + when(processTaskRelationMapper.batchInsert(isA(List.class))).thenReturn(1); + when(processTaskRelationLogMapper.batchInsert(isA(List.class))).thenReturn(1); int insertResult = processServiceImpl.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion, processTaskRelationLogList, taskDefinitionLogs, Boolean.TRUE); - Assertions.assertEquals(Constants.EXIT_CODE_SUCCESS, insertResult); + assertEquals(Constants.EXIT_CODE_SUCCESS, insertResult); Assertions.assertDoesNotThrow( () -> taskDefinitionService.updateDag(loginUser, processDefinition.getCode(), processTaskRelationList, taskDefinitionLogs)); @@ -581,55 +601,61 @@ public void testGetTaskDefinition() { // error task definition not exists exception = Assertions.assertThrows(ServiceException.class, () -> taskDefinitionService.getTaskDefinition(user, TASK_CODE)); - Assertions.assertEquals(Status.TASK_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); + assertEquals(Status.TASK_DEFINE_NOT_EXIST.getCode(), ((ServiceException) exception).getCode()); // error task definition not exists - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(getTaskDefinition()); - 
Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); - Mockito.doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService) + when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(getTaskDefinition()); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); + doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService) .checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION); exception = Assertions.assertThrows(ServiceException.class, () -> taskDefinitionService.getTaskDefinition(user, TASK_CODE)); - Assertions.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(), + assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM.getCode(), ((ServiceException) exception).getCode()); // success - Mockito.doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, getProject(), TASK_DEFINITION); Assertions.assertDoesNotThrow(() -> taskDefinitionService.getTaskDefinition(user, TASK_CODE)); } @Test public void testUpdateTaskWithUpstream() { - - String taskDefinitionJson = getTaskDefinitionJson(); - TaskDefinition taskDefinition = getTaskDefinition(); - taskDefinition.setFlag(Flag.NO); - TaskDefinition taskDefinitionSecond = getTaskDefinition(); - taskDefinitionSecond.setCode(5); - - user.setUserType(UserType.ADMIN_USER); - Mockito.when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); - Mockito.when(projectService.hasProjectAndWritePerm(user, getProject(), new HashMap<>())).thenReturn(true); - Mockito.when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(taskDefinition); - Mockito.when(taskPluginManager.checkTaskParameters(Mockito.any())).thenReturn(true); - Mockito.when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(1); - 
Mockito.when(taskDefinitionMapper.updateById(Mockito.any())).thenReturn(1); - Mockito.when(taskDefinitionLogMapper.insert(Mockito.any())).thenReturn(1); - - Mockito.when(taskDefinitionMapper.queryByCodeList(Mockito.anySet())) - .thenReturn(Arrays.asList(taskDefinition, taskDefinitionSecond)); - - Mockito.when(processTaskRelationMapper.queryUpstreamByCode(PROJECT_CODE, TASK_CODE)) - .thenReturn(getProcessTaskRelationListV2()); - Mockito.when(processDefinitionMapper.queryByCode(PROCESS_DEFINITION_CODE)).thenReturn(getProcessDefinition()); - Mockito.when(processTaskRelationMapper.batchInsert(Mockito.anyList())).thenReturn(1); - Mockito.when(processTaskRelationMapper.updateById(Mockito.any())).thenReturn(1); - Mockito.when(processTaskRelationLogDao.batchInsert(Mockito.anyList())).thenReturn(2); - // success - Map successMap = taskDefinitionService.updateTaskWithUpstream(user, PROJECT_CODE, TASK_CODE, - taskDefinitionJson, UPSTREAM_CODE); - Assertions.assertEquals(Status.SUCCESS, successMap.get(Constants.STATUS)); - user.setUserType(UserType.GENERAL_USER); + try ( + MockedStatic taskPluginManagerMockedStatic = + Mockito.mockStatic(TaskPluginManager.class)) { + taskPluginManagerMockedStatic + .when(() -> TaskPluginManager.checkTaskParameters(Mockito.any(), Mockito.any())) + .thenReturn(true); + String taskDefinitionJson = getTaskDefinitionJson(); + TaskDefinition taskDefinition = getTaskDefinition(); + taskDefinition.setFlag(Flag.NO); + TaskDefinition taskDefinitionSecond = getTaskDefinition(); + taskDefinitionSecond.setCode(5); + + user.setUserType(UserType.ADMIN_USER); + when(projectMapper.queryByCode(PROJECT_CODE)).thenReturn(getProject()); + when(projectService.hasProjectAndWritePerm(user, getProject(), new HashMap<>())).thenReturn(true); + when(taskDefinitionMapper.queryByCode(TASK_CODE)).thenReturn(taskDefinition); + when(taskDefinitionLogMapper.queryMaxVersionForDefinition(TASK_CODE)).thenReturn(1); + 
when(taskDefinitionMapper.updateById(Mockito.any())).thenReturn(1); + when(taskDefinitionLogMapper.insert(Mockito.any())).thenReturn(1); + + when(taskDefinitionMapper.queryByCodeList(Mockito.anySet())) + .thenReturn(Arrays.asList(taskDefinition, taskDefinitionSecond)); + + when(processTaskRelationMapper.queryUpstreamByCode(PROJECT_CODE, TASK_CODE)) + .thenReturn(getProcessTaskRelationListV2()); + when(processDefinitionMapper.queryByCode(PROCESS_DEFINITION_CODE)) + .thenReturn(getProcessDefinition()); + when(processTaskRelationMapper.batchInsert(Mockito.anyList())).thenReturn(1); + when(processTaskRelationMapper.updateById(Mockito.any())).thenReturn(1); + when(processTaskRelationLogDao.batchInsert(Mockito.anyList())).thenReturn(2); + // success + Map successMap = taskDefinitionService.updateTaskWithUpstream(user, PROJECT_CODE, TASK_CODE, + taskDefinitionJson, UPSTREAM_CODE); + assertEquals(Status.SUCCESS, successMap.get(Constants.STATUS)); + user.setUserType(UserType.GENERAL_USER); + } } private String getTaskDefinitionJson() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index aca0d80a6fae..dd7acb16d577 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowsServiceException; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.FORCED_SUCCESS; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.TASK_INSTANCE; import static org.mockito.ArgumentMatchers.any; @@ -25,7 +26,6 @@ import static org.mockito.Mockito.doThrow; 
import static org.mockito.Mockito.when; -import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.dto.taskInstance.TaskInstanceRemoveCacheResponse; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; @@ -35,15 +35,16 @@ import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.TaskExecuteType; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -65,7 +66,6 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import org.springframework.boot.test.context.SpringBootTest; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @@ -74,7 +74,6 @@ */ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) -@SpringBootTest(classes = ApiApplicationServer.class) public class TaskInstanceServiceTest { @InjectMocks @@ -100,6 +99,8 @@ public class TaskInstanceServiceTest { @Mock 
TaskInstanceDao taskInstanceDao; + @Mock + ProcessInstanceDao workflowInstanceDao; @Test public void queryTaskListPaging() { @@ -324,6 +325,7 @@ private ProcessInstance getProcessInstance() { private TaskInstance getTaskInstance() { TaskInstance taskInstance = new TaskInstance(); taskInstance.setId(1); + taskInstance.setProjectCode(1L); taskInstance.setName("test_task_instance"); taskInstance.setStartTime(new Date()); taskInstance.setEndTime(new Date()); @@ -343,64 +345,69 @@ private void putMsg(Map result, Status status, Object... statusP } @Test - public void testForceTaskSuccess() { + public void testForceTaskSuccess_withNoPermission() { + User user = getAdminUser(); + TaskInstance task = getTaskInstance(); + doThrow(new ServiceException(Status.USER_NO_OPERATION_PROJECT_PERM)).when(projectService) + .checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); + assertThrowsServiceException(Status.USER_NO_OPERATION_PROJECT_PERM, + () -> taskInstanceService.forceTaskSuccess(user, task.getProjectCode(), task.getId())); + } + + @Test + public void testForceTaskSuccess_withTaskInstanceNotFound() { + User user = getAdminUser(); + TaskInstance task = getTaskInstance(); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); + when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.empty()); + assertThrowsServiceException(Status.TASK_INSTANCE_NOT_FOUND, + () -> taskInstanceService.forceTaskSuccess(user, task.getProjectCode(), task.getId())); + } + + @Test + public void testForceTaskSuccess_withWorkflowInstanceNotFound() { + User user = getAdminUser(); + TaskInstance task = getTaskInstance(); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); + when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); + 
when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())).thenReturn(Optional.empty()); + + assertThrowsServiceException(Status.PROCESS_INSTANCE_NOT_EXIST, + () -> taskInstanceService.forceTaskSuccess(user, task.getProjectCode(), task.getId())); + } + + @Test + public void testForceTaskSuccess_withWorkflowInstanceNotFinished() { User user = getAdminUser(); long projectCode = 1L; - Project project = getProject(projectCode); - int taskId = 1; TaskInstance task = getTaskInstance(); + ProcessInstance processInstance = getProcessInstance(); + processInstance.setState(WorkflowExecutionStatus.RUNNING_EXECUTION); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, projectCode, FORCED_SUCCESS); + when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); + when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())) + .thenReturn(Optional.of(processInstance)); - Map mockSuccess = new HashMap<>(5); - putMsg(mockSuccess, Status.SUCCESS); - when(projectMapper.queryByCode(projectCode)).thenReturn(project); + assertThrowsServiceException( + "The workflow instance is not finished: " + processInstance.getState() + + " cannot force start task instance", + () -> taskInstanceService.forceTaskSuccess(user, projectCode, task.getId())); + } - // user auth failed - Map mockFailure = new HashMap<>(5); - putMsg(mockFailure, Status.USER_NO_OPERATION_PROJECT_PERM, user.getUserName(), projectCode); - when(projectService.checkProjectAndAuth(user, project, projectCode, FORCED_SUCCESS)).thenReturn(mockFailure); - Result authFailRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); - Assertions.assertNotSame(Status.SUCCESS.getCode(), authFailRes.getCode()); - - // test task not found - when(projectService.checkProjectAndAuth(user, project, projectCode, FORCED_SUCCESS)).thenReturn(mockSuccess); - when(taskInstanceMapper.selectById(Mockito.anyInt())).thenReturn(null); - TaskDefinition taskDefinition = new 
TaskDefinition(); - taskDefinition.setProjectCode(projectCode); - when(taskDefinitionMapper.queryByCode(task.getTaskCode())).thenReturn(taskDefinition); - Result taskNotFoundRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); - Assertions.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), taskNotFoundRes.getCode().intValue()); - - // test task instance state error - task.setState(TaskExecutionStatus.SUCCESS); - when(taskInstanceMapper.selectById(1)).thenReturn(task); - Map result = new HashMap<>(); - putMsg(result, Status.SUCCESS, projectCode); - when(projectMapper.queryByCode(projectCode)).thenReturn(project); - when(projectService.checkProjectAndAuth(user, project, projectCode, FORCED_SUCCESS)).thenReturn(result); - Result taskStateErrorRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); - Assertions.assertEquals(Status.TASK_INSTANCE_STATE_OPERATION_ERROR.getCode(), - taskStateErrorRes.getCode().intValue()); - - // test error - task.setState(TaskExecutionStatus.FAILURE); - when(taskInstanceMapper.updateById(task)).thenReturn(0); - putMsg(result, Status.SUCCESS, projectCode); - when(projectMapper.queryByCode(projectCode)).thenReturn(project); - when(projectService.checkProjectAndAuth(user, project, projectCode, FORCED_SUCCESS)).thenReturn(result); - Result errorRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); - Assertions.assertEquals(Status.FORCE_TASK_SUCCESS_ERROR.getCode(), errorRes.getCode().intValue()); - - // test success - task.setState(TaskExecutionStatus.FAILURE); - task.setEndTime(null); - when(taskInstanceMapper.updateById(task)).thenReturn(1); - putMsg(result, Status.SUCCESS, projectCode); - when(projectMapper.queryByCode(projectCode)).thenReturn(project); - when(projectService.checkProjectAndAuth(user, project, projectCode, FORCED_SUCCESS)).thenReturn(result); - Result successRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); - 
Assertions.assertEquals(Status.SUCCESS.getCode(), successRes.getCode().intValue()); - Assertions.assertNotNull(task.getEndTime()); + @Test + public void testForceTaskSuccess_withTaskInstanceNotFinished() { + User user = getAdminUser(); + TaskInstance task = getTaskInstance(); + ProcessInstance processInstance = getProcessInstance(); + processInstance.setState(WorkflowExecutionStatus.FAILURE); + doNothing().when(projectService).checkProjectAndAuthThrowException(user, task.getProjectCode(), FORCED_SUCCESS); + when(taskInstanceDao.queryOptionalById(task.getId())).thenReturn(Optional.of(task)); + when(workflowInstanceDao.queryOptionalById(task.getProcessInstanceId())) + .thenReturn(Optional.of(processInstance)); + assertThrowsServiceException( + Status.TASK_INSTANCE_STATE_OPERATION_ERROR, + () -> taskInstanceService.forceTaskSuccess(user, task.getProjectCode(), task.getId())); } @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java index 5746840e90b6..6925441b10db 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -41,7 +41,7 @@ import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.collections4.CollectionUtils; @@ -95,7 +95,7 @@ public class TenantServiceTest { private ResourcePermissionCheckService resourcePermissionCheckService; @Mock - private StorageOperate storageOperate; + private StorageOperator storageOperator; private static final String 
tenantCode = "hayden"; private static final String tenantDesc = "This is the tenant desc"; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java deleted file mode 100644 index f1721cbb4609..000000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.api.service; - -import org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService; -import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl; -import org.apache.dolphinscheduler.api.service.impl.UdfFuncServiceImpl; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.AuthorizationType; -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; - -import org.apache.commons.collections4.CollectionUtils; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockedStatic; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; -import org.mockito.junit.jupiter.MockitoSettings; -import org.mockito.quality.Strictness; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; - 
-/** - * udf func service test - */ -@ExtendWith(MockitoExtension.class) -@MockitoSettings(strictness = Strictness.LENIENT) -public class UdfFuncServiceTest { - - private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class); - - private MockedStatic mockedStaticPropertyUtils; - - @InjectMocks - private UdfFuncServiceImpl udfFuncService; - - @Mock - private UdfFuncMapper udfFuncMapper; - - @Mock - private UDFUserMapper udfUserMapper; - - @Mock - private StorageOperate storageOperate; - - @BeforeEach - public void setUp() { - mockedStaticPropertyUtils = Mockito.mockStatic(PropertyUtils.class); - } - - @Mock - private ResourcePermissionCheckService resourcePermissionCheckService; - - private static final Logger serviceLogger = LoggerFactory.getLogger(BaseServiceImpl.class); - private static final Logger udfLogger = LoggerFactory.getLogger(UdfFuncServiceImpl.class); - - @Test - public void testCreateUdfFunction() { - - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_CREATE, serviceLogger)).thenReturn(true); - Mockito.when( - resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, serviceLogger)) - .thenReturn(true); - // resource not exist - Result result = udfFuncService.createUdfFunction(getLoginUser(), "UdfFuncServiceTest", - "org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String", - "UdfFuncServiceTest", "UdfFuncServiceTest", "", UdfType.HIVE); - logger.info(result.toString()); - Assertions.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); - // success - try { - Mockito.when(storageOperate.exists("String")).thenReturn(true); - } catch (IOException e) { - logger.error("AmazonServiceException when checking resource: String"); - } - - result = udfFuncService.createUdfFunction(getLoginUser(), "UdfFuncServiceTest", - "org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String", - 
"UdfFuncServiceTest", "UdfFuncServiceTest", "", UdfType.HIVE); - logger.info(result.toString()); - Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - @Test - public void testQueryUdfFuncDetail() { - - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true); - Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{2}, 0, - serviceLogger)).thenReturn(true); - Mockito.when(udfFuncMapper.selectById(1)).thenReturn(getUdfFunc()); - // resource not exist - Result result = udfFuncService.queryUdfFuncDetail(getLoginUser(), 2); - logger.info(result.toString()); - Assertions.assertTrue(Status.RESOURCE_NOT_EXIST.getCode() == result.getCode()); - // success - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true); - Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{1}, 0, - serviceLogger)).thenReturn(true); - result = udfFuncService.queryUdfFuncDetail(getLoginUser(), 1); - logger.info(result.toString()); - Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode()); - } - - @Test - public void testUpdateUdfFunc() { - Mockito.when(udfFuncMapper.selectUdfById(1)).thenReturn(getUdfFunc()); - - // UDF_FUNCTION_NOT_EXIST - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true); - Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{12}, 0, - serviceLogger)).thenReturn(true); - Result result = udfFuncService.updateUdfFunc(getLoginUser(), 12, "UdfFuncServiceTest", - "org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String", 
- "UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, ""); - logger.info(result.toString()); - Assertions.assertTrue(Status.UDF_FUNCTION_NOT_EXIST.getCode() == result.getCode()); - - // RESOURCE_NOT_EXIST - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true); - Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{11}, 0, - serviceLogger)).thenReturn(true); - Mockito.when(udfFuncMapper.selectUdfById(11)).thenReturn(getUdfFunc()); - result = udfFuncService.updateUdfFunc(getLoginUser(), 11, "UdfFuncServiceTest", - "org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String", - "UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, ""); - logger.info(result.toString()); - Assertions.assertTrue(Status.RESOURCE_NOT_EXIST.getCode() == result.getCode()); - - // success - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_UPDATE, serviceLogger)).thenReturn(true); - try { - Mockito.when(storageOperate.exists("")).thenReturn(true); - } catch (IOException e) { - logger.error("AmazonServiceException when checking resource: "); - } - - result = udfFuncService.updateUdfFunc(getLoginUser(), 11, "UdfFuncServiceTest", - "org.apache.dolphinscheduler.api.service.UdfFuncServiceTest", "String", - "UdfFuncServiceTest", "UdfFuncServiceTest", UdfType.HIVE, ""); - logger.info(result.toString()); - Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode()); - - } - - @Test - public void testQueryUdfFuncListPaging() { - - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true); - Mockito.when( - resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, 
serviceLogger)) - .thenReturn(true); - Mockito.when( - resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, 1, udfLogger)) - .thenReturn(getSetIds()); - IPage page = new Page<>(1, 10); - page.setTotal(1L); - page.setRecords(getList()); - Mockito.when(udfFuncMapper.queryUdfFuncPaging(Mockito.any(Page.class), Mockito.anyList(), Mockito.eq("test"))) - .thenReturn(page); - Result result = udfFuncService.queryUdfFuncListPaging(getLoginUser(), "test", 1, 10); - logger.info(result.toString()); - PageInfo pageInfo = (PageInfo) result.getData(); - Assertions.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); - } - - @Test - public void testQueryUdfFuncList() { - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true); - Mockito.when( - resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 1, serviceLogger)) - .thenReturn(true); - Mockito.when( - resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, 1, udfLogger)) - .thenReturn(getSetIds()); - - User user = getLoginUser(); - user.setUserType(UserType.GENERAL_USER); - user.setId(1); - Mockito.when(udfFuncMapper.getUdfFuncByType(Collections.singletonList(1), UdfType.HIVE.ordinal())) - .thenReturn(getList()); - Result result = udfFuncService.queryUdfFuncList(user, UdfType.HIVE.ordinal()); - logger.info(result.toString()); - Assertions.assertTrue(Status.SUCCESS.getCode() == result.getCode()); - List udfFuncList = (List) result.getData(); - Assertions.assertTrue(CollectionUtils.isNotEmpty(udfFuncList)); - } - - @Test - public void testDelete() { - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_DELETE, serviceLogger)).thenReturn(true); - 
Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, new Object[]{122}, 0, - serviceLogger)).thenReturn(true); - - Mockito.when(udfFuncMapper.deleteById(Mockito.anyInt())).thenReturn(1); - Mockito.when(udfUserMapper.deleteByUdfFuncId(Mockito.anyInt())).thenReturn(1); - Result result = udfFuncService.delete(getLoginUser(), 122); - logger.info(result.toString()); - Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - } - - @Test - public void testVerifyUdfFuncByName() { - - Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.UDF, 1, - ApiFuncIdentificationConstant.UDF_FUNCTION_VIEW, serviceLogger)).thenReturn(true); - Mockito.when( - resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.UDF, null, 0, serviceLogger)) - .thenReturn(true); - // success - Mockito.when(udfFuncMapper.queryUdfByIdStr(null, "UdfFuncServiceTest")).thenReturn(getList()); - Result result = udfFuncService.verifyUdfFuncByName(getLoginUser(), "test"); - logger.info(result.toString()); - Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); - // exist - result = udfFuncService.verifyUdfFuncByName(getLoginUser(), "UdfFuncServiceTest"); - logger.info(result.toString()); - Assertions.assertEquals(Status.UDF_FUNCTION_EXISTS.getMsg(), result.getMsg()); - } - - private Set getSetIds() { - Set set = new HashSet(); - set.add(1); - return set; - } - - /** - * create admin user - * @return - */ - private User getLoginUser() { - - User loginUser = new User(); - loginUser.setUserType(UserType.ADMIN_USER); - loginUser.setId(1); - return loginUser; - } - - private List getList() { - List udfFuncList = new ArrayList<>(); - udfFuncList.add(getUdfFunc()); - return udfFuncList; - } - - /** - * get UdfFuncRequest id - */ - private UdfFunc getUdfFunc() { - UdfFunc udfFunc = new UdfFunc(); - udfFunc.setFuncName("UdfFuncServiceTest"); - 
udfFunc.setClassName("org.apache.dolphinscheduler.api.service.UdfFuncServiceTest"); - udfFunc.setResourceId(0); - udfFunc.setResourceName("UdfFuncServiceTest"); - udfFunc.setCreateTime(new Date()); - udfFunc.setDatabase("database"); - udfFunc.setUpdateTime(new Date()); - udfFunc.setType(UdfType.HIVE); - return udfFunc; - } - - @AfterEach - public void after() { - mockedStaticPropertyUtils.close(); - } -} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index 3cb71d97a05b..290ffefe2681 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -46,9 +46,8 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.collections4.CollectionUtils; @@ -107,9 +106,6 @@ public class UsersServiceTest { @Mock private MetricsCleanUpService metricsCleanUpService; - @Mock - private UDFUserMapper udfUserMapper; - @Mock private K8sNamespaceUserMapper k8sNamespaceUserMapper; @@ -117,7 +113,7 @@ public class UsersServiceTest { private ProjectMapper projectMapper; @Mock - private StorageOperate storageOperate; + private StorageOperator storageOperator; @Mock private ResourcePermissionCheckService resourcePermissionCheckService; @@ -532,33 +528,6 @@ public void testRevokeProjectById() { Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } - 
@Test - public void testGrantUDFFunction() { - String udfIds = "100000,120000"; - when(userMapper.selectById(1)).thenReturn(getUser()); - User loginUser = new User(); - - // user not exist - loginUser.setUserType(UserType.ADMIN_USER); - Map result = usersService.grantUDFFunction(loginUser, 2, udfIds); - logger.info(result.toString()); - Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); - - // success - when(udfUserMapper.deleteByUserId(1)).thenReturn(1); - result = usersService.grantUDFFunction(loginUser, 1, udfIds); - logger.info(result.toString()); - Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - - // ERROR: NO_CURRENT_OPERATING_PERMISSION - loginUser.setId(2); - loginUser.setUserType(UserType.GENERAL_USER); - when(userMapper.selectById(2)).thenReturn(loginUser); - result = this.usersService.grantUDFFunction(loginUser, 2, udfIds); - logger.info(result.toString()); - Assertions.assertEquals(Status.NO_CURRENT_OPERATING_PERMISSION, result.get(Constants.STATUS)); - } - @Test public void testGrantNamespaces() { String namespaceIds = "100000,120000"; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java index 08a541c5bb94..fce9aa3f1c08 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java @@ -30,7 +30,6 @@ import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import 
org.apache.dolphinscheduler.dao.mapper.EnvironmentWorkerGroupRelationMapper; @@ -65,8 +64,6 @@ @MockitoSettings(strictness = Strictness.LENIENT) public class WorkerGroupServiceTest { - private static final Logger logger = LoggerFactory.getLogger(WorkerGroupServiceTest.class); - private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class); private static final Logger serviceLogger = LoggerFactory.getLogger(WorkerGroupService.class); @@ -288,47 +285,6 @@ public void testQueryAllGroupWithDefault() { Assertions.assertEquals("default", workerGroups.toArray()[0]); } - @Test - public void giveNull_whenGetTaskWorkerGroup_expectNull() { - String nullWorkerGroup = workerGroupService.getTaskWorkerGroup(null); - Assertions.assertNull(nullWorkerGroup); - } - - @Test - public void giveCorrectTaskInstance_whenGetTaskWorkerGroup_expectTaskWorkerGroup() { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(1); - taskInstance.setWorkerGroup("cluster1"); - - String workerGroup = workerGroupService.getTaskWorkerGroup(taskInstance); - Assertions.assertEquals("cluster1", workerGroup); - } - - @Test - public void giveNullWorkerGroup_whenGetTaskWorkerGroup_expectProcessWorkerGroup() { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(1); - taskInstance.setProcessInstanceId(1); - ProcessInstance processInstance = new ProcessInstance(); - processInstance.setId(1); - processInstance.setWorkerGroup("cluster1"); - Mockito.when(processService.findProcessInstanceById(1)).thenReturn(processInstance); - - String workerGroup = workerGroupService.getTaskWorkerGroup(taskInstance); - Assertions.assertEquals("cluster1", workerGroup); - } - - @Test - public void giveNullTaskAndProcessWorkerGroup_whenGetTaskWorkerGroup_expectDefault() { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(1); - taskInstance.setProcessInstanceId(1); - Mockito.when(processService.findProcessInstanceById(1)).thenReturn(null); - - 
String defaultWorkerGroup = workerGroupService.getTaskWorkerGroup(taskInstance); - Assertions.assertEquals(Constants.DEFAULT_WORKER_GROUP, defaultWorkerGroup); - } - /** * get Group */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceTest.java index 14e1ed956b47..b8161af13dfd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceTest.java @@ -22,6 +22,7 @@ import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ENVIRONMENT_CREATE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ENVIRONMENT_DELETE; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.ENVIRONMENT_UPDATE; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; @@ -29,9 +30,11 @@ import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.api.utils.ServiceTestUtil; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; import org.apache.dolphinscheduler.dao.entity.Environment; import org.apache.dolphinscheduler.dao.entity.EnvironmentWorkerGroupRelation; import org.apache.dolphinscheduler.dao.entity.User; @@ -42,6 +45,7 @@ import org.apache.commons.collections4.CollectionUtils; import java.util.ArrayList; +import 
java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -53,6 +57,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; @@ -121,9 +126,23 @@ public void testCreateEnvironment() { when(environmentMapper.insert(any(Environment.class))).thenReturn(1); when(relationMapper.insert(any(EnvironmentWorkerGroupRelation.class))).thenReturn(1); + + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> environmentService.createEnvironment(adminUser, "testName", "test", + ServiceTestUtil.randomStringWithLengthN(512), workerGroups)); assertDoesNotThrow( () -> environmentService.createEnvironment(adminUser, "testName", "test", "test", workerGroups)); + when(environmentMapper.insert(any(Environment.class))).thenReturn(-1); + assertThrowsServiceException(Status.CREATE_ENVIRONMENT_ERROR, + () -> environmentService.createEnvironment(adminUser, "testName", "test", "test", workerGroups)); + + try (MockedStatic ignored = Mockito.mockStatic(CodeGenerateUtils.class)) { + when(CodeGenerateUtils.genCode()).thenThrow(CodeGenerateUtils.CodeGenerateException.class); + + assertThrowsServiceException(Status.INTERNAL_SERVER_ERROR_ARGS, + () -> environmentService.createEnvironment(adminUser, "testName", "test", "test", workerGroups)); + } } @Test @@ -156,25 +175,54 @@ public void testUpdateEnvironmentByCode() { assertThrowsServiceException(Status.ENVIRONMENT_NAME_EXISTS, () -> environmentService .updateEnvironmentByCode(adminUser, 2L, environmentName, getConfig(), getDesc(), workerGroups)); + when(environmentMapper.update(any(Environment.class), any(Wrapper.class))).thenReturn(-1); + assertThrowsServiceException(Status.UPDATE_ENVIRONMENT_ERROR, + () -> environmentService.updateEnvironmentByCode(adminUser, 1L, "testName", 
"test", "test", + workerGroups)); + when(environmentMapper.update(any(Environment.class), any(Wrapper.class))).thenReturn(1); + + assertThrowsServiceException(Status.DESCRIPTION_TOO_LONG_ERROR, + () -> environmentService.updateEnvironmentByCode(adminUser, 2L, environmentName, getConfig(), + ServiceTestUtil.randomStringWithLengthN(512), workerGroups)); + assertDoesNotThrow(() -> environmentService.updateEnvironmentByCode(adminUser, 1L, "testName", "test", "test", workerGroups)); + + assertDoesNotThrow(() -> environmentService.updateEnvironmentByCode(adminUser, 1L, "testName", "test", "test", + "")); + + when(relationMapper.queryByEnvironmentCode(any())) + .thenReturn(Collections.singletonList(getEnvironmentWorkerGroup())); + assertDoesNotThrow(() -> environmentService.updateEnvironmentByCode(adminUser, 1L, "testName", "test", "test", + "")); } @Test public void testQueryAllEnvironmentList() { + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, + 1, environmentServiceLogger)).thenReturn(Collections.emptySet()); + Map result = environmentService.queryAllEnvironmentList(getAdminUser()); + assertEquals(0, ((List) result.get(Constants.DATA_LIST)).size()); + Set ids = new HashSet<>(); ids.add(1); when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, 1, environmentServiceLogger)).thenReturn(ids); when(environmentMapper.selectBatchIds(ids)).thenReturn(Lists.newArrayList(getEnvironment())); - Map result = environmentService.queryAllEnvironmentList(getAdminUser()); + result = environmentService.queryAllEnvironmentList(getAdminUser()); logger.info(result.toString()); Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List list = (List) (result.get(Constants.DATA_LIST)); Assertions.assertEquals(1, list.size()); + + when(environmentMapper.selectBatchIds(ids)).thenReturn(Collections.emptyList()); + result = environmentService.queryAllEnvironmentList(getAdminUser()); + 
Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + list = (List) (result.get(Constants.DATA_LIST)); + Assertions.assertEquals(0, list.size()); } @Test @@ -186,9 +234,27 @@ public void testQueryEnvironmentListPaging() { .thenReturn(page); Result result = environmentService.queryEnvironmentListPaging(getAdminUser(), 1, 10, environmentName); - logger.info(result.toString()); PageInfo pageInfo = (PageInfo) result.getData(); Assertions.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); + + assertDoesNotThrow( + () -> environmentService.queryEnvironmentListPaging(getGeneralUser(), 1, 10, environmentName)); + + when(resourcePermissionCheckService.userOwnedResourceIdsAcquisition( + AuthorizationType.ENVIRONMENT, + 1, + environmentServiceLogger)).thenReturn(Collections.singleton(10)); + when(environmentMapper.queryEnvironmentListPagingByIds(any(Page.class), any(List.class), any(String.class))) + .thenReturn(page); + result = environmentService.queryEnvironmentListPaging(getGeneralUser(), 1, 10, environmentName); + assertEquals(0, result.getCode()); + assertEquals(1, ((PageInfo) result.getData()).getTotalList().size()); + + page.setRecords(Collections.emptyList()); + page.setTotal(0); + result = environmentService.queryEnvironmentListPaging(getGeneralUser(), 1, 10, environmentName); + assertEquals(0, result.getCode()); + assertEquals(0, ((PageInfo) result.getData()).getTotalList().size()); } @Test @@ -239,6 +305,10 @@ public void testDeleteEnvironmentByCode() { result = environmentService.deleteEnvironmentByCode(loginUser, 1L); logger.info(result.toString()); Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + + when(environmentMapper.deleteByCode(1L)).thenReturn(-1); + result = environmentService.deleteEnvironmentByCode(loginUser, 1L); + Assertions.assertEquals(Status.DELETE_ENVIRONMENT_ERROR, result.get(Constants.STATUS)); } @Test @@ -251,6 +321,9 @@ public void testVerifyEnvironment() { result = 
environmentService.verifyEnvironment(environmentName); logger.info(result.toString()); Assertions.assertEquals(Status.ENVIRONMENT_NAME_EXISTS, result.get(Constants.STATUS)); + + when(environmentMapper.queryByEnvironmentName(environmentName)).thenReturn(null); + assertDoesNotThrow(() -> environmentService.verifyEnvironment(environmentName)); } private Environment getEnvironment() { @@ -264,6 +337,13 @@ private Environment getEnvironment() { return environment; } + private EnvironmentWorkerGroupRelation getEnvironmentWorkerGroup() { + EnvironmentWorkerGroupRelation relation = new EnvironmentWorkerGroupRelation(); + relation.setEnvironmentCode(1L); + relation.setWorkerGroup("new_worker_group"); + return relation; + } + /** * create an environment description */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ServiceTestUtil.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ServiceTestUtil.java new file mode 100644 index 000000000000..f33571d30909 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ServiceTestUtil.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.utils; + +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.nio.charset.StandardCharsets; +import java.util.Random; + +public class ServiceTestUtil { + + public static String randomStringWithLengthN(int n) { + byte[] bitArray = new byte[n]; + new Random().nextBytes(bitArray); + return new String(bitArray, StandardCharsets.UTF_8); + } + + private static User getUser(Integer userId, String userName, UserType userType) { + User user = new User(); + user.setUserType(userType); + user.setId(userId); + user.setUserName(userName); + return user; + } + + public static User getAdminUser() { + return getUser(1, "admin", UserType.ADMIN_USER); + } + public static User getGeneralUser() { + return getUser(10, "user", UserType.GENERAL_USER); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidatorTest.java new file mode 100644 index 000000000000..11803cba92e1 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateDirectoryDtoValidatorTest.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowServiceException; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.dto.resources.CreateDirectoryDto; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import java.util.Locale; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.i18n.LocaleContextHolder; + +@ExtendWith(MockitoExtension.class) +class CreateDirectoryDtoValidatorTest { + + @Mock + private StorageOperator storageOperator; + + @Mock + private TenantDao tenantDao; + + @InjectMocks + private CreateDirectoryDtoValidator createDirectoryDtoValidator; + + private static final String BASE_DIRECTORY = "/tmp/dolphinscheduler"; + + private User loginUser; + + @BeforeEach + public void setup() { + when(storageOperator.getStorageBaseDirectory()).thenReturn(BASE_DIRECTORY); + loginUser = new User(); + loginUser.setTenantId(1); + 
LocaleContextHolder.setLocale(Locale.ENGLISH); + } + + @Test + void testValidate_notUnderBaseDirectory() { + CreateDirectoryDto createDirectoryDto = CreateDirectoryDto.builder() + .loginUser(loginUser) + .directoryAbsolutePath("/tmp") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp", + () -> createDirectoryDtoValidator.validate(createDirectoryDto)); + } + + @Test + public void testValidate_directoryPathContainsIllegalSymbolic() { + CreateDirectoryDto createDirectoryDto = CreateDirectoryDto.builder() + .loginUser(loginUser) + .directoryAbsolutePath("/tmp/dolphinscheduler/default/resources/..") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp/dolphinscheduler/default/resources/..", + () -> createDirectoryDtoValidator.validate(createDirectoryDto)); + } + + @Test + public void testValidate_directoryExist() { + CreateDirectoryDto createDirectoryDto = CreateDirectoryDto.builder() + .loginUser(loginUser) + .directoryAbsolutePath("/tmp/dolphinscheduler/default/resources/demo") + .build(); + when(storageOperator.exists(createDirectoryDto.getDirectoryAbsolutePath())).thenReturn(true); + assertThrowServiceException( + "Internal Server Error: The resource is already exist: /tmp/dolphinscheduler/default/resources/demo", + () -> createDirectoryDtoValidator.validate(createDirectoryDto)); + } + + @Test + public void testValidate_NoPermission() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("test"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + CreateDirectoryDto createDirectoryDto = CreateDirectoryDto.builder() + .loginUser(loginUser) + .directoryAbsolutePath("/tmp/dolphinscheduler/default/resources/demo") + .build(); + when(storageOperator.getResourceMetaData(createDirectoryDto.getDirectoryAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(createDirectoryDto.getDirectoryAbsolutePath()) 
+ .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("demo") + .isDirectory(true) + .tenant("default") + .build()); + when(storageOperator.exists(createDirectoryDto.getDirectoryAbsolutePath())).thenReturn(false); + assertThrowServiceException( + "Internal Server Error: The user's tenant is test have no permission to access the resource: /tmp/dolphinscheduler/default/resources/demo", + () -> createDirectoryDtoValidator.validate(createDirectoryDto)); + } + + @Test + public void testValidate_pathNotDirectory() { + CreateDirectoryDto createDirectoryDto = CreateDirectoryDto.builder() + .loginUser(loginUser) + .directoryAbsolutePath("/tmp/dolphinscheduler/default/resources/demo.sql") + .build(); + loginUser.setUserType(UserType.ADMIN_USER); + assertThrowServiceException( + "Internal Server Error: The path is not a directory: /tmp/dolphinscheduler/default/resources/demo.sql", + () -> createDirectoryDtoValidator.validate(createDirectoryDto)); + } + +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidatorTest.java new file mode 100644 index 000000000000..6312346e40cb --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/CreateFileFromContentDtoValidatorTest.java @@ -0,0 +1,188 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowServiceException; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.dto.resources.CreateFileFromContentDto; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import java.util.Locale; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.i18n.LocaleContextHolder; + +@ExtendWith(MockitoExtension.class) +class CreateFileFromContentDtoValidatorTest { + + @Mock + private StorageOperator storageOperator; + + @Mock + private TenantDao tenantDao; + + @InjectMocks + private CreateFileFromContentDtoValidator createFileFromContentDtoValidator; + + private static final String BASE_DIRECTORY = "/tmp/dolphinscheduler"; + + private User loginUser; + + @BeforeEach + public void setup() { + when(storageOperator.getStorageBaseDirectory()).thenReturn(BASE_DIRECTORY); + loginUser = new User(); + 
loginUser.setTenantId(1); + LocaleContextHolder.setLocale(Locale.ENGLISH); + } + + @Test + void testValidate_notUnderBaseDirectory() { + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp") + .fileContent("select * from t") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp", + () -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + + } + + @Test + public void testValidate_filePathContainsIllegalSymbolic() { + CreateFileFromContentDto renameDirectoryDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/..") + .fileContent("select * from t") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp/dolphinscheduler/default/resources/..", + () -> createFileFromContentDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate_IsNotFile() { + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .fileContent("select * from t") + .build(); + assertThrowServiceException( + "Internal Server Error: The path is not a file: /tmp/dolphinscheduler/default/resources/a", + () -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + } + + @Test + public void testValidate_fileAlreadyExist() { + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .fileContent("select * from t") + .build(); + when(storageOperator.exists(createFileFromContentDto.getFileAbsolutePath())).thenReturn(true); + assertThrowServiceException( + "Internal Server Error: The resource is already exist: /tmp/dolphinscheduler/default/resources/a.sql", + 
() -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + } + + @Test + public void testValidate_fileNoPermission() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("test"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .fileContent("select * from t") + .build(); + when(storageOperator.exists(createFileFromContentDto.getFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(createFileFromContentDto.getFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(createFileFromContentDto.getFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.sql") + .isDirectory(false) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The user's tenant is test have no permission to access the resource: /tmp/dolphinscheduler/default/resources/a.sql", + () -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + } + + @Test + public void testValidate_contentIsInvalidated() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .fileContent("") + .build(); + when(storageOperator.exists(createFileFromContentDto.getFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(createFileFromContentDto.getFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(createFileFromContentDto.getFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) 
+ .resourceRelativePath("a.sql") + .isDirectory(false) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The file content is null", + () -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + } + + @Test + public void testValidate() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + CreateFileFromContentDto createFileFromContentDto = CreateFileFromContentDto.builder() + .loginUser(loginUser) + .fileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .fileContent("select * from t") + .build(); + when(storageOperator.exists(createFileFromContentDto.getFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(createFileFromContentDto.getFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(createFileFromContentDto.getFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.sql") + .isDirectory(false) + .tenant("default") + .build()); + assertDoesNotThrow(() -> createFileFromContentDtoValidator.validate(createFileFromContentDto)); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidatorTest.java new file mode 100644 index 000000000000..69c31f2a0e66 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/FetchFileContentDtoValidatorTest.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.validator.resource; + +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowServiceException; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.AssertionsHelper; +import org.apache.dolphinscheduler.api.dto.resources.FetchFileContentDto; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import java.util.Locale; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.context.i18n.LocaleContextHolder; + +@MockitoSettings(strictness = Strictness.LENIENT) +@ExtendWith(MockitoExtension.class) +class FetchFileContentDtoValidatorTest { + + @Mock + private StorageOperator storageOperator; + + @Mock + private TenantDao tenantDao; + + @InjectMocks + private FetchFileContentDtoValidator 
fetchFileContentDtoValidator; + + private static final String BASE_DIRECTORY = "/tmp/dolphinscheduler"; + + private User loginUser; + + @BeforeEach + public void setup() { + when(storageOperator.getStorageBaseDirectory()).thenReturn(BASE_DIRECTORY); + loginUser = new User(); + loginUser.setTenantId(1); + LocaleContextHolder.setLocale(Locale.ENGLISH); + } + + @Test + void testValidate_skipLineNumInvalid() { + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp") + .skipLineNum(-1) + .limit(-1) + .build(); + assertThrowServiceException( + "Internal Server Error: skipLineNum must be greater than or equal to 0", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + + } + + @Test + void testValidate_notUnderBaseDirectory() { + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp") + .skipLineNum(0) + .limit(-1) + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + + } + + @Test + public void testValidate_filePathContainsIllegalSymbolic() { + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp/dolphinscheduler/default/resources/..") + .skipLineNum(0) + .limit(-1) + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp/dolphinscheduler/default/resources/..", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + } + + @Test + public void testValidate_IsNotFile() { + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .skipLineNum(0) + .limit(-1) + .build(); + assertThrowServiceException( + "Internal Server Error: The path is not a 
file: /tmp/dolphinscheduler/default/resources/a", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + } + + @Test + public void testValidate_fileNoPermission() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("test"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .skipLineNum(0) + .limit(-1) + .build(); + when(storageOperator.exists(fetchFileContentDto.getResourceFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(fetchFileContentDto.getResourceFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(fetchFileContentDto.getResourceFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.sql") + .isDirectory(false) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The user's tenant is test have no permission to access the resource: /tmp/dolphinscheduler/default/resources/a.sql", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + } + + @Test + void validate_fileExtensionInvalid() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.jar") + .skipLineNum(0) + .limit(-1) + .build(); + when(storageOperator.exists(fetchFileContentDto.getResourceFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(fetchFileContentDto.getResourceFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(fetchFileContentDto.getResourceFileAbsolutePath()) + 
.resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.jar") + .isDirectory(false) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The file type: jar cannot be fetched", + () -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + } + + @Test + void validate() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + FetchFileContentDto fetchFileContentDto = FetchFileContentDto.builder() + .loginUser(loginUser) + .resourceFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.sql") + .skipLineNum(0) + .limit(-1) + .build(); + when(storageOperator.exists(fetchFileContentDto.getResourceFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(fetchFileContentDto.getResourceFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(fetchFileContentDto.getResourceFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.sql") + .isDirectory(false) + .tenant("default") + .build()); + AssertionsHelper.assertDoesNotThrow(() -> fetchFileContentDtoValidator.validate(fetchFileContentDto)); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameDirectoryDtoValidatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameDirectoryDtoValidatorTest.java new file mode 100644 index 000000000000..43e73f3b6043 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameDirectoryDtoValidatorTest.java @@ -0,0 +1,188 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow; +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowServiceException; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.dto.resources.RenameDirectoryDto; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import java.util.Locale; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.i18n.LocaleContextHolder; + +@ExtendWith(MockitoExtension.class) +class RenameDirectoryDtoValidatorTest { + + @Mock + private StorageOperator storageOperator; + + @Mock + private TenantDao tenantDao; + + @InjectMocks + private RenameDirectoryDtoValidator renameDirectoryDtoValidator; + + private static final String BASE_DIRECTORY = "/tmp/dolphinscheduler"; + + private User loginUser; + + 
@BeforeEach + public void setup() { + when(storageOperator.getStorageBaseDirectory()).thenReturn(BASE_DIRECTORY); + loginUser = new User(); + loginUser.setTenantId(1); + LocaleContextHolder.setLocale(Locale.ENGLISH); + } + + @Test + void testValidate_notUnderBaseDirectory() { + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + .loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp") + .targetDirectoryAbsolutePath("/tmp1") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp", + () -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + + } + + @Test + public void testValidate_directoryPathContainsIllegalSymbolic() { + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + .loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/..") + .targetDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp/dolphinscheduler/default/resources/..", + () -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate_originDirectoryNotExist() { + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + .loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .targetDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/b") + .build(); + assertThrowServiceException( + "Internal Server Error: Thr resource is not exists: /tmp/dolphinscheduler/default/resources/a", + () -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate_originDirectoryNoPermission() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("test"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + 
.loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .targetDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/b") + .build(); + when(storageOperator.exists(renameDirectoryDto.getOriginDirectoryAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getOriginDirectoryAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginDirectoryAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a") + .isDirectory(true) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The user's tenant is test have no permission to access the resource: /tmp/dolphinscheduler/default/resources/a", + () -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate_targetDirectoryAlreadyExist() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + .loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .targetDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/b") + .build(); + when(storageOperator.exists(renameDirectoryDto.getOriginDirectoryAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getOriginDirectoryAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginDirectoryAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a") + .isDirectory(true) + .tenant("default") + .build()); + + when(storageOperator.exists(renameDirectoryDto.getTargetDirectoryAbsolutePath())).thenReturn(true); + assertThrowServiceException( + "Internal Server Error: The resource is already exist: 
/tmp/dolphinscheduler/default/resources/b", + () -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameDirectoryDto renameDirectoryDto = RenameDirectoryDto.builder() + .loginUser(loginUser) + .originDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .targetDirectoryAbsolutePath("/tmp/dolphinscheduler/default/resources/b") + .build(); + when(storageOperator.exists(renameDirectoryDto.getOriginDirectoryAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getOriginDirectoryAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginDirectoryAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a") + .isDirectory(true) + .tenant("default") + .build()); + + when(storageOperator.exists(renameDirectoryDto.getTargetDirectoryAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getTargetDirectoryAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginDirectoryAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("b") + .isDirectory(true) + .tenant("default") + .build()); + + assertDoesNotThrow(() -> renameDirectoryDtoValidator.validate(renameDirectoryDto)); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileDtoValidatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileDtoValidatorTest.java new file mode 100644 index 000000000000..79fcd8c12483 --- /dev/null +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/validator/resource/RenameFileDtoValidatorTest.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.validator.resource; + +import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowServiceException; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.dto.resources.RenameFileDto; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.repository.TenantDao; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; + +import java.util.Locale; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.i18n.LocaleContextHolder; + 
+@ExtendWith(MockitoExtension.class) +public class RenameFileDtoValidatorTest { + + @Mock + private StorageOperator storageOperator; + + @Mock + private TenantDao tenantDao; + + @InjectMocks + private RenameFileDtoValidator renameFileDtoValidator; + + private static final String BASE_DIRECTORY = "/tmp/dolphinscheduler"; + + private User loginUser; + + @BeforeEach + public void setup() { + when(storageOperator.getStorageBaseDirectory()).thenReturn(BASE_DIRECTORY); + loginUser = new User(); + loginUser.setTenantId(1); + LocaleContextHolder.setLocale(Locale.ENGLISH); + } + + @Test + void testValidate_notUnderBaseDirectory() { + RenameFileDto renameFileDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp") + .targetFileAbsolutePath("/tmp1") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp", + () -> renameFileDtoValidator.validate(renameFileDto)); + + } + + @Test + public void testValidate_fileAbsolutePathContainsIllegalSymbolic() { + RenameFileDto renameFileDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/../a.txt") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + assertThrowServiceException( + "Internal Server Error: Invalidated resource path: /tmp/dolphinscheduler/default/resources/../a.txt", + () -> renameFileDtoValidator.validate(renameFileDto)); + } + + @Test + public void testValidate_originFileNotExist() { + RenameFileDto renameFileDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.txt") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + assertThrowServiceException( + "Internal Server Error: Thr resource is not exists: /tmp/dolphinscheduler/default/resources/a.txt", + () -> renameFileDtoValidator.validate(renameFileDto)); + } + + @Test + public void 
testValidate_originFileIsNotFile() { + RenameFileDto renameFileDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + when(storageOperator.exists(renameFileDto.getOriginFileAbsolutePath())).thenReturn(true); + assertThrowServiceException( + "Internal Server Error: The path is not a file: /tmp/dolphinscheduler/default/resources/a", + () -> renameFileDtoValidator.validate(renameFileDto)); + } + + @Test + public void testValidate_originFileNoPermission() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("test"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameFileDto renameFileDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.txt") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + when(storageOperator.exists(renameFileDto.getOriginFileAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameFileDto.getOriginFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameFileDto.getOriginFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.txt") + .isDirectory(false) + .tenant("default") + .build()); + assertThrowServiceException( + "Internal Server Error: The user's tenant is test have no permission to access the resource: /tmp/dolphinscheduler/default/resources/a.txt", + () -> renameFileDtoValidator.validate(renameFileDto)); + } + + @Test + public void testValidate_targetFileAlreadyExist() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameFileDto renameDirectoryDto = RenameFileDto.builder() + .loginUser(loginUser) + 
.originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.txt") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + when(storageOperator.exists(renameDirectoryDto.getOriginFileAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getOriginFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.txt") + .isDirectory(false) + .tenant("default") + .build()); + + when(storageOperator.exists(renameDirectoryDto.getTargetFileAbsolutePath())).thenReturn(true); + assertThrowServiceException( + "Internal Server Error: The resource is already exist: /tmp/dolphinscheduler/default/resources/b.txt", + () -> renameFileDtoValidator.validate(renameDirectoryDto)); + } + + @Test + public void testValidate() { + Tenant tenant = new Tenant(); + tenant.setTenantCode("default"); + when(tenantDao.queryOptionalById(loginUser.getTenantId())).thenReturn(Optional.of(tenant)); + + RenameFileDto renameDirectoryDto = RenameFileDto.builder() + .loginUser(loginUser) + .originFileAbsolutePath("/tmp/dolphinscheduler/default/resources/a.txt") + .targetFileAbsolutePath("/tmp/dolphinscheduler/default/resources/b.txt") + .build(); + when(storageOperator.exists(renameDirectoryDto.getOriginFileAbsolutePath())).thenReturn(true); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getOriginFileAbsolutePath())) + .thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getOriginFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("a.txt") + .isDirectory(false) + .tenant("default") + .build()); + + when(storageOperator.exists(renameDirectoryDto.getTargetFileAbsolutePath())).thenReturn(false); + when(storageOperator.getResourceMetaData(renameDirectoryDto.getTargetFileAbsolutePath())) + 
.thenReturn(ResourceMetadata.builder() + .resourceAbsolutePath(renameDirectoryDto.getTargetFileAbsolutePath()) + .resourceBaseDirectory(BASE_DIRECTORY) + .resourceRelativePath("b.txt") + .isDirectory(false) + .tenant("default") + .build()); + + assertDoesNotThrow(() -> renameFileDtoValidator.validate(renameDirectoryDto)); + } + +} diff --git a/dolphinscheduler-api/src/test/resources/application.yaml b/dolphinscheduler-api/src/test/resources/application.yaml index 26536d631f68..5eb7e1f8d7e9 100644 --- a/dolphinscheduler-api/src/test/resources/application.yaml +++ b/dolphinscheduler-api/src/test/resources/application.yaml @@ -44,6 +44,17 @@ mybatis-plus: registry: type: zookeeper + zookeeper: + namespace: dolphinscheduler + connect-string: localhost:2181 + retry-policy: + base-sleep-time: 60ms + max-sleep: 300ms + max-retries: 5 + session-timeout: 30s + connection-timeout: 9s + block-until-connected: 600ms + digest: ~ api: audit-enable: true diff --git a/dolphinscheduler-api/src/test/resources/logback.xml b/dolphinscheduler-api/src/test/resources/logback.xml new file mode 100644 index 000000000000..9159c3b02d49 --- /dev/null +++ b/dolphinscheduler-api/src/test/resources/logback.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS Z} %logger{10}:[%line] - %msg%n + + UTF-8 + + + + + ${log.base}/dolphinscheduler-api.log + + ${log.base}/dolphinscheduler-api.%d{yyyy-MM-dd_HH}.%i.log + 168 + 64MB + 50GB + true + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS Z} %logger{10}:[%line] - %msg%n + + UTF-8 + + + + + + + + + + + + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/pom.xml similarity index 50% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml rename to dolphinscheduler-authentication/dolphinscheduler-aws-authentication/pom.xml index e4f036ae7df9..9bad537cd918 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/pom.xml @@ -17,69 +17,48 @@ --> - 4.0.0 org.apache.dolphinscheduler - dolphinscheduler-task-plugin + dolphinscheduler-authentication dev-SNAPSHOT - dolphinscheduler-task-pigeon - jar + dolphinscheduler-aws-authentication + - org.apache.dolphinscheduler - dolphinscheduler-task-api - ${project.version} - - - org.apache.dolphinscheduler - dolphinscheduler-spi - provided - - - org.apache.commons - commons-collections4 - - - org.slf4j - slf4j-api + com.amazonaws + aws-java-sdk-emr - - com.github.dreamhead - moco-core - 1.2.0 - test + com.amazonaws + aws-java-sdk-s3 + - com.github.dreamhead - moco-runner - 1.2.0 - test - - - commons-cli - commons-cli - - + com.amazonaws + aws-java-sdk-sagemaker - org.java-websocket - Java-WebSocket + com.amazonaws + aws-java-sdk-dms - org.apache.httpcomponents - httpclient + software.amazon.awssdk + datasync + - org.apache.httpcomponents - httpcore + org.slf4j + slf4j-api + provided + + diff --git a/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderFactor.java b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderFactor.java new file mode 100644 index 000000000000..c79c70d8c23e --- /dev/null +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderFactor.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.authentication.aws; + +import static org.apache.dolphinscheduler.authentication.aws.AwsConfigurationKeys.AWS_AUTHENTICATION_TYPE; + +import java.util.Map; + +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; + +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.InstanceProfileCredentialsProvider; + +@Slf4j +@UtilityClass +public class AWSCredentialsProviderFactor { + + public static AWSCredentialsProvider credentialsProvider(Map awsProperties) { + String awsAuthenticationType = awsProperties.getOrDefault( + AWS_AUTHENTICATION_TYPE, AWSCredentialsProviderType.STATIC_CREDENTIALS_PROVIDER.getName()); + AWSCredentialsProviderType awsCredentialsProviderType = + AWSCredentialsProviderType.of(awsAuthenticationType).orElse(null); + if (awsCredentialsProviderType == null) { + throw new IllegalArgumentException( + "The aws.credentials.provider.type: " + awsAuthenticationType + " is invalidated"); + } + switch (awsCredentialsProviderType) { + case STATIC_CREDENTIALS_PROVIDER: + return createAWSStaticCredentialsProvider(awsProperties); + case INSTANCE_PROFILE_CREDENTIALS_PROVIDER: + return createInstanceProfileCredentialsProvider(); + default: + throw new IllegalArgumentException( + "The 
aws.credentials.provider.type: " + awsAuthenticationType + " is invalidated"); + } + + } + + private static AWSCredentialsProvider createAWSStaticCredentialsProvider(Map awsProperties) { + String awsAccessKeyId = awsProperties.get(AwsConfigurationKeys.AWS_ACCESS_KEY_ID); + String awsSecretAccessKey = awsProperties.get(AwsConfigurationKeys.AWS_SECRET); + final BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey); + AWSStaticCredentialsProvider awsStaticCredentialsProvider = + new AWSStaticCredentialsProvider(basicAWSCredentials); + log.info("AWSStaticCredentialsProvider created successfully"); + return awsStaticCredentialsProvider; + } + + private static AWSCredentialsProvider createInstanceProfileCredentialsProvider() { + InstanceProfileCredentialsProvider instanceProfileCredentialsProvider = + InstanceProfileCredentialsProvider.getInstance(); + log.info("InstanceProfileCredentialsProvider created successfully"); + return instanceProfileCredentialsProvider; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderType.java similarity index 53% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java rename to dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderType.java index 9c4266bf7a30..9b932f155436 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSCredentialsProviderType.java @@ -15,45 +15,35 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.enums; +package org.apache.dolphinscheduler.authentication.aws; -import com.baomidou.mybatisplus.annotation.EnumValue; +import java.util.Optional; -/** - * UDF type - */ -public enum UdfType { +import lombok.Getter; - /** - * 0 hive; 1 spark - */ - HIVE(0, "hive"), - SPARK(1, "spark"); +@Getter +public enum AWSCredentialsProviderType { - UdfType(int code, String descp) { - this.code = code; - this.descp = descp; - } + STATIC_CREDENTIALS_PROVIDER("AWSStaticCredentialsProvider"), + INSTANCE_PROFILE_CREDENTIALS_PROVIDER("InstanceProfileCredentialsProvider"), + ; - @EnumValue - private final int code; - private final String descp; + private final String name; - public int getCode() { - return code; + AWSCredentialsProviderType(String name) { + this.name = name; } - public String getDescp() { - return descp; - } - - public static UdfType of(int type) { - for (UdfType ut : values()) { - if (ut.getCode() == type) { - return ut; + public static Optional of(String name) { + if (name == null) { + return Optional.empty(); + } + for (AWSCredentialsProviderType type : values()) { + if (type.getName().equalsIgnoreCase(name)) { + return Optional.of(type); } } - throw new IllegalArgumentException("invalid type : " + type); + return Optional.empty(); } } diff --git a/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSDatabaseMigrationServiceClientFactory.java b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSDatabaseMigrationServiceClientFactory.java new file mode 100644 index 000000000000..2056268f2bbe --- /dev/null +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AWSDatabaseMigrationServiceClientFactory.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.authentication.aws; + +import java.util.Map; + +import lombok.experimental.UtilityClass; + +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.regions.Regions; +import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationService; +import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationServiceClientBuilder; + +@UtilityClass +public class AWSDatabaseMigrationServiceClientFactory { + + public AWSDatabaseMigrationService createAWSDatabaseMigrationServiceClient(Map awsProperties) { + AWSCredentialsProvider awsCredentialsProvider = AWSCredentialsProviderFactor.credentialsProvider(awsProperties); + Regions regions = Regions.fromName(awsProperties.get(AwsConfigurationKeys.AWS_REGION)); + String endpoint = awsProperties.get(AwsConfigurationKeys.AWS_ENDPOINT); + + if (endpoint != null && !endpoint.isEmpty()) { + return AWSDatabaseMigrationServiceClientBuilder + .standard() + .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, regions.getName())) + .withCredentials(awsCredentialsProvider) + .build(); + } else { + return AWSDatabaseMigrationServiceClientBuilder + 
.standard() + .withCredentials(awsCredentialsProvider) + .withRegion(regions) + .build(); + } + } + +} diff --git a/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonElasticMapReduceClientFactory.java b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonElasticMapReduceClientFactory.java new file mode 100644 index 000000000000..ea00473b7953 --- /dev/null +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonElasticMapReduceClientFactory.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.authentication.aws; + +import java.util.Map; + +import lombok.experimental.UtilityClass; + +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.regions.Regions; +import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce; +import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClientBuilder; + +@UtilityClass +public class AmazonElasticMapReduceClientFactory { + + public AmazonElasticMapReduce createAmazonElasticMapReduceClient(Map awsProperties) { + AWSCredentialsProvider awsCredentialsProvider = AWSCredentialsProviderFactor.credentialsProvider(awsProperties); + Regions regions = Regions.fromName(awsProperties.get(AwsConfigurationKeys.AWS_REGION)); + String endpoint = awsProperties.get(AwsConfigurationKeys.AWS_ENDPOINT); + + if (endpoint != null && !endpoint.isEmpty()) { + return AmazonElasticMapReduceClientBuilder + .standard() + .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, regions.getName())) + .withCredentials(awsCredentialsProvider) + .build(); + } else { + return AmazonElasticMapReduceClientBuilder + .standard() + .withCredentials(awsCredentialsProvider) + .withRegion(regions) + .build(); + } + } + +} diff --git a/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonS3ClientFactory.java b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonS3ClientFactory.java new file mode 100644 index 000000000000..c45e4de9ea4f --- /dev/null +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/java/org/apache/dolphinscheduler/authentication/aws/AmazonS3ClientFactory.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.authentication.aws; + +import java.util.Map; + +import lombok.experimental.UtilityClass; + +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.regions.Regions; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; + +@UtilityClass +public class AmazonS3ClientFactory { + + public AmazonS3 createAmazonS3Client(Map awsProperties) { + AWSCredentialsProvider awsCredentialsProvider = AWSCredentialsProviderFactor.credentialsProvider(awsProperties); + Regions regions = Regions.fromName(awsProperties.get(AwsConfigurationKeys.AWS_REGION)); + String endpoint = awsProperties.get(AwsConfigurationKeys.AWS_ENDPOINT); + + if (endpoint != null && !endpoint.isEmpty()) { + return AmazonS3ClientBuilder + .standard() + .withPathStyleAccessEnabled(true) + .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, regions.getName())) + .withCredentials(awsCredentialsProvider) + .build(); + } else { + return AmazonS3ClientBuilder + .standard() + .withCredentials(awsCredentialsProvider) + .withRegion(regions) + .build(); + } + } + +} diff --git 
package org.apache.dolphinscheduler.authentication.aws;

import java.util.Map;

import lombok.experimental.UtilityClass;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.sagemaker.AmazonSageMaker;
import com.amazonaws.services.sagemaker.AmazonSageMakerClientBuilder;

/**
 * Factory that builds {@link AmazonSageMaker} clients from a flat AWS property map.
 */
@UtilityClass
public class AmazonSageMakerClientFactory {

    /**
     * Create an {@link AmazonSageMaker} client.
     *
     * <p>When {@link AwsConfigurationKeys#AWS_ENDPOINT} is present and non-empty
     * the client is built against that custom endpoint; otherwise a plain
     * region-based client is built.
     *
     * @param awsProperties AWS settings; must contain
     *        {@link AwsConfigurationKeys#AWS_REGION} plus whatever keys
     *        {@code AWSCredentialsProviderFactor} needs to resolve credentials
     * @return a ready-to-use SageMaker client
     * @throws IllegalArgumentException if the configured region name is unknown
     */
    public AmazonSageMaker createAmazonSageMakerClient(Map<String, String> awsProperties) {
        AWSCredentialsProvider awsCredentialsProvider =
                AWSCredentialsProviderFactor.credentialsProvider(awsProperties);
        Regions regions = Regions.fromName(awsProperties.get(AwsConfigurationKeys.AWS_REGION));
        String endpoint = awsProperties.get(AwsConfigurationKeys.AWS_ENDPOINT);

        if (endpoint != null && !endpoint.isEmpty()) {
            return AmazonSageMakerClientBuilder
                    .standard()
                    .withEndpointConfiguration(
                            new AwsClientBuilder.EndpointConfiguration(endpoint, regions.getName()))
                    .withCredentials(awsCredentialsProvider)
                    .build();
        } else {
            return AmazonSageMakerClientBuilder
                    .standard()
                    .withCredentials(awsCredentialsProvider)
                    .withRegion(regions)
                    .build();
        }
    }
}
package org.apache.dolphinscheduler.authentication.aws;

/**
 * Property keys used to configure AWS authentication and client construction.
 *
 * <p>Values for these keys are read from flat {@code Map<String, String>}
 * configurations (see {@code aws.yaml}) by the client factories in this package.
 */
public final class AwsConfigurationKeys {

    /**
     * Credentials provider type, e.g. {@code AWSStaticCredentialsProvider}
     * or {@code InstanceProfileCredentialsProvider}.
     */
    public static final String AWS_AUTHENTICATION_TYPE = "credentials.provider.type";
    /** AWS region name, e.g. {@code us-east-1}. */
    public static final String AWS_REGION = "region";
    /** Optional custom service endpoint (e.g. a MinIO URL); empty/absent means the default AWS endpoint. */
    public static final String AWS_ENDPOINT = "endpoint";

    /** Static access key id (used with {@code AWSStaticCredentialsProvider}). */
    public static final String AWS_ACCESS_KEY_ID = "access.key.id";
    /** Static secret access key (used with {@code AWSStaticCredentialsProvider}). */
    public static final String AWS_SECRET = "access.key.secret";

    private AwsConfigurationKeys() {
        // constants holder - not instantiable
    }
}
package org.apache.dolphinscheduler.authentication.aws;

import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.datasync.DataSyncClient;

import java.util.Map;

import lombok.experimental.UtilityClass;

/**
 * Factory that builds AWS SDK v2 {@link DataSyncClient} instances from a flat
 * AWS property map.
 */
@UtilityClass
public class DataSyncClientFactory {

    /**
     * Create a {@link DataSyncClient} using static credentials.
     *
     * <p>NOTE(review): unlike the v1 factories in this package, this one always
     * uses static credentials and ignores
     * {@link AwsConfigurationKeys#AWS_AUTHENTICATION_TYPE} and
     * {@link AwsConfigurationKeys#AWS_ENDPOINT} — confirm this is intentional.
     *
     * @param awsProperties AWS settings; must contain
     *        {@link AwsConfigurationKeys#AWS_ACCESS_KEY_ID},
     *        {@link AwsConfigurationKeys#AWS_SECRET} and
     *        {@link AwsConfigurationKeys#AWS_REGION}
     * @return a ready-to-use DataSync client
     */
    public DataSyncClient createDataSyncClient(Map<String, String> awsProperties) {
        // todo: upgrade the version of aws sdk
        String awsAccessKeyId = awsProperties.get(AwsConfigurationKeys.AWS_ACCESS_KEY_ID);
        String awsSecretAccessKey = awsProperties.get(AwsConfigurationKeys.AWS_SECRET);
        final AwsBasicCredentials basicAWSCredentials =
                AwsBasicCredentials.create(awsAccessKeyId, awsSecretAccessKey);
        final AwsCredentialsProvider awsCredentialsProvider =
                StaticCredentialsProvider.create(basicAWSCredentials);

        // create a datasync client
        return DataSyncClient.builder()
                .region(Region.of(awsProperties.get(AwsConfigurationKeys.AWS_REGION)))
                .credentialsProvider(awsCredentialsProvider)
                .build();
    }
}
b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources/aws.yaml new file mode 100644 index 000000000000..6d453bb78a07 --- /dev/null +++ b/dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources/aws.yaml @@ -0,0 +1,65 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +aws: + s3: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: accessKey123 + access.key.secret: secretKey123 + region: us-east-1 + bucket.name: dolphinscheduler + endpoint: http://s3:9000 + emr: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + sagemaker: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + dms: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + datasync: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + diff --git a/dolphinscheduler-authentication/pom.xml b/dolphinscheduler-authentication/pom.xml new file mode 100644 index 000000000000..b49c4d37d09b --- /dev/null +++ b/dolphinscheduler-authentication/pom.xml @@ -0,0 +1,46 @@ + + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + dev-SNAPSHOT + + + dolphinscheduler-authentication + pom + + + dolphinscheduler-aws-authentication + + + + + + org.apache.dolphinscheduler + dolphinscheduler-bom + ${project.version} + pom + import + + + + + diff --git a/dolphinscheduler-bom/pom.xml b/dolphinscheduler-bom/pom.xml index 18e75de57ba0..0635239f8ce2 100644 --- a/dolphinscheduler-bom/pom.xml +++ b/dolphinscheduler-bom/pom.xml @@ -31,12 +31,12 @@ 4.1.53.Final 2.7.3 2.4.1 - 1.5.1 3.5.2 2.3.2 1.2.20 2.12.0 0.5.11 + 0.7.1 1.41.0 1.11 @@ -168,11 +168,6 @@ test - - org.java-websocket - Java-WebSocket - ${java-websocket.version} - com.baomidou @@ -943,6 +938,13 @@ + + org.testcontainers + testcontainers + ${testcontainer.version} + test + + org.testcontainers mysql @@ -957,6 +959,13 @@ test + + org.testcontainers + minio + ${testcontainer.version} + test + + org.checkerframework checker-qual diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index b03fdd74832b..6c83c580ae25 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -41,6 +41,10 @@ + + org.apache.dolphinscheduler + dolphinscheduler-aws-authentication + commons-io commons-io @@ -98,6 +102,11 @@ esdk-obs-java-bundle + + com.azure + azure-storage-blob + + com.github.oshi oshi-core diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/CommonConfiguration.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/CommonConfiguration.java new file mode 100644 index 000000000000..5411e4cbc154 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/CommonConfiguration.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common; + +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ComponentScan("org.apache.dolphinscheduler.common") +public class CommonConfiguration { +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegate.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegate.java index 742e745fe40f..620f74ef95cf 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegate.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegate.java @@ -31,12 +31,18 @@ * This class will get the property by the priority of the following: env > jvm > properties. */ @Slf4j -public class ImmutablePriorityPropertyDelegate extends ImmutablePropertyDelegate { +public class ImmutablePriorityPropertyDelegate implements IPropertyDelegate { private static final Map>> configValueMap = new ConcurrentHashMap<>(); - public ImmutablePriorityPropertyDelegate(String propertyAbsolutePath) { - super(propertyAbsolutePath); + private ImmutablePropertyDelegate immutablePropertyDelegate; + + private ImmutableYamlDelegate immutableYamlDelegate; + + public ImmutablePriorityPropertyDelegate(ImmutablePropertyDelegate immutablePropertyDelegate, + ImmutableYamlDelegate immutableYamlDelegate) { + this.immutablePropertyDelegate = immutablePropertyDelegate; + this.immutableYamlDelegate = immutableYamlDelegate; } @Override @@ -56,8 +62,14 @@ public String get(String key) { return value; } value = getConfigValueFromProperties(key); + if (value.isPresent()) { + log.debug("Get config value from properties, key: {} actualKey: {}, value: {}", + k, value.get().getActualKey(), value.get().getValue()); + return value; + } + value = 
getConfigValueFromYaml(key); value.ifPresent( - stringConfigValue -> log.debug("Get config value from properties, key: {} actualKey: {}, value: {}", + stringConfigValue -> log.debug("Get config value from yaml, key: {} actualKey: {}, value: {}", k, stringConfigValue.getActualKey(), stringConfigValue.getValue())); return value; }); @@ -76,7 +88,8 @@ public String get(String key, String defaultValue) { @Override public Set getPropertyKeys() { Set propertyKeys = new HashSet<>(); - propertyKeys.addAll(super.getPropertyKeys()); + propertyKeys.addAll(this.immutablePropertyDelegate.getPropertyKeys()); + propertyKeys.addAll(this.immutableYamlDelegate.getPropertyKeys()); propertyKeys.addAll(System.getProperties().stringPropertyNames()); propertyKeys.addAll(System.getenv().keySet()); return propertyKeys; @@ -104,7 +117,15 @@ private Optional> getConfigValueFromJvm(String key) { } private Optional> getConfigValueFromProperties(String key) { - String value = super.get(key); + String value = this.immutablePropertyDelegate.get(key); + if (value != null) { + return Optional.of(ConfigValue.fromProperties(key, value)); + } + return Optional.empty(); + } + + private Optional> getConfigValueFromYaml(String key) { + String value = this.immutableYamlDelegate.get(key); if (value != null) { return Optional.of(ConfigValue.fromProperties(key, value)); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePropertyDelegate.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePropertyDelegate.java index b58735afb067..4a0c192210be 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePropertyDelegate.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutablePropertyDelegate.java @@ -49,7 +49,7 @@ public ImmutablePropertyDelegate(String... 
propertyAbsolutePath) { } catch (IOException e) { log.error("Load property: {} error, please check if the file exist under classpath", propertyAbsolutePath, e); - System.exit(1); + throw new RuntimeException(e); } } printProperties(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutableYamlDelegate.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutableYamlDelegate.java new file mode 100644 index 000000000000..6f4bb34eab7c --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/config/ImmutableYamlDelegate.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.dolphinscheduler.common.config;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;

import lombok.extern.slf4j.Slf4j;

import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.io.InputStreamResource;

/**
 * Read-only {@link IPropertyDelegate} backed by YAML resources on the classpath.
 *
 * <p>YAML documents are flattened into dotted property keys via Spring's
 * {@link YamlPropertiesFactoryBean}. By default it loads
 * {@code /remote-logging.yaml}.
 */
@Slf4j
public class ImmutableYamlDelegate implements IPropertyDelegate {

    private static final String REMOTE_LOGGING_YAML_NAME = "/remote-logging.yaml";

    private final Properties properties;

    public ImmutableYamlDelegate() {
        this(REMOTE_LOGGING_YAML_NAME);
    }

    /**
     * Load and merge the given YAML resources in order (later files overwrite
     * duplicate keys from earlier ones).
     *
     * @param yamlAbsolutePath classpath-absolute YAML resource paths; a missing
     *                         file is logged and skipped, an unreadable file
     *                         aborts construction with a RuntimeException
     */
    public ImmutableYamlDelegate(String... yamlAbsolutePath) {
        properties = new Properties();
        // read from classpath
        for (String fileName : yamlAbsolutePath) {
            try (InputStream fis = ImmutableYamlDelegate.class.getResourceAsStream(fileName)) {
                if (fis == null) {
                    log.warn("Cannot find the file: {} under classpath", fileName);
                    continue;
                }
                YamlPropertiesFactoryBean factory = new YamlPropertiesFactoryBean();
                factory.setResources(new InputStreamResource(fis));
                factory.afterPropertiesSet();
                Properties subProperties = factory.getObject();
                properties.putAll(subProperties);
            } catch (IOException e) {
                // fail fast: a present-but-unreadable config file means a broken deployment
                log.error("Load property: {} error, please check if the file exist under classpath",
                        fileName, e);
                throw new RuntimeException(e);
            }
        }
        printProperties();
    }

    /** Wrap an already-loaded property set (mainly for tests). */
    public ImmutableYamlDelegate(Properties properties) {
        this.properties = properties;
    }

    @Override
    public String get(String key) {
        return properties.getProperty(key);
    }

    @Override
    public String get(String key, String defaultValue) {
        return properties.getProperty(key, defaultValue);
    }

    @Override
    public Set<String> getPropertyKeys() {
        return properties.stringPropertyNames();
    }

    // Debug-level dump of everything loaded; useful when diagnosing precedence issues.
    private void printProperties() {
        properties.forEach((k, v) -> log.debug("Get property {} -> {}", k, v));
    }
}
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java index 755663621671..c7d771796f49 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java @@ -35,6 +35,9 @@ private Constants() { */ public static final String COMMON_PROPERTIES_PATH = "/common.properties"; + public static final String REMOTE_LOGGING_YAML_PATH = "/remote-logging.yaml"; + public static final String AWS_YAML_PATH = "/aws.yaml"; + public static final String FORMAT_SS = "%s%s"; public static final String FORMAT_S_S = "%s/%s"; public static final String FORMAT_S_S_COLON = "%s:%s"; @@ -42,8 +45,6 @@ private Constants() { public static final String RESOURCE_TYPE_FILE = "resources"; - public static final String RESOURCE_TYPE_UDF = "udfs"; - public static final String EMPTY_STRING = ""; /** @@ -56,28 +57,6 @@ private Constants() { */ public static final String HDFS_DEFAULT_FS = "fs.defaultFS"; - /** - * hadoop configuration - */ - public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE"; - - public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port"; - - /** - * yarn.resourcemanager.ha.rm.ids - */ - public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; - - /** - * yarn.application.status.address - */ - public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address"; - - /** - * yarn.job.history.status.address - */ - public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address"; - /** * hdfs configuration * resource.hdfs.root.user @@ -128,8 +107,7 @@ private Constants() { */ public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type"; - 
public static final String AWS_S3_BUCKET_NAME = "resource.aws.s3.bucket.name"; - public static final String AWS_END_POINT = "resource.aws.s3.endpoint"; + public static final String AWS_S3_BUCKET_NAME = "aws.s3.bucket.name"; public static final String ALIBABA_CLOUD_OSS_BUCKET_NAME = "resource.alibaba.cloud.oss.bucket.name"; public static final String ALIBABA_CLOUD_OSS_END_POINT = "resource.alibaba.cloud.oss.endpoint"; @@ -242,11 +220,6 @@ private Constants() { */ public static final String HTTP_X_REAL_IP = "X-Real-IP"; - /** - * UTF-8 - */ - public static final String UTF_8 = "UTF-8"; - /** * user name regex */ @@ -297,8 +270,6 @@ private Constants() { */ public static final String FLOWNODE_RUN_FLAG_NORMAL = "NORMAL"; - public static final String COMMON_TASK_TYPE = "common"; - public static final String DEFAULT = "default"; public static final String PASSWORD = "password"; public static final String XXXXXX = "******"; @@ -395,8 +366,6 @@ private Constants() { public static final String QUEUE_NAME = "queueName"; public static final int LOG_QUERY_SKIP_LINE_NUMBER = 0; public static final int LOG_QUERY_LIMIT = 4096; - public static final String ALIAS = "alias"; - public static final String CONTENT = "content"; public static final String DEPENDENT_SPLIT = ":||"; public static final long DEPENDENT_ALL_TASK_CODE = -1; public static final long DEPENDENT_WORKFLOW_CODE = 0; @@ -535,14 +504,11 @@ private Constants() { * session timeout */ public static final int SESSION_TIME_OUT = 7200; + public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024; - public static final String UDF = "UDF"; + public static final String CLASS = "class"; - /** - * default worker group - */ - public static final String DEFAULT_WORKER_GROUP = "default"; /** * authorize writable perm */ @@ -683,9 +649,6 @@ private Constants() { public static final Integer QUERY_ALL_ON_WORKFLOW = 2; public static final Integer QUERY_ALL_ON_TASK = 3; - /** - * remote logging - */ public static final String 
REMOTE_LOGGING_ENABLE = "remote.logging.enable"; public static final String REMOTE_LOGGING_TARGET = "remote.logging.target"; @@ -705,19 +668,8 @@ private Constants() { public static final String REMOTE_LOGGING_OSS_ENDPOINT = "remote.logging.oss.endpoint"; - /** - * remote logging for S3 - */ - public static final String REMOTE_LOGGING_S3_ACCESS_KEY_ID = "remote.logging.s3.access.key.id"; - - public static final String REMOTE_LOGGING_S3_ACCESS_KEY_SECRET = "remote.logging.s3.access.key.secret"; - public static final String REMOTE_LOGGING_S3_BUCKET_NAME = "remote.logging.s3.bucket.name"; - public static final String REMOTE_LOGGING_S3_ENDPOINT = "remote.logging.s3.endpoint"; - - public static final String REMOTE_LOGGING_S3_REGION = "remote.logging.s3.region"; - /** * remote logging for GCS */ @@ -725,6 +677,13 @@ private Constants() { public static final String REMOTE_LOGGING_GCS_BUCKET_NAME = "remote.logging.google.cloud.storage.bucket.name"; + /** + * remote logging for ABS + */ + public static final String REMOTE_LOGGING_ABS_ACCOUNT_NAME = "remote.logging.abs.account.name"; + public static final String REMOTE_LOGGING_ABS_ACCOUNT_KEY = "remote.logging.abs.account.key"; + public static final String REMOTE_LOGGING_ABS_CONTAINER_NAME = "remote.logging.abs.container.name"; + /** * data quality */ @@ -735,4 +694,8 @@ private Constants() { * K8S sensitive param */ public static final String K8S_CONFIG_REGEX = "(?<=((?i)configYaml(\" : \"))).*?(?=(\",\\n))"; + + public static final String RELEASE_STATE = "releaseState"; + public static final String EXECUTE_TYPE = "executeType"; + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/TenantConstants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/TenantConstants.java index 8bffd5429e25..aa905cbb720f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/TenantConstants.java +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/TenantConstants.java @@ -21,5 +21,5 @@ public class TenantConstants { public static final String DEFAULT_TENANT_CODE = "default"; - public static final String BOOTSTRAPT_SYSTEM_USER = System.getProperty("user.name"); + public static final String BOOTSTRAP_SYSTEM_USER = System.getProperty("user.name"); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditModelType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditModelType.java new file mode 100644 index 000000000000..8216caa6b6d7 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditModelType.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.enums; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import lombok.Getter; + +/** + * Audit Model type + */ +@Getter +public enum AuditModelType { + + PROJECT("Project", null), + PROCESS("Process", PROJECT), + PROCESS_INSTANCE("ProcessInstance", PROCESS), + TASK("Task", PROCESS), + TASK_INSTANCE("TaskInstance", TASK), + SCHEDULE("Schedule", PROCESS), + + RESOURCE("Resource", null), + FOLDER("Folder", RESOURCE), + FILE("File", FOLDER), + TASK_GROUP("TaskGroup", RESOURCE), + + DATASOURCE("Datasource", null), + + SECURITY("Security", null), + TENANT("Tenant", SECURITY), + USER("User", SECURITY), + ALARM_GROUP("AlarmGroup", SECURITY), + ALARM_INSTANCE("AlarmInstance", SECURITY), + WORKER_GROUP("WorkerGroup", SECURITY), + YARN_QUEUE("YarnQueue", SECURITY), + ENVIRONMENT("Environment", SECURITY), + CLUSTER("Cluster", SECURITY), + K8S_NAMESPACE("K8sNamespace", SECURITY), + TOKEN("Token", SECURITY), + ; + private final String name; + private final AuditModelType parentType; + private final List child = new ArrayList<>(); + + private static final HashMap AUDIT_MODEL_MAP = new HashMap<>(); + private static final List AUDIT_MODEL_TREE_LIST = new ArrayList<>(); + + static { + for (AuditModelType auditModelType : values()) { + AUDIT_MODEL_MAP.put(auditModelType.name, auditModelType); + } + + for (AuditModelType auditModelType : values()) { + if (auditModelType.parentType != null) { + of(auditModelType.parentType.name).child.add(auditModelType); + } else { + AUDIT_MODEL_TREE_LIST.add(auditModelType); + } + } + } + + public static List getAuditModelTreeList() { + return AUDIT_MODEL_TREE_LIST; + } + + public static AuditModelType of(String name) { + if (AUDIT_MODEL_MAP.containsKey(name)) { + return AUDIT_MODEL_MAP.get(name); + } + + throw new IllegalArgumentException("invalid audit operation type name " + name); + } + + AuditModelType(String name, AuditModelType parentType) { + this.name = 
name; + this.parentType = parentType; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditOperationType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditOperationType.java index bcd78dff56a7..c4ca1b351e97 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditOperationType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditOperationType.java @@ -17,46 +17,80 @@ package org.apache.dolphinscheduler.common.enums; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; + +import lombok.Getter; /** * Audit Operation type */ +@Getter public enum AuditOperationType { - CREATE(0, "CREATE"), - READ(1, "READ"), - UPDATE(2, "UPDATE"), - DELETE(3, "DELETE"); + CREATE("Create"), + UPDATE("Update"), + BATCH_DELETE("BatchDelete"), + BATCH_START("BatchStart"), + DELETE("Delete"), + CLOSE("Close"), + + RELEASE("Release"), + ONLINE("Online"), + OFFLINE("Offline"), + + RESUME_PAUSE("ResumePause"), + RESUME_FAILURE("ResumeFailure"), - private final int code; - private final String enMsg; + IMPORT("Import"), + EXPORT("Export"), - private static HashMap AUDIT_OPERATION_MAP = new HashMap<>(); + EXECUTE("Execute"), + START("Start"), + MODIFY("Modify"), + RUN("Run"), + RERUN("Rerun"), + BATCH_RERUN("BatchRerun"), + STOP("Stop"), + KILL("Kill"), + PAUSE("Pause"), + MOVE("Move"), + + SWITCH_STATUS("SwitchStatus"), + SWITCH_VERSION("SwitchVersion"), + DELETE_VERSION("DeleteVersion"), + FORCE_SUCCESS("ForceSuccess"), + RENAME("Rename"), + UPLOAD("Upload"), + AUTHORIZE("Authorize"), + UN_AUTHORIZE("UnAuthorize"), + COPY("Copy"), + ; + + private final String name; + + AuditOperationType(String name) { + this.name = name; + } + + private static final HashMap AUDIT_OPERATION_MAP = new HashMap<>(); static { for (AuditOperationType operationType : AuditOperationType.values()) { - 
AUDIT_OPERATION_MAP.put(operationType.code, operationType); + AUDIT_OPERATION_MAP.put(operationType.name, operationType); } } - AuditOperationType(int code, String enMsg) { - this.code = code; - this.enMsg = enMsg; + public static List getOperationList() { + return new ArrayList<>(AUDIT_OPERATION_MAP.values()); } - public static AuditOperationType of(int status) { - if (AUDIT_OPERATION_MAP.containsKey(status)) { - return AUDIT_OPERATION_MAP.get(status); + public static AuditOperationType of(String name) { + if (AUDIT_OPERATION_MAP.containsKey(name)) { + return AUDIT_OPERATION_MAP.get(name); } - throw new IllegalArgumentException("invalid audit operation type code " + status); - } - - public int getCode() { - return code; - } - public String getMsg() { - return enMsg; + throw new IllegalArgumentException("invalid audit operation type code " + name); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditResourceType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditResourceType.java deleted file mode 100644 index d422cd7de3e0..000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuditResourceType.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.enums; - -import java.util.HashMap; - -/** - * Audit Module type - */ -public enum AuditResourceType { - - // TODO: add other audit resource enums - USER_MODULE(0, "USER"), - PROJECT_MODULE(1, "PROJECT"); - - private final int code; - private final String enMsg; - - private static HashMap AUDIT_RESOURCE_MAP = new HashMap<>(); - - static { - for (AuditResourceType auditResourceType : AuditResourceType.values()) { - AUDIT_RESOURCE_MAP.put(auditResourceType.code, auditResourceType); - } - } - - AuditResourceType(int code, String enMsg) { - this.code = code; - this.enMsg = enMsg; - } - - public int getCode() { - return this.code; - } - - public String getMsg() { - return this.enMsg; - } - - public static AuditResourceType of(int status) { - if (AUDIT_RESOURCE_MAP.containsKey(status)) { - return AUDIT_RESOURCE_MAP.get(status); - } - throw new IllegalArgumentException("invalid audit resource type code " + status); - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java index 79ef986cd3b6..f987bdb1a251 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java @@ -51,7 +51,6 @@ public enum AuthorizationType { @Deprecated UDF_FILE(2, "udf file"), DATASOURCE(3, "data source"), - UDF(4, "udf function"), PROJECTS(5, "projects"), WORKER_GROUP(6, "worker group"), ALERT_GROUP(7, "alert group"), diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ServerStatus.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ServerStatus.java index 1e4f49721a5e..afa7e97023e1 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ServerStatus.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ServerStatus.java @@ -20,6 +20,6 @@ public enum ServerStatus { NORMAL, - BUSY + BUSY, } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEventType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEventType.java index 6e25d2ed4b34..220b56a9399e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEventType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEventType.java @@ -27,7 +27,6 @@ public enum StateEventType { TASK_TIMEOUT(3, "task timeout"), WAKE_UP_TASK_GROUP(4, "wait task group"), TASK_RETRY(5, "task retry"), - PROCESS_BLOCKED(6, "process blocked"), PROCESS_SUBMIT_FAILED(7, "process submit failed"); StateEventType(int code, String descp) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResUploadType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StorageType.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResUploadType.java rename to dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StorageType.java index d60a657002f7..ba3cad45cdf7 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResUploadType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StorageType.java @@ -20,6 +20,6 @@ /** * data base types */ -public enum ResUploadType { +public enum StorageType { LOCAL, HDFS, S3, OSS, GCS, ABS, OBS } diff 
--git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WorkflowExecutionStatus.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WorkflowExecutionStatus.java index 880db602574e..923192ea7b7f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WorkflowExecutionStatus.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/WorkflowExecutionStatus.java @@ -38,8 +38,6 @@ public enum WorkflowExecutionStatus { SUCCESS(7, "success"), DELAY_EXECUTION(12, "delay execution"), SERIAL_WAIT(14, "serial wait"), - READY_BLOCK(15, "ready block"), - BLOCK(16, "block"), WAIT_TO_RUN(17, "wait to run"), ; @@ -59,7 +57,6 @@ public enum WorkflowExecutionStatus { READY_PAUSE.getCode(), READY_STOP.getCode(), SERIAL_WAIT.getCode(), - READY_BLOCK.getCode(), WAIT_TO_RUN.getCode() }; @@ -91,7 +88,7 @@ public boolean canStop() { public boolean isFinished() { // todo: do we need to remove pause/block in finished judge? - return isSuccess() || isFailure() || isStop() || isPause() || isBlock(); + return isSuccess() || isFailure() || isStop() || isPause(); } /** @@ -119,10 +116,6 @@ public boolean isStop() { return this == STOP; } - public boolean isBlock() { - return this == BLOCK; - } - public static int[] getNeedFailoverWorkflowInstanceState() { return NEED_FAILOVER_STATES; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandler.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandler.java new file mode 100644 index 000000000000..c0df3f6287c7 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandler.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.common.log.remote; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.io.Closeable; +import java.io.FileOutputStream; +import java.io.IOException; + +import lombok.extern.slf4j.Slf4j; + +import com.azure.storage.blob.BlobContainerClient; +import com.azure.storage.blob.BlobServiceClient; +import com.azure.storage.blob.BlobServiceClientBuilder; +import com.azure.storage.blob.specialized.BlobInputStream; +import com.azure.storage.common.StorageSharedKeyCredential; + +@Slf4j +public class AbsRemoteLogHandler implements RemoteLogHandler, Closeable { + + private String accountName; + + private String accountKey; + + private String containerName; + + private BlobContainerClient blobContainerClient; + + private static AbsRemoteLogHandler instance; + + private AbsRemoteLogHandler() { + accountName = readAccountName(); + accountKey = readAccountKey(); + containerName = readContainerName(); + blobContainerClient = buildBlobContainerClient(); + } + + public static synchronized AbsRemoteLogHandler getInstance() { + if (instance == null) { + instance = new AbsRemoteLogHandler(); + } + + return instance; + } + + 
protected BlobContainerClient buildBlobContainerClient() { + + BlobServiceClient serviceClient = new BlobServiceClientBuilder() + .endpoint(String.format("https://%s.blob.core.windows.net/", accountName)) + .credential(new StorageSharedKeyCredential(accountName, accountKey)) + .buildClient(); + + if (StringUtils.isBlank(containerName)) { + throw new IllegalArgumentException("remote.logging.abs.container.name is blank"); + } + + try { + this.blobContainerClient = serviceClient.getBlobContainerClient(containerName); + } catch (Exception ex) { + throw new IllegalArgumentException( + "containerName: " + containerName + " is not exists, you need to create them by yourself"); + } + + log.info("containerName: {} has been found.", containerName); + + return blobContainerClient; + } + + @Override + public void close() throws IOException { + // no need to close blobContainerClient + } + + @Override + public void sendRemoteLog(String logPath) { + String objectName = RemoteLogUtils.getObjectNameFromLogPath(logPath); + + try { + log.info("send remote log {} to Azure Blob {}", logPath, objectName); + blobContainerClient.getBlobClient(objectName).uploadFromFile(logPath); + } catch (Exception e) { + log.error("error while sending remote log {} to Azure Blob {}", logPath, objectName, e); + } + } + + @Override + public void getRemoteLog(String logPath) { + String objectName = RemoteLogUtils.getObjectNameFromLogPath(logPath); + + try { + log.info("get remote log on Azure Blob {} to {}", objectName, logPath); + + try ( + BlobInputStream bis = blobContainerClient.getBlobClient(objectName).openInputStream(); + FileOutputStream fos = new FileOutputStream(logPath)) { + byte[] readBuf = new byte[1024]; + int readLen = 0; + while ((readLen = bis.read(readBuf)) > 0) { + fos.write(readBuf, 0, readLen); + } + } + } catch (Exception e) { + log.error("error while getting remote log on Azure Blob {} to {}", objectName, logPath, e); + } + } + + protected String readAccountName() { + return 
PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_NAME); + } + + protected String readAccountKey() { + return PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_KEY); + } + + protected String readContainerName() { + return PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_CONTAINER_NAME); + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/GcsRemoteLogHandler.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/GcsRemoteLogHandler.java index 20fd30336e94..ad6e534251e6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/GcsRemoteLogHandler.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/GcsRemoteLogHandler.java @@ -49,19 +49,6 @@ public class GcsRemoteLogHandler implements RemoteLogHandler, Closeable { private static GcsRemoteLogHandler instance; private GcsRemoteLogHandler() { - - } - - public static synchronized GcsRemoteLogHandler getInstance() { - if (instance == null) { - instance = new GcsRemoteLogHandler(); - instance.init(); - } - - return instance; - } - - public void init() { try { credential = readCredentials(); bucketName = readBucketName(); @@ -73,6 +60,14 @@ public void init() { } } + public static synchronized GcsRemoteLogHandler getInstance() { + if (instance == null) { + instance = new GcsRemoteLogHandler(); + } + + return instance; + } + @Override public void close() throws IOException { try { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/OssRemoteLogHandler.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/OssRemoteLogHandler.java index 792085b19478..59b139451520 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/OssRemoteLogHandler.java +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/OssRemoteLogHandler.java @@ -44,28 +44,23 @@ public class OssRemoteLogHandler implements RemoteLogHandler, Closeable { private static OssRemoteLogHandler instance; private OssRemoteLogHandler() { + String accessKeyId = readOssAccessKeyId(); + String accessKeySecret = readOssAccessKeySecret(); + String endpoint = readOssEndpoint(); + ossClient = OssClientFactory.buildOssClient(new OssConnection(accessKeyId, accessKeySecret, endpoint)); + bucketName = readOssBucketName(); + checkBucketNameExists(bucketName); } public static synchronized OssRemoteLogHandler getInstance() { if (instance == null) { instance = new OssRemoteLogHandler(); - instance.init(); } return instance; } - public void init() { - String accessKeyId = readOssAccessKeyId(); - String accessKeySecret = readOssAccessKeySecret(); - String endpoint = readOssEndpoint(); - ossClient = OssClientFactory.buildOssClient(new OssConnection(accessKeyId, accessKeySecret, endpoint)); - - bucketName = readOssBucketName(); - checkBucketNameExists(bucketName); - } - @Override public void sendRemoteLog(String logPath) { String objectName = RemoteLogUtils.getObjectNameFromLogPath(logPath); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogHandlerFactory.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogHandlerFactory.java index 73ab41a134a8..ac75a23f2d30 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogHandlerFactory.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogHandlerFactory.java @@ -39,6 +39,8 @@ public RemoteLogHandler getRemoteLogHandler() { return S3RemoteLogHandler.getInstance(); } else if ("GCS".equals(target)) { return GcsRemoteLogHandler.getInstance(); + } else if ("ABS".equals(target)) { + return 
AbsRemoteLogHandler.getInstance(); } log.error("No suitable remote logging target for {}", target); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogUtils.java index 25d80244740e..8aab70f30445 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/RemoteLogUtils.java @@ -40,7 +40,6 @@ public class RemoteLogUtils { @Autowired private RemoteLogService autowiredRemoteLogService; - @PostConstruct private void init() { remoteLogService = autowiredRemoteLogService; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/S3RemoteLogHandler.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/S3RemoteLogHandler.java index d1c8c41445ba..4fef7b032a59 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/S3RemoteLogHandler.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/remote/S3RemoteLogHandler.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.common.log.remote; +import org.apache.dolphinscheduler.authentication.aws.AmazonS3ClientFactory; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.utils.PropertyUtils; @@ -26,76 +27,40 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.util.Map; import lombok.extern.slf4j.Slf4j; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; -import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; -import 
com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; @Slf4j public class S3RemoteLogHandler implements RemoteLogHandler, Closeable { - private String accessKeyId; + private final String bucketName; - private String accessKeySecret; - - private String region; - - private String bucketName; - - private String endPoint; - - private AmazonS3 s3Client; + private final AmazonS3 s3Client; private static S3RemoteLogHandler instance; private S3RemoteLogHandler() { - + bucketName = readBucketName(); + s3Client = buildS3Client(); + checkBucketNameExists(bucketName); } public static synchronized S3RemoteLogHandler getInstance() { if (instance == null) { instance = new S3RemoteLogHandler(); - instance.init(); } return instance; } - public void init() { - accessKeyId = readAccessKeyID(); - accessKeySecret = readAccessKeySecret(); - region = readRegion(); - bucketName = readBucketName(); - endPoint = readEndPoint(); - s3Client = buildS3Client(); - checkBucketNameExists(bucketName); - } - protected AmazonS3 buildS3Client() { - if (StringUtils.isNotEmpty(endPoint)) { - return AmazonS3ClientBuilder - .standard() - .withPathStyleAccessEnabled(true) - .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration( - endPoint, Regions.fromName(region).getName())) - .withCredentials( - new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyId, accessKeySecret))) - .build(); - } else { - return AmazonS3ClientBuilder - .standard() - .withCredentials( - new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyId, accessKeySecret))) - .withRegion(Regions.fromName(region)) - .build(); - } + Map awsProperties = PropertyUtils.getByPrefix("aws.s3.", ""); + return AmazonS3ClientFactory.createAmazonS3Client(awsProperties); } @Override @@ -136,24 +101,8 @@ public void getRemoteLog(String logPath) { } } - protected String readAccessKeyID() { - return 
PropertyUtils.getString(Constants.REMOTE_LOGGING_S3_ACCESS_KEY_ID); - } - - protected String readAccessKeySecret() { - return PropertyUtils.getString(Constants.REMOTE_LOGGING_S3_ACCESS_KEY_SECRET); - } - - protected String readRegion() { - return PropertyUtils.getString(Constants.REMOTE_LOGGING_S3_REGION); - } - protected String readBucketName() { - return PropertyUtils.getString(Constants.REMOTE_LOGGING_S3_BUCKET_NAME); - } - - protected String readEndPoint() { - return PropertyUtils.getString(Constants.REMOTE_LOGGING_S3_ENDPOINT); + return PropertyUtils.getString(Constants.AWS_S3_BUCKET_NAME); } public void checkBucketNameExists(String bucketName) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/AlertServerHeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/AlertServerHeartBeat.java index 7cbd83b8ce0d..8533ca6e4843 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/AlertServerHeartBeat.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/AlertServerHeartBeat.java @@ -17,33 +17,16 @@ package org.apache.dolphinscheduler.common.model; -import org.apache.dolphinscheduler.common.enums.ServerStatus; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; -@Data -@Builder +@SuperBuilder @NoArgsConstructor -@AllArgsConstructor -public class AlertServerHeartBeat implements HeartBeat { - - private int processId; - private long startupTime; - private long reportTime; - private double cpuUsage; - private double memoryUsage; - private double jvmMemoryUsage; - - private ServerStatus serverStatus; +public class AlertServerHeartBeat extends BaseHeartBeat implements HeartBeat { - private String host; - private int port; + /** + * If the alert server is active or standby + */ + private boolean isActive; - @Override - 
public ServerStatus getServerStatus() { - return serverStatus; - } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/BaseHeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/BaseHeartBeat.java new file mode 100644 index 000000000000..2837e5482b76 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/BaseHeartBeat.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.model; + +import org.apache.dolphinscheduler.common.enums.ServerStatus; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +@NoArgsConstructor +@AllArgsConstructor +public class BaseHeartBeat implements HeartBeat { + + protected int processId; + protected long startupTime; + protected long reportTime; + protected double jvmCpuUsage; + protected double cpuUsage; + protected double jvmMemoryUsage; + protected double memoryUsage; + protected double diskUsage; + protected ServerStatus serverStatus; + + protected String host; + protected int port; + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/HeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/HeartBeat.java index 3a105227aa08..35971b398b6e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/HeartBeat.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/HeartBeat.java @@ -21,10 +21,6 @@ public interface HeartBeat { - String getHost(); - ServerStatus getServerStatus(); - int getPort(); - } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterHeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterHeartBeat.java index ecc140bcfb5b..b8ae4512dd6d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterHeartBeat.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/MasterHeartBeat.java @@ -17,33 +17,11 @@ package org.apache.dolphinscheduler.common.model; -import org.apache.dolphinscheduler.common.enums.ServerStatus; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; import lombok.NoArgsConstructor; 
+import lombok.experimental.SuperBuilder; -@Data -@Builder +@SuperBuilder @NoArgsConstructor -@AllArgsConstructor -public class MasterHeartBeat implements HeartBeat { - - private long startupTime; - private long reportTime; - private double cpuUsage; - private double jvmMemoryUsage; - private double memoryUsage; - private double diskUsage; - private ServerStatus serverStatus; - private int processId; - - private String host; - private int port; +public class MasterHeartBeat extends BaseHeartBeat implements HeartBeat { - @Override - public ServerStatus getServerStatus() { - return serverStatus; - } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/WorkerHeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/WorkerHeartBeat.java index 056fc6a2c713..c02748619818 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/WorkerHeartBeat.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/WorkerHeartBeat.java @@ -17,37 +17,18 @@ package org.apache.dolphinscheduler.common.model; -import org.apache.dolphinscheduler.common.enums.ServerStatus; - -import lombok.AllArgsConstructor; -import lombok.Builder; import lombok.Data; +import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; +import lombok.experimental.SuperBuilder; @Data -@Builder +@EqualsAndHashCode(callSuper = true) +@SuperBuilder @NoArgsConstructor -@AllArgsConstructor -public class WorkerHeartBeat implements HeartBeat { - - private long startupTime; - private long reportTime; - private double cpuUsage; - private double jvmMemoryUsage; - private double memoryUsage; - private double diskUsage; - private ServerStatus serverStatus; - private int processId; - - private String host; - private int port; +public class WorkerHeartBeat extends BaseHeartBeat implements HeartBeat { private int workerHostWeight; // worker host weight private int threadPoolUsage; // 
worker waiting task count - @Override - public ServerStatus getServerStatus() { - return serverStatus; - } - } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java deleted file mode 100644 index 11786fd5a3d6..000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/HttpProperty.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.process; - -import org.apache.dolphinscheduler.common.enums.HttpParametersType; - -import java.util.Objects; - -public class HttpProperty { - - /** - * key - */ - private String prop; - - /** - * httpParametersType - */ - private HttpParametersType httpParametersType; - - /** - * value - */ - private String value; - - public HttpProperty() { - } - - public HttpProperty(String prop, HttpParametersType httpParametersType, String value) { - this.prop = prop; - this.httpParametersType = httpParametersType; - this.value = value; - } - - /** - * getter method - * - * @return the prop - * @see HttpProperty#prop - */ - public String getProp() { - return prop; - } - - /** - * setter method - * - * @param prop the prop to set - * @see HttpProperty#prop - */ - public void setProp(String prop) { - this.prop = prop; - } - - /** - * getter method - * - * @return the value - * @see HttpProperty#value - */ - public String getValue() { - return value; - } - - /** - * setter method - * - * @param value the value to set - * @see HttpProperty#value - */ - public void setValue(String value) { - this.value = value; - } - - public HttpParametersType getHttpParametersType() { - return httpParametersType; - } - - public void setHttpParametersType(HttpParametersType httpParametersType) { - this.httpParametersType = httpParametersType; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - HttpProperty property = (HttpProperty) o; - return Objects.equals(prop, property.prop) - && Objects.equals(value, property.value); - } - - @Override - public int hashCode() { - return Objects.hash(prop, value); - } - - @Override - public String toString() { - return "HttpProperty{" - + "prop='" + prop + '\'' - + ", httpParametersType=" + httpParametersType - + ", value='" + value + '\'' - + '}'; - } -} diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ClassFilterConstructor.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ClassFilterConstructor.java new file mode 100644 index 000000000000..bf127d14bb9d --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ClassFilterConstructor.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.dolphinscheduler.common.utils; + +import lombok.extern.slf4j.Slf4j; + +import org.yaml.snakeyaml.LoaderOptions; +import org.yaml.snakeyaml.constructor.Constructor; + +/** + * Whitelist constructor implementation for YAML snake. + * Copied from Apache ShardingSphere and Apache Skywalking. 
+ */ +@Slf4j +public final class ClassFilterConstructor extends Constructor { + + private final Class[] acceptClasses; + + public ClassFilterConstructor(final Class[] acceptClasses) { + super(new LoaderOptions()); + this.acceptClasses = acceptClasses; + } + + @Override + protected Class getClassForName(final String name) throws ClassNotFoundException { + for (Class each : acceptClasses) { + if (name.equals(each.getName())) { + log.info("name - {} : class - {}", name, super.getClassForName(name)); + return super.getClassForName(name); + } + } + throw new IllegalArgumentException(String.format("Class is not accepted: %s", name)); + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java index f35523b59df7..3e75264808ae 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java @@ -1,4 +1,6 @@ -/** Copyright 2010-2012 Twitter, Inc.*/ +/** + * Copyright 2010-2012 Twitter, Inc. 
+ */ package org.apache.dolphinscheduler.common.utils; @@ -6,66 +8,71 @@ import java.net.UnknownHostException; import java.util.Objects; +import lombok.extern.slf4j.Slf4j; + /** - * Rewriting based on Twitter snowflake algorithm + * Rewriting based on Twitter snowflake algorithm */ +@Slf4j public class CodeGenerateUtils { - // start timestamp - private static final long START_TIMESTAMP = 1609430400000L; // 2021-01-01 00:00:00 - // Each machine generates 32 in the same millisecond - private static final long LOW_DIGIT_BIT = 5L; - private static final long MIDDLE_BIT = 2L; - private static final long MAX_LOW_DIGIT = ~(-1L << LOW_DIGIT_BIT); - // The displacement to the left - private static final long MIDDLE_LEFT = LOW_DIGIT_BIT; - private static final long HIGH_DIGIT_LEFT = LOW_DIGIT_BIT + MIDDLE_BIT; - private final long machineHash; - private long lowDigit = 0L; - private long recordMillisecond = -1L; - - private static final long SYSTEM_TIMESTAMP = System.currentTimeMillis(); - private static final long SYSTEM_NANOTIME = System.nanoTime(); + private static final CodeGenerator codeGenerator; - private CodeGenerateUtils() throws CodeGenerateException { + static { try { - this.machineHash = - Math.abs(Objects.hash(InetAddress.getLocalHost().getHostName())) % (2 << (MIDDLE_BIT - 1)); + codeGenerator = new CodeGenerator(InetAddress.getLocalHost().getHostName() + "-" + OSUtils.getProcessID()); } catch (UnknownHostException e) { throw new CodeGenerateException(e.getMessage()); } } - private static CodeGenerateUtils instance = null; - - public static synchronized CodeGenerateUtils getInstance() throws CodeGenerateException { - if (instance == null) { - instance = new CodeGenerateUtils(); - } - return instance; + public static long genCode() throws CodeGenerateException { + return codeGenerator.genCode(); } - public synchronized long genCode() throws CodeGenerateException { - long nowtMillisecond = systemMillisecond(); - if (nowtMillisecond < recordMillisecond) { - throw 
new CodeGenerateException("New code exception because time is set back."); + public static class CodeGenerator { + + // start timestamp + private static final long START_TIMESTAMP = 1609430400000L; // 2021-01-01 00:00:00 + // Each machine generates 32 in the same millisecond + private static final long LOW_DIGIT_BIT = 5L; + private static final long MACHINE_BIT = 5L; + private static final long MAX_LOW_DIGIT = ~(-1L << LOW_DIGIT_BIT); + // The displacement to the left + private static final long HIGH_DIGIT_LEFT = LOW_DIGIT_BIT + MACHINE_BIT; + public final long machineHash; + private long lowDigit = 0L; + private long recordMillisecond = -1L; + + private static final long SYSTEM_TIMESTAMP = System.currentTimeMillis(); + private static final long SYSTEM_NANOTIME = System.nanoTime(); + + public CodeGenerator(String appName) { + this.machineHash = Math.abs(Objects.hash(appName)) % (1 << MACHINE_BIT); } - if (nowtMillisecond == recordMillisecond) { - lowDigit = (lowDigit + 1) & MAX_LOW_DIGIT; - if (lowDigit == 0L) { - while (nowtMillisecond <= recordMillisecond) { - nowtMillisecond = systemMillisecond(); + + public synchronized long genCode() throws CodeGenerateException { + long nowtMillisecond = systemMillisecond(); + if (nowtMillisecond < recordMillisecond) { + throw new CodeGenerateException("New code exception because time is set back."); + } + if (nowtMillisecond == recordMillisecond) { + lowDigit = (lowDigit + 1) & MAX_LOW_DIGIT; + if (lowDigit == 0L) { + while (nowtMillisecond <= recordMillisecond) { + nowtMillisecond = systemMillisecond(); + } } + } else { + lowDigit = 0L; } - } else { - lowDigit = 0L; + recordMillisecond = nowtMillisecond; + return (nowtMillisecond - START_TIMESTAMP) << HIGH_DIGIT_LEFT | machineHash << LOW_DIGIT_BIT | lowDigit; } - recordMillisecond = nowtMillisecond; - return (nowtMillisecond - START_TIMESTAMP) << HIGH_DIGIT_LEFT | machineHash << MIDDLE_LEFT | lowDigit; - } - private long systemMillisecond() { - return SYSTEM_TIMESTAMP + 
(System.nanoTime() - SYSTEM_NANOTIME) / 1000000; + private long systemMillisecond() { + return SYSTEM_TIMESTAMP + (System.nanoTime() - SYSTEM_NANOTIME) / 1000000; + } } public static class CodeGenerateException extends RuntimeException { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java index fee1d9a95c50..31765b19bdb9 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -17,15 +17,15 @@ package org.apache.dolphinscheduler.common.utils; +import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.dolphinscheduler.common.constants.Constants.DATA_BASEDIR_PATH; import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_VIEW_SUFFIXES; import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_VIEW_SUFFIXES_DEFAULT_VALUE; -import static org.apache.dolphinscheduler.common.constants.Constants.UTF_8; -import static org.apache.dolphinscheduler.common.constants.DateConstants.YYYYMMDDHHMMSS; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.SystemUtils; import java.io.ByteArrayOutputStream; @@ -34,17 +34,21 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.attribute.PosixFilePermission; import 
java.nio.file.attribute.PosixFilePermissions; +import java.util.Optional; import java.util.Set; import java.util.zip.CRC32; import java.util.zip.CheckedInputStream; import lombok.NonNull; +import lombok.SneakyThrows; import lombok.experimental.UtilityClass; import lombok.extern.slf4j.Slf4j; @@ -67,32 +71,14 @@ public class FileUtils { * @return download file name */ public static String getDownloadFilename(String filename) { - String fileName = - String.format("%s/download/%s/%s", DATA_BASEDIR, DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); - - File file = new File(fileName); - if (!file.getParentFile().exists()) { - file.getParentFile().mkdirs(); - } - - return fileName; + return Paths.get(DATA_BASEDIR, "tmp", CodeGenerateUtils.genCode() + "-" + filename).toString(); } /** - * get upload file absolute path and name - * - * @param tenantCode tenant code - * @param filename file name - * @return local file path + * Generate a local tmp absolute path of the uploaded file */ - public static String getUploadFilename(String tenantCode, String filename) { - String fileName = String.format("%s/%s/resources/%s", DATA_BASEDIR, tenantCode, filename); - File file = new File(fileName); - if (!file.getParentFile().exists()) { - file.getParentFile().mkdirs(); - } - - return fileName; + public static String getUploadFileLocalTmpAbsolutePath() { + return Paths.get(DATA_BASEDIR, "tmp", String.valueOf(CodeGenerateUtils.genCode())).toString(); } /** @@ -136,7 +122,7 @@ public static String getKubeConfigPath(String execPath) { /** * absolute path of appInfo file * - * @param execPath directory of process execution + * @param execPath directory of process execution * @return */ public static String getAppInfoPath(String execPath) { @@ -153,7 +139,7 @@ public static String getResourceViewSuffixes() { /** * write content to file ,if parent path not exists, it will do one's utmost to mkdir * - * @param content content + * @param content content * @param filePath target file path * 
@return true if write success */ @@ -207,7 +193,7 @@ public static String readFile2Str(InputStream inputStream) { while ((length = inputStream.read(buffer)) != -1) { output.write(buffer, 0, length); } - return output.toString(UTF_8); + return output.toString(StandardCharsets.UTF_8.name()); } catch (Exception e) { log.error(e.getMessage(), e); throw new RuntimeException(e); @@ -237,6 +223,7 @@ public static boolean directoryTraversal(String filename) { /** * Calculate file checksum with CRC32 algorithm + * * @param pathName * @return checksum of file/dir */ @@ -316,4 +303,45 @@ public static void setFileTo755(File file) throws IOException { } } + public static String concatFilePath(String... paths) { + if (paths.length == 0) { + throw new IllegalArgumentException("At least one path should be provided"); + } + StringBuilder finalPath = new StringBuilder(paths[0]); + if (StringUtils.isEmpty(finalPath)) { + throw new IllegalArgumentException("The path should not be empty"); + } + String separator = File.separator; + for (int i = 1; i < paths.length; i++) { + String path = paths[i]; + if (StringUtils.isEmpty(path)) { + throw new IllegalArgumentException("The path should not be empty"); + } + if (finalPath.toString().endsWith(separator) && path.startsWith(separator)) { + finalPath.append(path.substring(separator.length())); + continue; + } + if (!finalPath.toString().endsWith(separator) && !path.startsWith(separator)) { + finalPath.append(separator).append(path); + continue; + } + finalPath.append(path); + } + return finalPath.toString(); + } + + public static String getClassPathAbsolutePath(Class clazz) { + checkNotNull(clazz, "class is null"); + return Optional.ofNullable(clazz.getResource("/")) + .map(URL::getPath) + .orElseThrow(() -> new IllegalArgumentException("class path: " + clazz + " is null")); + } + + /** + * copy input stream to file, if the file already exists, will append the content to the beginning of the file, otherwise will create a new file. 
+ */ + @SneakyThrows + public static void copyInputStreamToFile(InputStream inputStream, String destFilename) { + org.apache.commons.io.FileUtils.copyInputStreamToFile(inputStream, new File(destFilename)); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java index 5c79fff951b8..e5d22561508e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java @@ -36,6 +36,7 @@ import org.apache.http.util.EntityUtils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.security.NoSuchAlgorithmException; import java.util.Arrays; @@ -143,7 +144,7 @@ public static String getResponseContentString(HttpGet httpGet, CloseableHttpClie } HttpEntity entity = response.getEntity(); - return entity != null ? EntityUtils.toString(entity, Constants.UTF_8) : null; + return entity != null ? 
EntityUtils.toString(entity, StandardCharsets.UTF_8) : null; } catch (IOException e) { log.error("Error executing HTTP GET request", e); return null; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 6750b364f927..bfc3af2c58ef 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -196,7 +196,10 @@ public static List toList(String json, Class clazz) { * @return true if valid */ public static boolean checkJsonValid(String json) { + return checkJsonValid(json, true); + } + public static boolean checkJsonValid(String json, Boolean logFlag) { if (Strings.isNullOrEmpty(json)) { return false; } @@ -205,7 +208,8 @@ public static boolean checkJsonValid(String json) { objectMapper.readTree(json); return true; } catch (IOException e) { - log.error("check json object valid exception!", e); + if (logFlag) + log.error("check json object valid exception!", e); } return false; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java index beca53c3fd59..dbfcea2ed8b7 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -183,11 +183,10 @@ public static boolean existTenantCodeInLinux(String tenantCode) { * * @param userName user name */ - public static void createUserIfAbsent(String userName) { + public static synchronized void createUserIfAbsent(String userName) { // if not exists this user, then create if (!getUserList().contains(userName)) { - boolean isSuccess = 
createUser(userName); - log.info("create user {} {}", userName, isSuccess ? "success" : "fail"); + createUser(userName); } } @@ -197,13 +196,12 @@ public static void createUserIfAbsent(String userName) { * @param userName user name * @return true if creation was successful, otherwise false */ - public static boolean createUser(String userName) { + public static void createUser(String userName) { try { String userGroup = getGroup(); if (StringUtils.isEmpty(userGroup)) { - String errorLog = String.format("%s group does not exist for this operating system.", userGroup); - log.error(errorLog); - return false; + throw new UnsupportedOperationException( + "There is no userGroup exist cannot create tenant, please create userGroupFirst"); } if (SystemUtils.IS_OS_MAC) { createMacUser(userName, userGroup); @@ -212,18 +210,17 @@ public static boolean createUser(String userName) { } else { createLinuxUser(userName, userGroup); } - return true; + log.info("Create tenant {} under userGroup: {} success", userName, userGroup); } catch (Exception e) { - log.error(e.getMessage(), e); + throw new RuntimeException("Create tenant: {} failed", e); } - return false; } /** * create linux user * - * @param userName user name + * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ @@ -237,7 +234,7 @@ private static void createLinuxUser(String userName, String userGroup) throws IO /** * create mac user (Supports Mac OSX 10.10+) * - * @param userName user name + * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ @@ -256,7 +253,7 @@ private static void createMacUser(String userName, String userGroup) throws IOEx /** * create windows user * - * @param userName user name + * @param userName user name * @param userGroup user group * @throws IOException in case of an I/O error */ @@ -304,7 +301,7 @@ public static String getGroup() throws IOException { * get sudo command * * @param 
tenantCode tenantCode - * @param command command + * @param command command * @return result of sudo execute command */ public static String getSudoCmd(String tenantCode, String command) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java index 8289b1447903..aee15893350d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -17,10 +17,13 @@ package org.apache.dolphinscheduler.common.utils; +import static org.apache.dolphinscheduler.common.constants.Constants.AWS_YAML_PATH; import static org.apache.dolphinscheduler.common.constants.Constants.COMMON_PROPERTIES_PATH; +import static org.apache.dolphinscheduler.common.constants.Constants.REMOTE_LOGGING_YAML_PATH; -import org.apache.dolphinscheduler.common.config.IPropertyDelegate; import org.apache.dolphinscheduler.common.config.ImmutablePriorityPropertyDelegate; +import org.apache.dolphinscheduler.common.config.ImmutablePropertyDelegate; +import org.apache.dolphinscheduler.common.config.ImmutableYamlDelegate; import java.util.HashMap; import java.util.Map; @@ -37,8 +40,10 @@ public class PropertyUtils { // todo: add another implementation for zookeeper/etcd/consul/xx - private static final IPropertyDelegate propertyDelegate = - new ImmutablePriorityPropertyDelegate(COMMON_PROPERTIES_PATH); + private final ImmutablePriorityPropertyDelegate propertyDelegate = + new ImmutablePriorityPropertyDelegate( + new ImmutablePropertyDelegate(COMMON_PROPERTIES_PATH), + new ImmutableYamlDelegate(REMOTE_LOGGING_YAML_PATH, AWS_YAML_PATH)); public static String getString(String key) { return propertyDelegate.get(key.trim()); @@ -102,6 +107,19 @@ public static Map getByPrefix(String prefix) { return matchedProperties; 
} + /** + * Get all properties with specified prefix, like: fs., will replace the prefix with newPrefix + */ + public static Map getByPrefix(String prefix, String newPrefix) { + Map matchedProperties = new HashMap<>(); + for (String propName : propertyDelegate.getPropertyKeys()) { + if (propName.startsWith(prefix)) { + matchedProperties.put(propName.replace(prefix, newPrefix), propertyDelegate.get(propName)); + } + } + return matchedProperties; + } + public static Set getSet(String key, Function> transformFunction, Set defaultValue) { return propertyDelegate.get(key, transformFunction, defaultValue); } diff --git a/dolphinscheduler-common/src/main/resources/common.properties b/dolphinscheduler-common/src/main/resources/common.properties index 669d3dfef348..e0704bebe58f 100644 --- a/dolphinscheduler-common/src/main/resources/common.properties +++ b/dolphinscheduler-common/src/main/resources/common.properties @@ -26,7 +26,7 @@ data.basedir.path=/tmp/dolphinscheduler # use shared file mount point resource.storage.type=LOCAL # resource store on HDFS/S3 path, resource file will store to this base path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended -resource.storage.upload.base.path=/dolphinscheduler +resource.storage.upload.base.path=/tmp/dolphinscheduler # The Azure client ID (Azure Application (client) ID) resource.azure.client.id=minioadmin @@ -39,17 +39,6 @@ resource.azure.tenant.id=minioadmin # The query interval resource.query.interval=10000 -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id=minioadmin -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=minioadmin -# The AWS Region to use. 
if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=cn-north-1 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://localhost:9000 - # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= # alibaba cloud access key secret, required if you set resource.storage.type=OSS @@ -188,18 +177,14 @@ remote.logging.oss.access.key.secret= remote.logging.oss.bucket.name= # oss endpoint, required if you set remote.logging.target=OSS remote.logging.oss.endpoint= -# s3 access key id, required if you set remote.logging.target=S3 -remote.logging.s3.access.key.id= -# s3 access key secret, required if you set remote.logging.target=S3 -remote.logging.s3.access.key.secret= -# s3 bucket name, required if you set remote.logging.target=S3 -remote.logging.s3.bucket.name= -# s3 endpoint, required if you set remote.logging.target=S3 -remote.logging.s3.endpoint= -# s3 region, required if you set remote.logging.target=S3 -remote.logging.s3.region= + # the location of the google cloud credential, required if you set remote.logging.target=GCS remote.logging.google.cloud.storage.credential=/path/to/credential # gcs bucket name, required if you set remote.logging.target=GCS remote.logging.google.cloud.storage.bucket.name= - +# abs account name, required if you set resource.storage.type=ABS +remote.logging.abs.account.name= +# abs account key, required if you set resource.storage.type=ABS +remote.logging.abs.account.key= +# abs container name, required if you set 
resource.storage.type=ABS +remote.logging.abs.container.name= diff --git a/dolphinscheduler-common/src/main/resources/remote-logging.yaml b/dolphinscheduler-common/src/main/resources/remote-logging.yaml new file mode 100644 index 000000000000..f413958e646c --- /dev/null +++ b/dolphinscheduler-common/src/main/resources/remote-logging.yaml @@ -0,0 +1,49 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +remote-logging: + # Whether to enable remote logging + enable: false + # if remote-logging.enable = true, set the target of remote logging + target: OSS + # if remote-logging.enable = true, set the log base directory + base.dir: logs + # if remote-logging.enable = true, set the number of threads to send logs to remote storage + thread.pool.size: 10 + # required if you set remote-logging.target=OSS + oss: + # oss access key id, required if you set remote-logging.target=OSS + access.key.id: + # oss access key secret, required if you set remote-logging.target=OSS + access.key.secret: + # oss bucket name, required if you set remote-logging.target=OSS + bucket.name: + # oss endpoint, required if you set remote-logging.target=OSS + endpoint: + google.cloud.storage: + # the location of the google cloud credential, required if you set remote-logging.target=GCS + credential: /path/to/credential + # gcs bucket name, required if you set remote-logging.target=GCS + bucket.name: + abs: + # abs account name, required if you set resource.storage.type=ABS + account.name: + # abs account key, required if you set resource.storage.type=ABS + account.key: + # abs container name, required if you set resource.storage.type=ABS + container.name: + diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegateTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegateTest.java index efba923a5a5b..633325049274 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegateTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/config/ImmutablePriorityPropertyDelegateTest.java @@ -19,6 +19,7 @@ import static com.github.stefanbirkner.systemlambda.SystemLambda.withEnvironmentVariable; import static 
org.apache.dolphinscheduler.common.constants.Constants.COMMON_PROPERTIES_PATH; +import static org.apache.dolphinscheduler.common.constants.Constants.REMOTE_LOGGING_YAML_PATH; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -26,7 +27,9 @@ class ImmutablePriorityPropertyDelegateTest { private final ImmutablePriorityPropertyDelegate immutablePriorityPropertyDelegate = - new ImmutablePriorityPropertyDelegate(COMMON_PROPERTIES_PATH); + new ImmutablePriorityPropertyDelegate( + new ImmutablePropertyDelegate(COMMON_PROPERTIES_PATH), + new ImmutableYamlDelegate(REMOTE_LOGGING_YAML_PATH)); @Test void getOverrideFromEnv() throws Exception { diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandlerTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandlerTest.java new file mode 100644 index 000000000000..bc18f952a9ed --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/remote/AbsRemoteLogHandlerTest.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.log.remote; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.LogUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; + +import lombok.extern.slf4j.Slf4j; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.MockedConstruction; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.azure.storage.blob.BlobClient; +import com.azure.storage.blob.BlobContainerClient; +import com.azure.storage.blob.BlobServiceClient; +import com.azure.storage.blob.BlobServiceClientBuilder; +import com.azure.storage.common.StorageSharedKeyCredential; + +@Slf4j +@ExtendWith(MockitoExtension.class) +public class AbsRemoteLogHandlerTest { + + @Mock + BlobServiceClient blobServiceClient; + + @Mock + BlobContainerClient blobContainerClient; + + @Mock + BlobClient blobClient; + + @Test + public void testAbsRemoteLogHandlerContainerNameBlack() { + try ( + MockedStatic propertyUtilsMockedStatic = Mockito.mockStatic(PropertyUtils.class); + MockedStatic remoteLogUtilsMockedStatic = Mockito.mockStatic(LogUtils.class)) { + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_NAME)) + .thenReturn("account_name"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_KEY)) + .thenReturn("account_key"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_CONTAINER_NAME)) + .thenReturn(""); + remoteLogUtilsMockedStatic.when(LogUtils::getLocalLogBaseDir).thenReturn("logs"); + + IllegalArgumentException thrown = 
Assertions.assertThrows(IllegalArgumentException.class, () -> { + AbsRemoteLogHandler.getInstance(); + }); + Assertions.assertEquals("remote.logging.abs.container.name is blank", thrown.getMessage()); + } + } + + @Test + public void testAbsRemoteLogHandlerContainerNotExists() { + try ( + MockedStatic propertyUtilsMockedStatic = Mockito.mockStatic(PropertyUtils.class); + MockedStatic remoteLogUtilsMockedStatic = Mockito.mockStatic(LogUtils.class); + MockedConstruction k8sClientWrapperMockedConstruction = + Mockito.mockConstruction(BlobServiceClientBuilder.class, (mock, context) -> { + when(mock.endpoint(any(String.class))).thenReturn(mock); + when(mock.credential(any(StorageSharedKeyCredential.class))).thenReturn(mock); + when(mock.buildClient()) + .thenReturn(blobServiceClient); + })) { + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_NAME)) + .thenReturn("account_name"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_KEY)) + .thenReturn("account_key"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_CONTAINER_NAME)) + .thenReturn("container_name"); + remoteLogUtilsMockedStatic.when(LogUtils::getLocalLogBaseDir).thenReturn("logs"); + + when(blobServiceClient.getBlobContainerClient(any(String.class))).thenThrow( + new NullPointerException("container not exists")); + IllegalArgumentException thrown = Assertions.assertThrows(IllegalArgumentException.class, () -> { + AbsRemoteLogHandler.getInstance(); + }); + Assertions.assertEquals("containerName: container_name is not exists, you need to create them by yourself", + thrown.getMessage()); + } + } + + @Test + public void testAbsRemoteLogHandler() { + + try ( + MockedStatic propertyUtilsMockedStatic = Mockito.mockStatic(PropertyUtils.class); + MockedStatic remoteLogUtilsMockedStatic = Mockito.mockStatic(LogUtils.class); + MockedConstruction 
blobServiceClientBuilderMockedConstruction = + Mockito.mockConstruction(BlobServiceClientBuilder.class, (mock, context) -> { + when(mock.endpoint(any(String.class))).thenReturn(mock); + when(mock.credential(any(StorageSharedKeyCredential.class))).thenReturn(mock); + when(mock.buildClient()) + .thenReturn(blobServiceClient); + }); + MockedStatic remoteLogUtilsMockedStatic1 = Mockito.mockStatic(RemoteLogUtils.class)) { + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_NAME)) + .thenReturn("account_name"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_ACCOUNT_KEY)) + .thenReturn("account_key"); + propertyUtilsMockedStatic.when(() -> PropertyUtils.getString(Constants.REMOTE_LOGGING_ABS_CONTAINER_NAME)) + .thenReturn("container_name"); + remoteLogUtilsMockedStatic.when(LogUtils::getLocalLogBaseDir).thenReturn("logs"); + String logPath = "logpath"; + String objectName = "objectname"; + remoteLogUtilsMockedStatic1.when(() -> RemoteLogUtils.getObjectNameFromLogPath(logPath)) + .thenReturn(objectName); + + when(blobServiceClient.getBlobContainerClient(any(String.class))).thenReturn(blobContainerClient); + when(blobContainerClient.getBlobClient(objectName)).thenReturn(blobClient); + + AbsRemoteLogHandler absRemoteLogHandler = AbsRemoteLogHandler.getInstance(); + Assertions.assertNotNull(absRemoteLogHandler); + + absRemoteLogHandler.sendRemoteLog(logPath); + Mockito.verify(blobClient, times(1)).uploadFromFile(logPath); + } + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java index 3871646c95c8..8cd8ab8e6d4f 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java +++ 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java @@ -17,20 +17,59 @@ package org.apache.dolphinscheduler.common.utils; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -public class CodeGenerateUtilsTest { +class CodeGenerateUtilsTest { @Test - public void testNoGenerateDuplicateCode() throws CodeGenerateUtils.CodeGenerateException { - HashSet existsCode = new HashSet<>(); - for (int i = 0; i < 100; i++) { - Long currentCode = CodeGenerateUtils.getInstance().genCode(); - Assertions.assertFalse(existsCode.contains(currentCode)); + void testNoGenerateDuplicateCode() { + int codeNum = 10000000; + List existsCode = new ArrayList<>(); + for (int i = 0; i < codeNum; i++) { + Long currentCode = CodeGenerateUtils.genCode(); existsCode.add(currentCode); } + Set existsCodeSet = new HashSet<>(existsCode); + // Disallow duplicate code + assertEquals(existsCode.size(), existsCodeSet.size()); + } + + @Test + void testNoGenerateDuplicateCodeWithDifferentAppName() throws UnknownHostException, InterruptedException { + int threadNum = 10; + int codeNum = 1000000; + + final String hostName = InetAddress.getLocalHost().getHostName(); + Map> machineCodes = new ConcurrentHashMap<>(); + CountDownLatch countDownLatch = new CountDownLatch(threadNum); + + for (int i = 0; i < threadNum; i++) { + final int c = i; + new Thread(() -> { + List codes = new ArrayList<>(codeNum); + CodeGenerateUtils.CodeGenerator codeGenerator = new CodeGenerateUtils.CodeGenerator(hostName + "-" + c); + for (int j = 0; j < codeNum; j++) { + codes.add(codeGenerator.genCode()); + } + 
machineCodes.put(Thread.currentThread().getName(), codes); + countDownLatch.countDown(); + }).start(); + } + countDownLatch.await(); + Set totalCodes = new HashSet<>(); + machineCodes.values().forEach(totalCodes::addAll); + assertEquals(codeNum * threadNum, totalCodes.size()); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java index f06ece2a566a..72ceed630f92 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java @@ -17,8 +17,6 @@ package org.apache.dolphinscheduler.common.utils; -import static org.apache.dolphinscheduler.common.constants.DateConstants.YYYYMMDDHHMMSS; - import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -31,26 +29,21 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.MockedStatic; -import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import com.google.common.truth.Truth; + @ExtendWith(MockitoExtension.class) public class FileUtilsTest { @Test public void testGetDownloadFilename() { - try (MockedStatic mockedDateUtils = Mockito.mockStatic(DateUtils.class)) { - mockedDateUtils.when(() -> DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059"); - Assertions.assertEquals("/tmp/dolphinscheduler/download/20190101101059/test", - FileUtils.getDownloadFilename("test")); - } + Truth.assertThat(FileUtils.getDownloadFilename("test")).startsWith("/tmp/dolphinscheduler/tmp/"); } @Test public void testGetUploadFilename() { - Assertions.assertEquals("/tmp/dolphinscheduler/aaa/resources/bbb", - FileUtils.getUploadFilename("aaa", "bbb")); + 
Truth.assertThat(FileUtils.getUploadFileLocalTmpAbsolutePath()).startsWith("/tmp/dolphinscheduler/tmp/"); } @Test diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java index c915197844aa..8abfb38269cb 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PropertyUtilsTest.java @@ -17,12 +17,15 @@ package org.apache.dolphinscheduler.common.utils; +import static com.google.common.truth.Truth.assertThat; + import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.commons.lang3.StringUtils; import java.util.Arrays; import java.util.Collections; +import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -48,4 +51,14 @@ public void getSet() { }, Sets.newHashSet("docker0")); Assertions.assertEquals(Sets.newHashSet("docker0"), networkInterface); } + + @Test + void getByPrefix() { + Map awsProperties = PropertyUtils.getByPrefix("resource.aws.", ""); + assertThat(awsProperties).containsEntry("access.key.id", "minioadmin"); + assertThat(awsProperties).containsEntry("secret.access.key", "minioadmin"); + assertThat(awsProperties).containsEntry("region", "cn-north-1"); + assertThat(awsProperties).containsEntry("s3.bucket.name", "dolphinscheduler"); + assertThat(awsProperties).containsEntry("endpoint", "http://localhost:9000"); + } } diff --git a/dolphinscheduler-common/src/test/resources/common.properties b/dolphinscheduler-common/src/test/resources/common.properties index 7f66a32a23d4..ce8ef3bf4f42 100644 --- a/dolphinscheduler-common/src/test/resources/common.properties +++ b/dolphinscheduler-common/src/test/resources/common.properties @@ -45,6 +45,10 @@ resource.azure.tenant.id=minioadmin # The query interval 
resource.query.interval=10000 +# The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider +# AWSStaticCredentialsProvider: use the access key and secret key to authenticate +# InstanceProfileCredentialsProvider: use the IAM role to authenticate +aws.credentials.provider.type=AWSStaticCredentialsProvider # The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required resource.aws.access.key.id=minioadmin # The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required @@ -54,7 +58,7 @@ resource.aws.region=cn-north-1 # The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. resource.aws.s3.bucket.name=dolphinscheduler # You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://localhost:9000 +resource.aws.endpoint=http://localhost:9000 # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= diff --git a/dolphinscheduler-common/src/test/resources/logback.xml b/dolphinscheduler-common/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-common/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-common/src/test/resources/remote-logging.yaml b/dolphinscheduler-common/src/test/resources/remote-logging.yaml new file mode 100644 index 000000000000..cb149a77fe0d --- /dev/null +++ b/dolphinscheduler-common/src/test/resources/remote-logging.yaml @@ -0,0 +1,61 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +remote-logging: + # Whether to enable remote logging + enable: false + # if remote-logging.enable = true, set the target of remote logging + target: OSS + # if remote-logging.enable = true, set the log base directory + base.dir: logs + # if remote-logging.enable = true, set the number of threads to send logs to remote storage + thread.pool.size: 10 + # required if you set remote-logging.target=OSS + oss: + # oss access key id, required if you set remote-logging.target=OSS + access.key.id: + # oss access key secret, required if you set remote-logging.target=OSS + access.key.secret: + # oss bucket name, required if you set remote-logging.target=OSS + bucket.name: + # oss endpoint, required if you set remote-logging.target=OSS + endpoint: + # required if you set remote-logging.target=S3 + s3: + # s3 access key id, required if you set remote-logging.target=S3 + access.key.id: + # s3 access key secret, required if you set remote-logging.target=S3 + access.key.secret: + # s3 bucket name, required if you set remote-logging.target=S3 + bucket.name: + # s3 endpoint, required if you set remote-logging.target=S3 + endpoint: + # s3 region, required if you set remote-logging.target=S3 + region: + google.cloud.storage: + # the location of the google cloud credential, required if you set 
remote-logging.target=GCS + credential: /path/to/credential + # gcs bucket name, required if you set remote-logging.target=GCS + bucket.name: + abs: + # abs account name, required if you set resource.storage.type=ABS + account.name: + # abs account key, required if you set resource.storage.type=ABS + account.key: + # abs container name, required if you set resource.storage.type=ABS + container.name: + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/UdfType.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-api/src/main/java/org/apache/dolphinscheduler/dao/plugin/api/DatabaseEnvironmentCondition.java similarity index 54% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/UdfType.java rename to dolphinscheduler-dao-plugin/dolphinscheduler-dao-api/src/main/java/org/apache/dolphinscheduler/dao/plugin/api/DatabaseEnvironmentCondition.java index b65dc07152f6..32fb807a9bac 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/UdfType.java +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-api/src/main/java/org/apache/dolphinscheduler/dao/plugin/api/DatabaseEnvironmentCondition.java @@ -15,42 +15,25 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.api.enums; +package org.apache.dolphinscheduler.dao.plugin.api; -/** - * UDF type - */ -public enum UdfType { +import java.util.Arrays; - /** - * 0 hive; 1 spark - */ - HIVE(0, "hive"), - SPARK(1, "spark"); +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.type.AnnotatedTypeMetadata; - UdfType(int code, String descp) { - this.code = code; - this.descp = descp; - } +public class DatabaseEnvironmentCondition implements Condition { - private final int code; - private final String descp; + private final String profile; - public int getCode() { - return code; + public DatabaseEnvironmentCondition(String profile) { + this.profile = profile; } - public String getDescp() { - return descp; + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + String[] activeProfiles = context.getEnvironment().getActiveProfiles(); + return Arrays.asList(activeProfiles).contains(profile); } - - public static UdfType of(int type) { - for (UdfType ut : values()) { - if (ut.getCode() == type) { - return ut; - } - } - throw new IllegalArgumentException("invalid type : " + type); - } - } diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginConfiguration.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginAutoConfiguration.java similarity index 88% rename from dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginConfiguration.java rename to dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginAutoConfiguration.java index 9aea94f77de1..f496679ed5a7 100644 --- 
a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginConfiguration.java +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DaoPluginAutoConfiguration.java @@ -29,14 +29,14 @@ import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; import com.baomidou.mybatisplus.annotation.DbType; -@Profile("h2") -@Configuration -public class H2DaoPluginConfiguration implements DaoPluginConfiguration { +@Conditional(H2DatabaseEnvironmentCondition.class) +@Configuration(proxyBeanMethods = false) +public class H2DaoPluginAutoConfiguration implements DaoPluginConfiguration { @Autowired private DataSource dataSource; diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DatabaseEnvironmentCondition.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DatabaseEnvironmentCondition.java new file mode 100644 index 000000000000..894f38bd205f --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/java/org/apache/dolphinscheduler/dao/plugin/h2/H2DatabaseEnvironmentCondition.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.plugin.h2; + +import org.apache.dolphinscheduler.dao.plugin.api.DatabaseEnvironmentCondition; + +public class H2DatabaseEnvironmentCondition extends DatabaseEnvironmentCondition { + + public H2DatabaseEnvironmentCondition() { + super("h2"); + } + +} diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/resources/META-INF/spring.factories b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..c899dfb43fa3 --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-h2/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.dao.plugin.h2.H2DaoPluginAutoConfiguration diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginConfiguration.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginAutoConfiguration.java similarity index 88% rename from dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginConfiguration.java rename to dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginAutoConfiguration.java index 8b37fca67b56..5fb3a350aef8 100644 --- a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginConfiguration.java +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDaoPluginAutoConfiguration.java @@ -28,14 +28,14 @@ import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; import com.baomidou.mybatisplus.annotation.DbType; -@Profile("mysql") -@Configuration -public class MysqlDaoPluginConfiguration implements DaoPluginConfiguration { +@Configuration(proxyBeanMethods = false) +@Conditional(MysqlDatabaseEnvironmentCondition.class) +public class MysqlDaoPluginAutoConfiguration implements DaoPluginConfiguration { @Autowired private DataSource dataSource; diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDatabaseEnvironmentCondition.java 
b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDatabaseEnvironmentCondition.java new file mode 100644 index 000000000000..2136e1354e77 --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/java/org/apache/dolphinscheduler/dao/plugin/mysql/MysqlDatabaseEnvironmentCondition.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.plugin.mysql; + +import org.apache.dolphinscheduler.dao.plugin.api.DatabaseEnvironmentCondition; + +public class MysqlDatabaseEnvironmentCondition extends DatabaseEnvironmentCondition { + + public MysqlDatabaseEnvironmentCondition() { + super("mysql"); + } + +} diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/resources/META-INF/spring.factories b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..386c80c67686 --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-mysql/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.dao.plugin.mysql.MysqlDaoPluginAutoConfiguration diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginConfiguration.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginAutoConfiguration.java similarity index 88% rename from dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginConfiguration.java rename to dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginAutoConfiguration.java index e57c84fab9d5..f0467bfd0747 100644 --- a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginConfiguration.java +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDaoPluginAutoConfiguration.java @@ -29,14 +29,14 @@ import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; import com.baomidou.mybatisplus.annotation.DbType; -@Profile("postgresql") -@Configuration -public class PostgresqlDaoPluginConfiguration implements DaoPluginConfiguration { +@Conditional(PostgresqlDatabaseEnvironmentCondition.class) +@Configuration(proxyBeanMethods = false) +public class PostgresqlDaoPluginAutoConfiguration implements DaoPluginConfiguration { @Autowired private DataSource dataSource; diff --git 
a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDatabaseEnvironmentCondition.java b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDatabaseEnvironmentCondition.java new file mode 100644 index 000000000000..2c71ea844280 --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/java/org/apache/dolphinscheduler/dao/plugin/postgresql/PostgresqlDatabaseEnvironmentCondition.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.plugin.postgresql; + +import org.apache.dolphinscheduler.dao.plugin.api.DatabaseEnvironmentCondition; + +public class PostgresqlDatabaseEnvironmentCondition extends DatabaseEnvironmentCondition { + + public PostgresqlDatabaseEnvironmentCondition() { + super("postgresql"); + } + +} diff --git a/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/resources/META-INF/spring.factories b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..fd6a5f07b9a9 --- /dev/null +++ b/dolphinscheduler-dao-plugin/dolphinscheduler-dao-postgresql/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.dao.plugin.postgresql.PostgresqlDaoPluginAutoConfiguration diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 3b71312d0f13..d9f61098349f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -63,8 +63,7 @@ @Slf4j public class AlertDao { - @Value("${alert.query_alert_threshold:100}") - private Integer QUERY_ALERT_THRESHOLD; + private static final Integer QUERY_ALERT_THRESHOLD = 100; @Value("${alert.alarm-suppression.crash:60}") private Integer crashAlarmSuppression; @@ -104,8 +103,8 @@ public int addAlert(Alert alert) { * update alert sending(execution) status * * @param alertStatus alertStatus - * @param log alert results json - * @param id id + * @param log alert results json + * @param id id * @return update alert result */ public int updateAlert(AlertStatus alertStatus, String log, int id) { @@ -134,9 +133,9 @@ private String generateSign(Alert alert) { /** * add AlertSendStatus * - * @param sendStatus alert send status - * @param log log - * @param alertId alert id + * @param sendStatus alert send status + * @param log log + * @param alertId alert id * @param alertPluginInstanceId alert plugin instance id * @return insert count */ @@ -192,7 +191,7 @@ public void sendServerStoppedAlert(int alertGroupId, String host, String serverT * process time out alert * * @param processInstance processInstance - * @param projectUser projectUser + * @param projectUser projectUser */ public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProjectUser projectUser) { int alertGroupId = processInstance.getWarningGroupId(); @@ -238,8 +237,8 @@ private void saveTaskTimeoutAlert(Alert alert, String content, int 
alertGroupId) * task timeout warn * * @param processInstance processInstanceId - * @param taskInstance taskInstance - * @param projectUser projectUser + * @param taskInstance taskInstance + * @param projectUser projectUser */ public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance taskInstance, ProjectUser projectUser) { @@ -271,10 +270,11 @@ public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance t } /** - * List alerts that are pending for execution + * List pending alerts which id > minAlertId and status = {@link AlertStatus#WAIT_EXECUTION} order by id asc. */ - public List listPendingAlerts() { - return alertMapper.listingAlertByStatus(AlertStatus.WAIT_EXECUTION.getCode(), QUERY_ALERT_THRESHOLD); + public List listPendingAlerts(int minAlertId) { + return alertMapper.listingAlertByStatus(minAlertId, AlertStatus.WAIT_EXECUTION.getCode(), + QUERY_ALERT_THRESHOLD); } public List listAlerts(int processInstanceId) { @@ -283,15 +283,6 @@ public List listAlerts(int processInstanceId) { return alertMapper.selectList(wrapper); } - /** - * for test - * - * @return AlertMapper - */ - public AlertMapper getAlertMapper() { - return alertMapper; - } - /** * list all alert plugin instance by alert group id * diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java index 985f5b56b4a5..e089c8086bbf 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java @@ -40,8 +40,8 @@ import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor; @Configuration +@ComponentScan("org.apache.dolphinscheduler.dao") @EnableAutoConfiguration -@ComponentScan({"org.apache.dolphinscheduler.dao.plugin"}) @MapperScan(basePackages = 
"org.apache.dolphinscheduler.dao.mapper", sqlSessionFactoryRef = "sqlSessionFactory") public class DaoConfiguration { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java index 71e3be70c452..24cb022881f6 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java @@ -29,10 +29,10 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Repository; @Slf4j -@Component +@Repository public class PluginDao { @Autowired diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AuditLog.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AuditLog.java index 10023d1547ad..7c6f76e1da15 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AuditLog.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/AuditLog.java @@ -19,11 +19,16 @@ import java.util.Date; +import lombok.Data; + +import org.springframework.beans.BeanUtils; + import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; +@Data @TableName("t_ds_audit_log") public class AuditLog { @@ -39,19 +44,19 @@ public class AuditLog { private Integer userId; /** - * resource type + * model type */ - private Integer resourceType; + private String modelType; /** * operation type */ - private Integer operation; + private String operationType; /** - * resource id + * model id */ - private Integer resourceId; + private Long modelId; /** * user name @@ -62,53 +67,19 @@ public class 
AuditLog { /** * operation time */ - private Date time; - - public Integer getUserId() { - return userId; - } - - public void setUserId(Integer userId) { - this.userId = userId; - } - - public Integer getResourceType() { - return resourceType; - } - - public void setResourceType(Integer resourceType) { - this.resourceType = resourceType; - } + private Date createTime; - public Integer getOperation() { - return operation; - } - - public void setOperation(Integer operation) { - this.operation = operation; - } - - public Integer getResourceId() { - return resourceId; - } + private String detail; - public void setResourceId(Integer resourceId) { - this.resourceId = resourceId; - } + private String description; - public String getUserName() { - return userName; - } + private String modelName; - public void setUserName(String userName) { - this.userName = userName; - } - - public Date getTime() { - return time; - } + private long latency; - public void setTime(Date time) { - this.time = time; + public static AuditLog copyNewOne(AuditLog auditLog) { + AuditLog auditLogNew = new AuditLog(); + BeanUtils.copyProperties(auditLog, auditLogNew); + return auditLogNew; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentProcessDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentProcessDefinition.java index 9fd77c944fd5..fcce2a33d331 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentProcessDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DependentProcessDefinition.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.dao.entity; -import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.CycleEnum; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.plugin.task.api.model.DependentItem; @@ -67,7 
+66,7 @@ public class DependentProcessDefinition { */ public CycleEnum getDependentCycle(long upstreamProcessDefinitionCode) { DependentParameters dependentParameters = this.getDependentParameters(); - List dependentTaskModelList = dependentParameters.getDependTaskList(); + List dependentTaskModelList = dependentParameters.getDependence().getDependTaskList(); for (DependentTaskModel dependentTaskModel : dependentTaskModelList) { List dependentItemList = dependentTaskModel.getDependItemList(); @@ -104,11 +103,7 @@ public CycleEnum cycle2CycleEnum(String cycle) { } public DependentParameters getDependentParameters() { - return JSONUtils.parseObject(getDependence(), DependentParameters.class); - } - - public String getDependence() { - return JSONUtils.getNodeString(this.taskParams, Constants.DEPENDENCE); + return JSONUtils.parseObject(taskParams, DependentParameters.class); } public String getProcessDefinitionName() { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java index aa4d3d47821a..d52984b61fde 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java @@ -52,6 +52,10 @@ public class ErrorCommand { */ private long processDefinitionCode; + private int processDefinitionVersion; + + private int processInstanceId; + /** * executor id */ @@ -73,7 +77,7 @@ public class ErrorCommand { private FailureStrategy failureStrategy; /** - * warning type + * warning type */ private WarningType warningType; @@ -135,21 +139,26 @@ public class ErrorCommand { public ErrorCommand() { } + public ErrorCommand(Command command, String message) { this.id = command.getId(); this.commandType = command.getCommandType(); this.executorId = command.getExecutorId(); this.processDefinitionCode = 
command.getProcessDefinitionCode(); + this.processDefinitionVersion = command.getProcessDefinitionVersion(); + this.processInstanceId = command.getProcessInstanceId(); this.commandParam = command.getCommandParam(); + this.taskDependType = command.getTaskDependType(); + this.failureStrategy = command.getFailureStrategy(); this.warningType = command.getWarningType(); this.warningGroupId = command.getWarningGroupId(); this.scheduleTime = command.getScheduleTime(); - this.taskDependType = command.getTaskDependType(); - this.failureStrategy = command.getFailureStrategy(); this.startTime = command.getStartTime(); this.updateTime = command.getUpdateTime(); - this.environmentCode = command.getEnvironmentCode(); this.processInstancePriority = command.getProcessInstancePriority(); + this.workerGroup = command.getWorkerGroup(); + this.tenantCode = command.getTenantCode(); + this.environmentCode = command.getEnvironmentCode(); this.message = message; this.dryRun = command.getDryRun(); this.testFlag = command.getTestFlag(); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java index d8f01f473f80..9ed8bca0efea 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java @@ -201,12 +201,6 @@ public class ProcessInstance { */ private Date restartTime; - /** - * workflow block flag - */ - @TableField(exist = false) - private boolean isBlocked; - /** * test flag */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectParameter.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectParameter.java index fbeeb387f1ba..4343d07c2993 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectParameter.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectParameter.java @@ -42,6 +42,8 @@ public class ProjectParameter { @TableField("user_id") private Integer userId; + private Integer operator; + private long code; @TableField("project_code") @@ -53,7 +55,16 @@ public class ProjectParameter { @TableField("param_value") private String paramValue; + @TableField("param_data_type") + private String paramDataType; + private Date createTime; private Date updateTime; + + @TableField(exist = false) + private String createUser; + + @TableField(exist = false) + private String modifyUser; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java index 4897449753a4..81b99c15889b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java @@ -17,26 +17,15 @@ package org.apache.dolphinscheduler.dao.entity; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_BLOCKING; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_CONDITIONS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DEPENDENT; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DYNAMIC; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SUB_PROCESS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SWITCH; - -import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Priority; import 
org.apache.dolphinscheduler.common.enums.TaskExecuteType; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; -import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import java.io.Serializable; import java.util.Date; -import java.util.Map; import java.util.concurrent.TimeUnit; import lombok.Data; @@ -46,7 +35,6 @@ import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; -import com.fasterxml.jackson.core.type.TypeReference; /** * task instance @@ -186,18 +174,6 @@ public class TaskInstance implements Serializable { @TableField(updateStrategy = FieldStrategy.IGNORED) private String cacheKey; - /** - * dependency - */ - @TableField(exist = false) - private DependentParameters dependency; - - /** - * switch dependency - */ - @TableField(exist = false) - private SwitchParameters switchDependency; - /** * duration */ @@ -225,12 +201,6 @@ public class TaskInstance implements Serializable { @TableField(exist = false) private Priority processInstancePriority; - /** - * dependent state - */ - @TableField(exist = false) - private String dependentResult; - /** * workerGroup */ @@ -303,43 +273,6 @@ public void init(String host, Date startTime, String executePath) { this.executePath = executePath; } - public DependentParameters getDependency() { - if (this.dependency == null) { - Map taskParamsMap = - JSONUtils.parseObject(this.getTaskParams(), new TypeReference>() { - }); - this.dependency = - JSONUtils.parseObject((String) taskParamsMap.get(Constants.DEPENDENCE), DependentParameters.class); - } - return this.dependency; - } - - public void 
setDependency(DependentParameters dependency) { - this.dependency = dependency; - } - - public SwitchParameters getSwitchDependency() { - // todo: We need to directly use Jackson to deserialize the taskParam, rather than parse the map and get from - // field. - if (this.switchDependency == null) { - Map taskParamsMap = - JSONUtils.parseObject(this.getTaskParams(), new TypeReference>() { - }); - this.switchDependency = - JSONUtils.parseObject((String) taskParamsMap.get(Constants.SWITCH_RESULT), SwitchParameters.class); - } - return this.switchDependency; - } - - public void setSwitchDependency(SwitchParameters switchDependency) { - Map taskParamsMap = - JSONUtils.parseObject(this.getTaskParams(), new TypeReference>() { - }); - taskParamsMap.put(Constants.SWITCH_RESULT, JSONUtils.toJsonString(switchDependency)); - this.switchDependency = switchDependency; - this.setTaskParams(JSONUtils.toJsonString(taskParamsMap)); - } - public boolean isTaskComplete() { return this.getState().isSuccess() @@ -348,30 +281,6 @@ public boolean isTaskComplete() { || this.getState().isForceSuccess(); } - public boolean isSubProcess() { - return TASK_TYPE_SUB_PROCESS.equalsIgnoreCase(this.taskType); - } - - public boolean isDependTask() { - return TASK_TYPE_DEPENDENT.equalsIgnoreCase(this.taskType); - } - - public boolean isDynamic() { - return TASK_TYPE_DYNAMIC.equalsIgnoreCase(this.taskType); - } - - public boolean isConditionsTask() { - return TASK_TYPE_CONDITIONS.equalsIgnoreCase(this.taskType); - } - - public boolean isSwitchTask() { - return TASK_TYPE_SWITCH.equalsIgnoreCase(this.taskType); - } - - public boolean isBlockingTask() { - return TASK_TYPE_BLOCKING.equalsIgnoreCase(this.taskType); - } - public boolean isFirstRun() { return endTime == null; } @@ -383,7 +292,7 @@ public boolean isFirstRun() { * @return can try result */ public boolean taskCanRetry() { - if (this.isSubProcess()) { + if (TaskTypeUtils.isSubWorkflowTask(getTaskType())) { return false; } if (this.getState() == 
TaskExecutionStatus.NEED_FAULT_TOLERANCE) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java deleted file mode 100644 index ab452cb1fff7..000000000000 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.dao.entity; - -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -import lombok.Data; - -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableField; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.KeyDeserializer; -import com.google.common.base.Strings; - -@Data -@TableName("t_ds_udfs") -public class UdfFunc { - - /** - * id - */ - @TableId(value = "id", type = IdType.AUTO) - private Integer id; - /** - * user id - */ - private int userId; - - public String getResourceType() { - return resourceType; - } - - public void setResourceType(String resourceType) { - this.resourceType = "UDF"; - } - - @TableField(exist = false) - private String resourceType = "UDF"; - /** - * udf function name - */ - private String funcName; - - /** - * udf class name - */ - private String className; - - /** - * udf argument types - */ - private String argTypes; - - /** - * udf data base - */ - private String database; - - /** - * udf description - */ - private String description; - - /** - * resource id - */ - private int resourceId; - - /** - * resource name - */ - private String resourceName; - - /** - * udf function type: hive / spark - */ - private UdfType type; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - /** - * user name - */ - @TableField(exist = false) - private String userName; - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - UdfFunc udfFunc = (UdfFunc) o; - - if (!Objects.equals(id, udfFunc.id)) { - 
return false; - } - return !(funcName != null ? !funcName.equals(udfFunc.funcName) : udfFunc.funcName != null); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + (funcName != null ? funcName.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return JSONUtils.toJsonString(this); - } - - public static class UdfFuncDeserializer extends KeyDeserializer { - - @Override - public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException { - if (Strings.isNullOrEmpty(key)) { - return null; - } - return JSONUtils.parseObject(key, UdfFunc.class); - } - } -} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java index c30c1c9043e6..aab7b6f5f222 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertMapper.java @@ -34,9 +34,10 @@ public interface AlertMapper extends BaseMapper { /** - * Query the alert by alertStatus and return limit with default sort. + * Query the alert which id > minAlertId and status = alertStatus order by id asc. 
*/ - List listingAlertByStatus(@Param("alertStatus") int alertStatus, @Param("limit") int limit); + List listingAlertByStatus(@Param("minAlertId") int minAlertId, @Param("alertStatus") int alertStatus, + @Param("limit") int limit); /** * Insert server crash alert diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.java index 7d77b3e42928..3cc95a76d167 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.java @@ -22,6 +22,7 @@ import org.apache.ibatis.annotations.Param; import java.util.Date; +import java.util.List; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; @@ -32,12 +33,11 @@ public interface AuditLogMapper extends BaseMapper { IPage queryAuditLog(IPage page, - @Param("resourceType") int[] resourceArray, - @Param("operationType") int[] operationType, + @Param("modelTypeList") List modelTypeList, + @Param("operationTypeList") List operationTypeList, @Param("userName") String userName, + @Param("modelName") String modelName, @Param("startDate") Date startDate, @Param("endDate") Date endDate); - String queryResourceNameByType(@Param("resourceType") String resourceType, - @Param("resourceId") Integer resourceId); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java index a8490cbef7c2..ff0f6243cec8 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java @@ -26,6 +26,8 @@ import java.util.List; import 
com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * command mapper interface @@ -34,8 +36,9 @@ public interface CommandMapper extends BaseMapper { /** * count command state - * @param startTime startTime - * @param endTime endTime + * + * @param startTime startTime + * @param endTime endTime * @param projectCodes projectCodes * @return CommandCount list */ @@ -46,15 +49,18 @@ List countCommandState( /** * query command page + * * @return */ - List queryCommandPage(@Param("limit") int limit, @Param("offset") int offset); + IPage queryCommandPage(Page page); - /** - * query command page by slot - * @return command list - */ - List queryCommandPageBySlot(@Param("limit") int limit, - @Param("masterCount") int masterCount, - @Param("thisMasterSlot") int thisMasterSlot); + List queryCommandByIdSlot(@Param("currentSlotIndex") int currentSlotIndex, + @Param("totalSlot") int totalSlot, + @Param("idStep") int idStep, + @Param("fetchNumber") int fetchNum); + + void deleteByWorkflowInstanceIds(@Param("workflowInstanceIds") List workflowInstanceIds); + + IPage queryCommandPageByIds(Page page, + @Param("workflowDefinitionCodes") List workflowDefinitionCodes); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java index b5dcc316274b..0eb87a9dcb86 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java @@ -80,12 +80,12 @@ IPage selectPaging(IPage page, List listAllDataSourceByType(@Param("type") Integer type); /** - * list authorized UDF function + * list authorized datasource * * @param userId userId * @param dataSourceIds data source id array * @param T 
- * @return UDF function list + * @return datasource list */ List listAuthorizedDataSource(@Param("userId") int userId, @Param("dataSourceIds") T[] dataSourceIds); @@ -102,8 +102,7 @@ List listAuthorizedDataSource(@Param("userId") int userId, /** * selectPagingByIds * @param dataSourcePage - * @param ids - * @param searchVal + * @param dataSourceIds * @return */ IPage selectPagingByIds(Page dataSourcePage, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java index 8054a91786c3..9dee5bbc32be 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.java @@ -26,6 +26,8 @@ import java.util.List; import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * error command mapper interface @@ -43,4 +45,9 @@ List countCommandState( @Param("startTime") Date startTime, @Param("endTime") Date endTime, @Param("projectCodes") List projectCodes); + + IPage queryErrorCommandPage(Page page); + + IPage queryErrorCommandPageByIds(Page page, + @Param("workflowDefinitionCodes") List workflowDefinitionCodes); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.java index 820ac3b3a616..f3e187f8030e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.java @@ -34,7 +34,8 @@ public interface ListenerEventMapper extends BaseMapper { void 
insertServerDownEvent(@Param("event") ListenerEvent event, @Param("crashAlarmSuppressionStartTime") Date crashAlarmSuppressionStartTime); - List listingListenerEventByStatus(@Param("postStatus") AlertStatus postStatus, + List listingListenerEventByStatus(@Param("minId") int minId, + @Param("postStatus") int postStatus, @Param("limit") int limit); void updateListenerEvent(@Param("eventId") int eventId, @Param("postStatus") AlertStatus postStatus, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java index 67140720fe48..689e93414bfb 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -21,12 +21,10 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.model.WorkflowDefinitionCountDto; -import org.apache.ibatis.annotations.MapKey; import org.apache.ibatis.annotations.Param; import java.util.Collection; import java.util.List; -import java.util.Map; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; @@ -166,26 +164,11 @@ List countDefinitionByProjectCodesV2(@Param("project @Param("userId") Integer userId, @Param("releaseState") Integer releaseState); - /** - * list all resource ids - * - * @return resource ids list - */ - @MapKey("id") - List> listResources(); - - /** - * list all resource ids by user id - * - * @return resource ids list - */ - @MapKey("id") - List> listResourcesByUser(@Param("userId") Integer userId); - /** * list all project ids * * @return project ids list */ List listProjectIds(); + List queryDefinitionCodeListByProjectCodes(@Param("projectCodes") List projectCodes); } diff --git 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java index 20c259da9b43..943b4ac2db9f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -188,13 +188,15 @@ List queryByProcessDefineCode(@Param("processDefinitionCode") L /** * query last scheduler process instance * - * @param definitionCode definitionCode + * @param processDefinitionCode definitionCode + * @param taskDefinitionCode definitionCode * @param startTime startTime * @param endTime endTime * @param testFlag testFlag * @return process instance */ - ProcessInstance queryLastSchedulerProcess(@Param("processDefinitionCode") Long definitionCode, + ProcessInstance queryLastSchedulerProcess(@Param("processDefinitionCode") Long processDefinitionCode, + @Param("taskDefinitionCode") Long taskDefinitionCode, @Param("startTime") Date startTime, @Param("endTime") Date endTime, @Param("testFlag") int testFlag); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.java index 0e7810158c1a..3f700a9a8413 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.java @@ -38,7 +38,8 @@ public interface ProjectParameterMapper extends BaseMapper { IPage queryProjectParameterListPaging(IPage page, @Param("projectCode") long projectCode, @Param("projectParameterIds") List projectParameterIds, - @Param("searchName") String searchName); + @Param("searchName") String searchName, + 
@Param("projectParameterDataType") String projectParameterDataType); List queryByProjectCode(@Param("projectCode") long projectCode); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java index 6c1a133a87b8..4caadac5409e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapper.java @@ -23,12 +23,10 @@ import org.apache.dolphinscheduler.dao.entity.TaskMainInfo; import org.apache.dolphinscheduler.dao.model.WorkflowDefinitionCountDto; -import org.apache.ibatis.annotations.MapKey; import org.apache.ibatis.annotations.Param; import java.util.Collection; import java.util.List; -import java.util.Map; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; @@ -74,22 +72,6 @@ TaskDefinition queryByName(@Param("projectCode") long projectCode, */ List countDefinitionGroupByUser(@Param("projectCodes") Long[] projectCodes); - /** - * list all resource ids and task_params containing resourceList - * - * @return task ids list - */ - @MapKey("id") - List> listResources(); - - /** - * list all resource ids by user id - * - * @return resource ids list - */ - @MapKey("id") - List> listResourcesByUser(@Param("userId") Integer userId); - /** * delete task definition by code * diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java index cada1c7092cf..8b8241a2ad91 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.java @@ -122,4 +122,18 @@ List queryUsingTaskGroupQueueByGroupId(@Param("taskGroupId") Int @Param("status") int status, @Param("inQueue") int inQueue, @Param("forceStart") int forceStart); + + int countUsingTaskGroupQueueByGroupId(@Param("taskGroupId") Integer taskGroupId, + @Param("status") int status, + @Param("inQueue") int inQueue, + @Param("forceStart") int forceStart); + + List queryInQueueTaskGroupQueue(@Param("inQueue") int inQueue, + @Param("minTaskGroupQueueId") int minTaskGroupQueueId, + @Param("limit") int limit); + + List queryWaitNotifyForceStartTaskGroupQueue(@Param("inQueue") int inQueue, + @Param("forceStart") int forceStart, + @Param("minTaskGroupQueueId") int minTaskGroupQueueId, + @Param("limit") int limit); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapper.java index 10a0acf47fbb..912ef2810166 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapper.java @@ -36,7 +36,7 @@ public interface TriggerRelationMapper extends BaseMapper { * @param jobId * @return */ - TriggerRelation queryByTypeAndJobId(@Param("triggerType") Integer triggerType, @Param("jobId") int jobId); + List queryByTypeAndJobId(@Param("triggerType") Integer triggerType, @Param("jobId") int jobId); /** * query triggerRelation by code diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java deleted file mode 100644 index 6bc8049c7dd3..000000000000 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.dao.mapper; - -import org.apache.dolphinscheduler.dao.entity.UdfFunc; - -import org.apache.ibatis.annotations.Param; - -import java.util.List; - -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; - -/** - * udf function mapper interface - */ -public interface UdfFuncMapper extends BaseMapper { - - /** - * select udf by id - * @param id udf id - * @return UdfFunc - */ - UdfFunc selectUdfById(@Param("id") int id); - - /** - * query udf function by ids and function name - * @param ids ids - * @param funcNames funcNames - * @return udf function list - */ - List queryUdfByIdStr(@Param("ids") Integer[] ids, - @Param("funcNames") String funcNames); - - /** - * udf function page - * @param page page - * @param ids userId - * @param searchVal searchVal - * @return udf function IPage - */ - IPage queryUdfFuncPaging(IPage page, - @Param("ids") List ids, - @Param("searchVal") String searchVal); - - /** - * query udf function by type - * @param ids userId - * 
@param type type - * @return udf function list - */ - List getUdfFuncByType(@Param("ids") List ids, - @Param("type") Integer type); - - /** - * query udf function except userId - * @param userId userId - * @return udf function list - */ - List queryUdfFuncExceptUserId(@Param("userId") int userId); - - /** - * query authed udf function - * @param userId userId - * @return udf function list - */ - List queryAuthedUdfFunc(@Param("userId") int userId); - - /** - * list authorized UDF function - * @param userId userId - * @param udfIds UDF function id array - * @return UDF function list - */ - List listAuthorizedUdfFunc(@Param("userId") int userId, @Param("udfIds") T[] udfIds); - - /** - * list UDF by resource id - * @param resourceIds resource id array - * @return UDF function list - */ - List listUdfByResourceId(@Param("resourceIds") Integer[] resourceIds); - - /** - * list UDF by resource fullName - * @param resourceFullNames resource fullName array - * @return UDF function list - */ - List listUdfByResourceFullName(@Param("resourceFullNames") String[] resourceFullNames); - - /** - * list authorized UDF by resource id - * @param resourceIds resource id array - * @return UDF function list - */ - List listAuthorizedUdfByResourceId(@Param("userId") int userId, @Param("resourceIds") int[] resourceIds); - - /** - * batch update udf func - * @param udfFuncList udf list - * @return update num - */ - int batchUpdateUdfFunc(@Param("udfFuncList") List udfFuncList); - - /** - * listAuthorizedUdfByUserId - * @param userId - * @return - */ - List listAuthorizedUdfByUserId(@Param("userId") int userId); -} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/BaseDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/BaseDao.java index 2937957dbdc4..664b56ee472c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/BaseDao.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/BaseDao.java @@ -56,6 +56,11 @@ public List queryByIds(Collection ids) { return mybatisMapper.selectBatchIds(ids); } + @Override + public List queryAll() { + return mybatisMapper.selectList(null); + } + @Override public List queryByCondition(ENTITY queryCondition) { if (queryCondition == null) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/CommandDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/CommandDao.java new file mode 100644 index 000000000000..daa52b83181d --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/CommandDao.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.repository; + +import org.apache.dolphinscheduler.dao.entity.Command; + +import java.util.List; + +public interface CommandDao extends IDao { + + /** + * Query command by command id and server slot, return the command which match (commandId / step) %s totalSlot = currentSlotIndex + * + * @param currentSlotIndex current slot index + * @param totalSlot total slot number + * @param idStep id step in db + * @param fetchNum fetch number + * @return command list + */ + List queryCommandByIdSlot(int currentSlotIndex, + int totalSlot, + int idStep, + int fetchNum); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/IDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/IDao.java index c566d9b90402..ab774196003f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/IDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/IDao.java @@ -41,6 +41,11 @@ public interface IDao { */ List queryByIds(Collection ids); + /** + * Query all entities. + */ + List queryAll(); + /** * Query the entity by condition. */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ListenerEventDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ListenerEventDao.java new file mode 100644 index 000000000000..424c616cd33e --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ListenerEventDao.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.repository; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; + +import java.util.Date; +import java.util.List; + +public interface ListenerEventDao extends IDao { + + List listingPendingEvents(int minId, int limit); + + void updateListenerEvent(int eventId, AlertStatus alertStatus, String message, Date date); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ProcessInstanceDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ProcessInstanceDao.java index 02703fe0f37d..91c567e2e37a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ProcessInstanceDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/ProcessInstanceDao.java @@ -41,11 +41,13 @@ public interface ProcessInstanceDao extends IDao { /** * find last scheduler process instance in the date interval * - * @param definitionCode definitionCode + * @param processDefinitionCode definitionCode + * @param taskDefinitionCode definitionCode * @param dateInterval dateInterval * @return process instance */ - ProcessInstance queryLastSchedulerProcessInterval(Long definitionCode, DateInterval dateInterval, int testFlag); + ProcessInstance 
queryLastSchedulerProcessInterval(Long processDefinitionCode, Long taskDefinitionCode, + DateInterval dateInterval, int testFlag); /** * find last manual process instance interval diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskGroupQueueDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskGroupQueueDao.java index a788b29bb116..468b7b758cee 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskGroupQueueDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TaskGroupQueueDao.java @@ -38,6 +38,17 @@ public interface TaskGroupQueueDao extends IDao { */ List queryAllInQueueTaskGroupQueue(); + /** + * Query all {@link TaskGroupQueue} which + * in_queue is {@link org.apache.dolphinscheduler.common.enums.Flag#YES} + * and id > minTaskGroupQueueId + * ordered by id asc + * limit #{limit} + * + * @return TaskGroupQueue ordered by id asc + */ + List queryInQueueTaskGroupQueue(int minTaskGroupQueueId, int limit); + /** * Query all {@link TaskGroupQueue} which in_queue is {@link org.apache.dolphinscheduler.common.enums.Flag#YES} and taskGroupId is taskGroupId * @@ -61,4 +72,24 @@ public interface TaskGroupQueueDao extends IDao { * @return TaskGroupQueue */ List queryAcquiredTaskGroupQueueByGroupId(Integer taskGroupId); + + /** + * Count all {@link TaskGroupQueue} which status is TaskGroupQueueStatus.ACQUIRE_SUCCESS and forceStart is {@link org.apache.dolphinscheduler.common.enums.Flag#NO}. 
+ * + * @param taskGroupId taskGroupId + * @return TaskGroupQueue + */ + int countUsingTaskGroupQueueByGroupId(Integer taskGroupId); + + /** + * Query all {@link TaskGroupQueue} which + * in_queue is {@link org.apache.dolphinscheduler.common.enums.Flag#YES} + * and forceStart is {@link org.apache.dolphinscheduler.common.enums.Flag#YES} + * and id > minTaskGroupQueueId + * order by id asc + * limit #{limit} + * + * @return TaskGroupQueue ordered by priority desc + */ + List queryWaitNotifyForceStartTaskGroupQueue(int minTaskGroupQueueId, int limit); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TenantDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TenantDao.java new file mode 100644 index 000000000000..9f48f372da91 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/TenantDao.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.repository; + +import org.apache.dolphinscheduler.dao.entity.Tenant; + +public interface TenantDao extends IDao { + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImpl.java new file mode 100644 index 000000000000..0b510d15b51d --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImpl.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.repository.impl; + +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.repository.BaseDao; +import org.apache.dolphinscheduler.dao.repository.CommandDao; + +import java.util.List; + +import org.springframework.stereotype.Repository; + +@Repository +public class CommandDaoImpl extends BaseDao implements CommandDao { + + public CommandDaoImpl(CommandMapper commandMapper) { + super(commandMapper); + } + + @Override + public List queryCommandByIdSlot(int currentSlotIndex, int totalSlot, int idStep, int fetchNum) { + return mybatisMapper.queryCommandByIdSlot(currentSlotIndex, totalSlot, idStep, fetchNum); + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImpl.java new file mode 100644 index 000000000000..06c4dccd5e7b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImpl.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.repository.impl; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; +import org.apache.dolphinscheduler.dao.mapper.ListenerEventMapper; +import org.apache.dolphinscheduler.dao.repository.BaseDao; +import org.apache.dolphinscheduler.dao.repository.ListenerEventDao; + +import java.util.Date; +import java.util.List; + +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import org.springframework.stereotype.Repository; + +@Slf4j +@Repository +public class ListenerEventDaoImpl extends BaseDao implements ListenerEventDao { + + public ListenerEventDaoImpl(@NonNull ListenerEventMapper listenerEventMapper) { + super(listenerEventMapper); + } + + @Override + public List listingPendingEvents(int minId, int limit) { + return mybatisMapper.listingListenerEventByStatus(minId, AlertStatus.WAIT_EXECUTION.getCode(), limit); + } + + @Override + public void updateListenerEvent(int eventId, AlertStatus alertStatus, String message, Date date) { + mybatisMapper.updateListenerEvent(eventId, alertStatus, message, date); + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImpl.java index 8ff93b3a9f4c..8e966c1850b5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImpl.java @@ -67,14 +67,18 @@ public void performTransactionalUpsert(ProcessInstance processInstance) { /** * find last scheduler process instance in the date interval * - * @param definitionCode definitionCode + * @param 
processDefinitionCode definitionCode + * @param taskDefinitionCode definitionCode * @param dateInterval dateInterval * @return process instance */ @Override - public ProcessInstance queryLastSchedulerProcessInterval(Long definitionCode, DateInterval dateInterval, + public ProcessInstance queryLastSchedulerProcessInterval(Long processDefinitionCode, Long taskDefinitionCode, + DateInterval dateInterval, int testFlag) { - return mybatisMapper.queryLastSchedulerProcess(definitionCode, + return mybatisMapper.queryLastSchedulerProcess( + processDefinitionCode, + taskDefinitionCode, dateInterval.getStartTime(), dateInterval.getEndTime(), testFlag); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImpl.java index a1808a909183..5fd50deaae07 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImpl.java @@ -52,6 +52,11 @@ public List queryAllInQueueTaskGroupQueue() { return mybatisMapper.queryAllTaskGroupQueueByInQueue(Flag.YES.getCode()); } + @Override + public List queryInQueueTaskGroupQueue(int minTaskGroupQueueId, int limit) { + return mybatisMapper.queryInQueueTaskGroupQueue(Flag.YES.getCode(), minTaskGroupQueueId, limit); + } + @Override public List queryAllInQueueTaskGroupQueueByGroupId(Integer taskGroupId) { return mybatisMapper.queryAllInQueueTaskGroupQueueByGroupId(taskGroupId, Flag.YES.getCode()); @@ -70,4 +75,21 @@ public List queryAcquiredTaskGroupQueueByGroupId(Integer taskGro Flag.YES.getCode(), Flag.NO.getCode()); } + + @Override + public int countUsingTaskGroupQueueByGroupId(Integer taskGroupId) { + return mybatisMapper.countUsingTaskGroupQueueByGroupId(taskGroupId, + 
TaskGroupQueueStatus.ACQUIRE_SUCCESS.getCode(), + Flag.YES.ordinal(), + Flag.NO.getCode()); + } + + @Override + public List queryWaitNotifyForceStartTaskGroupQueue(int minTaskGroupQueueId, int limit) { + return mybatisMapper.queryWaitNotifyForceStartTaskGroupQueue( + Flag.YES.getCode(), + Flag.YES.getCode(), + minTaskGroupQueueId, + limit); + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TenantDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TenantDaoImpl.java new file mode 100644 index 000000000000..45f9a5fcba69 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TenantDaoImpl.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.repository.impl; + +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.repository.BaseDao; +import org.apache.dolphinscheduler.dao.repository.TenantDao; + +import lombok.NonNull; + +import org.springframework.stereotype.Repository; + +@Repository +public class TenantDaoImpl extends BaseDao implements TenantDao { + + public TenantDaoImpl(@NonNull TenantMapper tenantMapper) { + super(tenantMapper); + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtils.java new file mode 100644 index 000000000000..89ea647f75bd --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtils.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.utils; + +public class EnvironmentUtils { + + private static final long EMPTY_ENVIRONMENT_CODE = -1L; + + /** + * Check if the environment code is empty (we should use null instead of -1, this is used to comply with the original code) + * + * @return true if the environment code is empty, false otherwise + */ + public static boolean isEnvironmentCodeEmpty(Long environmentCode) { + return environmentCode == null || environmentCode <= 0; + } + + /** + * Get the empty environment code + */ + public static Long getDefaultEnvironmentCode() { + return EMPTY_ENVIRONMENT_CODE; + } + + /** + * Get the environment code or the default environment code if the environment code is empty + */ + public static Long getEnvironmentCodeOrDefault(Long environmentCode) { + return getEnvironmentCodeOrDefault(environmentCode, getDefaultEnvironmentCode()); + } + + /** + * Get the environment code or the default environment code if the environment code is empty + */ + public static Long getEnvironmentCodeOrDefault(Long environmentCode, Long defaultEnvironmentCode) { + return isEnvironmentCodeEmpty(environmentCode) ? 
defaultEnvironmentCode : environmentCode; + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java index 36cf6c23568e..90b648386b9e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java @@ -22,7 +22,7 @@ import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; @@ -65,17 +65,17 @@ private TaskCacheUtils() { * 4. 
input VarPool, from upstream task and workflow global parameters * @param taskInstance task instance * @param taskExecutionContext taskExecutionContext - * @param storageOperate storageOperate + * @param storageOperator storageOperate * @return cache key */ public static String generateCacheKey(TaskInstance taskInstance, TaskExecutionContext taskExecutionContext, - StorageOperate storageOperate) { + StorageOperator storageOperator) { List keyElements = new ArrayList<>(); keyElements.add(String.valueOf(taskInstance.getTaskCode())); keyElements.add(String.valueOf(taskInstance.getTaskDefinitionVersion())); keyElements.add(String.valueOf(taskInstance.getIsCache().getCode())); keyElements.add(String.valueOf(taskInstance.getEnvironmentConfig())); - keyElements.add(getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperate)); + keyElements.add(getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperator)); String data = StringUtils.join(keyElements, "_"); return DigestUtils.sha256Hex(data); } @@ -123,7 +123,7 @@ public static Pair revertCacheKey(String tagCacheKey) { * taskExecutionContext taskExecutionContext */ public static String getTaskInputVarPoolData(TaskInstance taskInstance, TaskExecutionContext context, - StorageOperate storageOperate) { + StorageOperator storageOperator) { JsonNode taskParams = JSONUtils.parseObject(taskInstance.getTaskParams()); // The set of input values considered from localParams in the taskParams @@ -141,7 +141,8 @@ public static String getTaskInputVarPoolData(TaskInstance taskInstance, TaskExec List fileInput = varPool.stream().filter(property -> property.getType().equals(DataType.FILE)) .collect(Collectors.toList()); fileInput.forEach( - property -> fileCheckSumMap.put(property.getProp(), getValCheckSum(property, context, storageOperate))); + property -> fileCheckSumMap.put(property.getProp(), + getValCheckSum(property, context, storageOperator))); // var pool value from workflow global parameters if 
(context.getPrepareParamsMap() != null) { @@ -173,17 +174,18 @@ public static String getTaskInputVarPoolData(TaskInstance taskInstance, TaskExec * cache can be used if content of upstream output files are the same * @param fileProperty * @param context - * @param storageOperate + * @param storageOperator */ public static String getValCheckSum(Property fileProperty, TaskExecutionContext context, - StorageOperate storageOperate) { + StorageOperator storageOperator) { String resourceCRCPath = fileProperty.getValue() + CRC_SUFFIX; - String resourceCRCWholePath = storageOperate.getResourceFullName(context.getTenantCode(), resourceCRCPath); + String resourceCRCWholePath = + storageOperator.getStorageFileAbsolutePath(context.getTenantCode(), resourceCRCPath); String targetPath = String.format("%s/%s", context.getExecutePath(), resourceCRCPath); log.info("{} --- Remote:{} to Local:{}", "CRC file", resourceCRCWholePath, targetPath); String crcString = ""; try { - storageOperate.download(resourceCRCWholePath, targetPath, true); + storageOperator.download(resourceCRCWholePath, targetPath, true); crcString = FileUtils.readFile2Str(new FileInputStream(targetPath)); fileProperty.setValue(crcString); } catch (IOException e) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java index f75a2f5e1164..6c1c7f03a3c5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskInstanceUtils.java @@ -55,16 +55,13 @@ public static void copyTaskInstance(TaskInstance source, TaskInstance target) { target.setPid(source.getPid()); target.setAppLink(source.getAppLink()); target.setFlag(source.getFlag()); - target.setDependency(source.getDependency()); // todo: we need to cpoy the task params and then copy 
switchDependency, since the setSwitchDependency rely on // task params, this is really a very bad practice. target.setTaskParams(source.getTaskParams()); - target.setSwitchDependency(source.getSwitchDependency()); target.setDuration(source.getDuration()); target.setMaxRetryTimes(source.getMaxRetryTimes()); target.setRetryInterval(source.getRetryInterval()); target.setTaskInstancePriority(source.getTaskInstancePriority()); - target.setDependentResult(source.getDependentResult()); target.setWorkerGroup(source.getWorkerGroup()); target.setEnvironmentCode(source.getEnvironmentCode()); target.setEnvironmentConfig(source.getEnvironmentConfig()); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/BlockingParameters.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtils.java similarity index 51% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/BlockingParameters.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtils.java index 2942a08c301b..2436d45a027e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/BlockingParameters.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtils.java @@ -15,37 +15,31 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.api.parameters; +package org.apache.dolphinscheduler.dao.utils; import org.apache.commons.lang3.StringUtils; -public class BlockingParameters extends AbstractParameters { +public class WorkerGroupUtils { - // condition of blocking: BlockingOnFailed or BlockingOnSuccess - private String blockingOpportunity; + private static final String DEFAULT_WORKER_GROUP = "default"; - // if true, alert when blocking, otherwise do nothing - - private boolean isAlertWhenBlocking; - - @Override - public boolean checkParameters() { - return !StringUtils.isEmpty(blockingOpportunity); + /** + * Check if the worker group is empty, if the worker group is default, it is considered empty + */ + public static boolean isWorkerGroupEmpty(String workerGroup) { + return StringUtils.isEmpty(workerGroup) || getDefaultWorkerGroup().equals(workerGroup); } - public String getBlockingOpportunity() { - return blockingOpportunity; + public static String getWorkerGroupOrDefault(String workerGroup) { + return getWorkerGroupOrDefault(workerGroup, getDefaultWorkerGroup()); } - public void setBlockingCondition(String blockingOpportunity) { - this.blockingOpportunity = blockingOpportunity; + public static String getWorkerGroupOrDefault(String workerGroup, String defaultWorkerGroup) { + return isWorkerGroupEmpty(workerGroup) ? 
defaultWorkerGroup : workerGroup; } - public boolean isAlertWhenBlocking() { - return isAlertWhenBlocking; + public static String getDefaultWorkerGroup() { + return DEFAULT_WORKER_GROUP; } - public void setAlertWhenBlocking(boolean alertWhenBlocking) { - isAlertWhenBlocking = alertWhenBlocking; - } } diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml index e56afa830e61..f0c32aae78bd 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml @@ -55,7 +55,9 @@ select from t_ds_alert - where alert_status = #{alertStatus} + where id > #{minAlertId} + and alert_status = #{alertStatus} + order by id asc limit #{limit} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.xml index 09ba57075f90..63adb57c3402 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AuditLogMapper.xml @@ -19,10 +19,10 @@ - id, user_id, resource_type, operation, resource_id, time + id, user_id, model_type, operation_type, model_id, model_name, create_time, detail, description, latency - ${alias}.id, ${alias}.user_id, ${alias}.resource_type, ${alias}.operation, ${alias}.resource_id, ${alias}.time + ${alias}.id, ${alias}.user_id, ${alias}.model_type, ${alias}.operation_type, ${alias}.model_id, ${alias}.model_name, ${alias}.create_time, ${alias}.detail, ${alias}.description, ${alias}.latency - - + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml index c950f664138a..7f587ef899e8 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml @@ -34,17 +34,36 @@ group by cmd.command_type + + - select * from t_ds_command - where id % #{masterCount} = #{thisMasterSlot} + where (id / #{idStep}) % #{totalSlot} = #{currentSlotIndex} order by process_instance_priority, id asc - limit #{limit} + limit #{fetchNumber} + + delete from t_ds_command + where process_instance_id in + + #{i} + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml index 8cf9fbd802fe..217dc43de0cc 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml @@ -33,4 +33,23 @@ group by cmd.command_type + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.xml index ae76bfcadd2e..8e6d3bbc3a9f 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapper.xml @@ -60,7 +60,8 @@ select from t_ds_listener_event - where post_status = #{postStatus.code} + where id > #{minId} and post_status = #{postStatus} + order by id asc limit #{limit} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml index 77775b766868..874db82ccc59 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml @@ -180,32 +180,18 @@ AND pd.id = #{processDefineId} - - - - + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml index 095e266ed57e..b31bb571d7e0 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml @@ -197,12 +197,17 @@ order by start_time desc limit #{size} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.xml index 159c263ea44d..4e1e972a04e4 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapper.xml @@ -19,7 +19,7 @@ - id, param_name, param_value, code, project_code, user_id, create_time, update_time + id, param_name, param_value, param_data_type, code, project_code, user_id, operator, create_time, update_time select - - from t_ds_project_parameter + pp.id, param_name, param_value, param_data_type, code, project_code, user_id, operator, pp.create_time, pp.update_time, + u.user_name as create_user, + u2.user_name as modify_user + from t_ds_project_parameter pp + left join t_ds_user u on pp.user_id = u.id + left 
join t_ds_user u2 on pp.operator = u2.id where project_code = #{projectCode} - and id in + and pp.id in #{id} @@ -65,7 +69,10 @@ OR param_value LIKE concat('%', #{searchName}, '%') ) - order by update_time desc + + AND param_data_type = #{projectParameterDataType} + + order by pp.update_time desc - - delete from t_ds_task_definition where code = #{code} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml index 790ad7bfaef0..800e82e8479d 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskGroupQueueMapper.xml @@ -219,6 +219,16 @@ where in_queue = #{inQueue} order by priority desc + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml deleted file mode 100644 index 24cb8de95626..000000000000 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml +++ /dev/null @@ -1,201 +0,0 @@ - - - - - - - - ${alias}.id, ${alias}.user_id, ${alias}.func_name, ${alias}.class_name, ${alias}.type, ${alias}.arg_types, - ${alias}.database, ${alias}.description, ${alias}.resource_id, ${alias}.resource_name, ${alias}.create_time, ${alias}.update_time - - - - - - - - - - - - - - - - update t_ds_udfs - - resource_name=#{udf.resourceName}, - update_time=#{udf.updateTime} - - - id=#{udf.id} - - - - - - - diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql index f74000e73154..a907ec69a195 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql 
+++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql @@ -667,9 +667,11 @@ CREATE TABLE t_ds_project_parameter id int(11) NOT NULL AUTO_INCREMENT, param_name varchar(255) NOT NULL, param_value text NOT NULL, + param_data_type varchar(50) DEFAULT 'VARCHAR', code bigint(20) NOT NULL, project_code bigint(20) NOT NULL, user_id int(11) DEFAULT NULL, + operator int(11) DEFAULT NULL, create_time datetime NOT NULL, update_time datetime DEFAULT NULL, PRIMARY KEY (id), @@ -2019,10 +2021,14 @@ CREATE TABLE t_ds_audit_log ( id int(11) NOT NULL AUTO_INCREMENT, user_id int(11) NOT NULL, - resource_type int(11) NOT NULL, - operation int(11) NOT NULL, - time timestamp NULL DEFAULT CURRENT_TIMESTAMP, - resource_id int(11) NOT NULL, + model_id bigint(20) NOT NULL, + model_name varchar(255) NOT NULL, + model_type varchar(255) NOT NULL, + operation_type varchar(255) NOT NULL, + description varchar(255) NOT NULL, + latency int(11) NOT NULL, + detail varchar(255) DEFAULT NULL, + create_time timestamp NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (id) ); diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql index 39512d8c70bb..3a4f17a357ae 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql @@ -15,7 +15,70 @@ * limitations under the License. 
*/ -SET FOREIGN_KEY_CHECKS=0; +-- ---------------------------- +-- Table structure for QRTZ_JOB_DETAILS +-- ---------------------------- +DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`; +CREATE TABLE `QRTZ_JOB_DETAILS` ( + `SCHED_NAME` varchar(120) NOT NULL, + `JOB_NAME` varchar(200) NOT NULL, + `JOB_GROUP` varchar(200) NOT NULL, + `DESCRIPTION` varchar(250) DEFAULT NULL, + `JOB_CLASS_NAME` varchar(250) NOT NULL, + `IS_DURABLE` varchar(1) NOT NULL, + `IS_NONCONCURRENT` varchar(1) NOT NULL, + `IS_UPDATE_DATA` varchar(1) NOT NULL, + `REQUESTS_RECOVERY` varchar(1) NOT NULL, + `JOB_DATA` blob, + PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), + KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), + KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin; + +-- ---------------------------- +-- Records of QRTZ_JOB_DETAILS +-- ---------------------------- + +-- ---------------------------- +-- Table structure for QRTZ_TRIGGERS +-- ---------------------------- +DROP TABLE IF EXISTS `QRTZ_TRIGGERS`; +CREATE TABLE `QRTZ_TRIGGERS` ( + `SCHED_NAME` varchar(120) NOT NULL, + `TRIGGER_NAME` varchar(200) NOT NULL, + `TRIGGER_GROUP` varchar(200) NOT NULL, + `JOB_NAME` varchar(200) NOT NULL, + `JOB_GROUP` varchar(200) NOT NULL, + `DESCRIPTION` varchar(250) DEFAULT NULL, + `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL, + `PREV_FIRE_TIME` bigint(13) DEFAULT NULL, + `PRIORITY` int(11) DEFAULT NULL, + `TRIGGER_STATE` varchar(16) NOT NULL, + `TRIGGER_TYPE` varchar(8) NOT NULL, + `START_TIME` bigint(13) NOT NULL, + `END_TIME` bigint(13) DEFAULT NULL, + `CALENDAR_NAME` varchar(200) DEFAULT NULL, + `MISFIRE_INSTR` smallint(2) DEFAULT NULL, + `JOB_DATA` blob, + PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), + KEY `IDX_QRTZ_T_J` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), + KEY `IDX_QRTZ_T_JG` (`SCHED_NAME`,`JOB_GROUP`), + KEY `IDX_QRTZ_T_C` (`SCHED_NAME`,`CALENDAR_NAME`), + KEY `IDX_QRTZ_T_G` (`SCHED_NAME`,`TRIGGER_GROUP`), + KEY 
`IDX_QRTZ_T_STATE` (`SCHED_NAME`,`TRIGGER_STATE`), + KEY `IDX_QRTZ_T_N_STATE` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), + KEY `IDX_QRTZ_T_N_G_STATE` (`SCHED_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), + KEY `IDX_QRTZ_T_NEXT_FIRE_TIME` (`SCHED_NAME`,`NEXT_FIRE_TIME`), + KEY `IDX_QRTZ_T_NFT_ST` (`SCHED_NAME`,`TRIGGER_STATE`,`NEXT_FIRE_TIME`), + KEY `IDX_QRTZ_T_NFT_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`), + KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), + KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), + CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin; + +-- ---------------------------- +-- Records of QRTZ_TRIGGERS +-- ---------------------------- -- ---------------------------- -- Table structure for QRTZ_BLOB_TRIGGERS @@ -99,30 +162,6 @@ CREATE TABLE `QRTZ_FIRED_TRIGGERS` ( -- Records of QRTZ_FIRED_TRIGGERS -- ---------------------------- --- ---------------------------- --- Table structure for QRTZ_JOB_DETAILS --- ---------------------------- -DROP TABLE IF EXISTS `QRTZ_JOB_DETAILS`; -CREATE TABLE `QRTZ_JOB_DETAILS` ( - `SCHED_NAME` varchar(120) NOT NULL, - `JOB_NAME` varchar(200) NOT NULL, - `JOB_GROUP` varchar(200) NOT NULL, - `DESCRIPTION` varchar(250) DEFAULT NULL, - `JOB_CLASS_NAME` varchar(250) NOT NULL, - `IS_DURABLE` varchar(1) NOT NULL, - `IS_NONCONCURRENT` varchar(1) NOT NULL, - `IS_UPDATE_DATA` varchar(1) NOT NULL, - `REQUESTS_RECOVERY` varchar(1) NOT NULL, - `JOB_DATA` blob, - PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), - KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), - KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin; - --- ---------------------------- --- 
Records of QRTZ_JOB_DETAILS --- ---------------------------- - -- ---------------------------- -- Table structure for QRTZ_LOCKS -- ---------------------------- @@ -213,47 +252,6 @@ CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` ( -- Records of QRTZ_SIMPROP_TRIGGERS -- ---------------------------- --- ---------------------------- --- Table structure for QRTZ_TRIGGERS --- ---------------------------- -DROP TABLE IF EXISTS `QRTZ_TRIGGERS`; -CREATE TABLE `QRTZ_TRIGGERS` ( - `SCHED_NAME` varchar(120) NOT NULL, - `TRIGGER_NAME` varchar(200) NOT NULL, - `TRIGGER_GROUP` varchar(200) NOT NULL, - `JOB_NAME` varchar(200) NOT NULL, - `JOB_GROUP` varchar(200) NOT NULL, - `DESCRIPTION` varchar(250) DEFAULT NULL, - `NEXT_FIRE_TIME` bigint(13) DEFAULT NULL, - `PREV_FIRE_TIME` bigint(13) DEFAULT NULL, - `PRIORITY` int(11) DEFAULT NULL, - `TRIGGER_STATE` varchar(16) NOT NULL, - `TRIGGER_TYPE` varchar(8) NOT NULL, - `START_TIME` bigint(13) NOT NULL, - `END_TIME` bigint(13) DEFAULT NULL, - `CALENDAR_NAME` varchar(200) DEFAULT NULL, - `MISFIRE_INSTR` smallint(2) DEFAULT NULL, - `JOB_DATA` blob, - PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), - KEY `IDX_QRTZ_T_J` (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), - KEY `IDX_QRTZ_T_JG` (`SCHED_NAME`,`JOB_GROUP`), - KEY `IDX_QRTZ_T_C` (`SCHED_NAME`,`CALENDAR_NAME`), - KEY `IDX_QRTZ_T_G` (`SCHED_NAME`,`TRIGGER_GROUP`), - KEY `IDX_QRTZ_T_STATE` (`SCHED_NAME`,`TRIGGER_STATE`), - KEY `IDX_QRTZ_T_N_STATE` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), - KEY `IDX_QRTZ_T_N_G_STATE` (`SCHED_NAME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), - KEY `IDX_QRTZ_T_NEXT_FIRE_TIME` (`SCHED_NAME`,`NEXT_FIRE_TIME`), - KEY `IDX_QRTZ_T_NFT_ST` (`SCHED_NAME`,`TRIGGER_STATE`,`NEXT_FIRE_TIME`), - KEY `IDX_QRTZ_T_NFT_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`), - KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), - KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` 
(`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), - CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin; - --- ---------------------------- --- Records of QRTZ_TRIGGERS --- ---------------------------- - -- ---------------------------- -- Table structure for t_ds_access_token -- ---------------------------- @@ -670,9 +668,11 @@ CREATE TABLE `t_ds_project_parameter` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `param_name` varchar(255) NOT NULL COMMENT 'project parameter name', `param_value` text NOT NULL COMMENT 'project parameter value', + `param_data_type` varchar(50) DEFAULT 'VARCHAR' COMMENT 'project parameter data type', `code` bigint(20) NOT NULL COMMENT 'encoding', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'creator id', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `create_time` datetime NOT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`), @@ -2008,10 +2008,14 @@ DROP TABLE IF EXISTS `t_ds_audit_log`; CREATE TABLE `t_ds_audit_log` ( `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT'key', `user_id` int(11) NOT NULL COMMENT 'user id', - `resource_type` int(11) NOT NULL COMMENT 'resource type', - `operation` int(11) NOT NULL COMMENT 'operation', - `time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', - `resource_id` int(11) NULL DEFAULT NULL COMMENT 'resource id', + `model_id` bigint(20) DEFAULT NULL COMMENT 'model id', + `model_name` varchar(100) DEFAULT NULL COMMENT 'model name', + `model_type` varchar(100) NOT NULL COMMENT 'model type', + `operation_type` varchar(100) NOT NULL COMMENT 'operation type', + `description` varchar(100) DEFAULT NULL COMMENT 'api description', + `latency` int(11) DEFAULT NULL COMMENT 'api cost 
milliseconds', + `detail` varchar(100) DEFAULT NULL COMMENT 'object change detail', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'operation time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT= 1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql index 946a544064b0..d314c6ef0942 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql @@ -595,9 +595,11 @@ CREATE TABLE t_ds_project_parameter ( id int NOT NULL , param_name varchar(255) NOT NULL , param_value text NOT NULL , + param_data_type varchar(50) DEFAULT 'VARCHAR', code bigint NOT NULL, project_code bigint NOT NULL, user_id int DEFAULT NULL , + operator int DEFAULT NULL , create_time timestamp DEFAULT CURRENT_TIMESTAMP , update_time timestamp DEFAULT CURRENT_TIMESTAMP , PRIMARY KEY (id) @@ -1992,11 +1994,15 @@ CREATE TABLE t_ds_task_group ( DROP TABLE IF EXISTS t_ds_audit_log; CREATE TABLE t_ds_audit_log ( id serial NOT NULL, - user_id int NOT NULL, - resource_type int NOT NULL, - operation int NOT NULL, - time timestamp DEFAULT NULL , - resource_id int NOT NULL, + user_id int NOT NULL, + model_id bigint NOT NULL, + model_name VARCHAR(255) NOT NULL, + model_type VARCHAR(255) NOT NULL, + operation_type VARCHAR(255) NOT NULL, + description VARCHAR(255) NOT NULL, + latency int NOT NULL, + detail VARCHAR(255) DEFAULT NULL, + create_time timestamp DEFAULT NULL , PRIMARY KEY (id) ); diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.0.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.0.0_schema/postgresql/dolphinscheduler_ddl.sql index 393bda87fcc3..fbc9b26b8647 100644 --- 
a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.0.0_schema/postgresql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.0.0_schema/postgresql/dolphinscheduler_ddl.sql @@ -40,7 +40,8 @@ ALTER TABLE t_ds_alert ADD COLUMN IF NOT EXISTS "process_instance_id" int DEFAUL ALTER TABLE t_ds_alert ADD COLUMN IF NOT EXISTS "alert_type" int DEFAULT NULL; --- Add unique key -CREATE UNIQUE INDEX IF NOT EXISTS t_ds_relation_project_user_un on t_ds_relation_project_user (user_id, project_id); + +CREATE INDEX IF NOT EXISTS t_ds_relation_project_user_un on t_ds_relation_project_user (user_id, project_id); CREATE UNIQUE INDEX IF NOT EXISTS unique_name on t_ds_project (name); CREATE UNIQUE INDEX IF NOT EXISTS unique_code on t_ds_project (code); CREATE UNIQUE INDEX IF NOT EXISTS unique_queue_name on t_ds_queue (queue_name); diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql index e5d97fab946f..21189c77a3a3 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql @@ -30,6 +30,9 @@ END IF; END; d// +-- If the admin account is not associated with a tenant, the admin's tenant will be set to the default tenant. 
+UPDATE `t_ds_user` SET `tenant_id` = '-1' WHERE (`user_name` = 'admin') AND (`tenant_id` = '0'); + delimiter ; CALL dolphin_t_ds_tenant_insert_default(); DROP PROCEDURE dolphin_t_ds_tenant_insert_default; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql index 482b317a60c4..f47d16f7c7db 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql @@ -21,6 +21,9 @@ INSERT INTO t_ds_tenant(id, tenant_code, description, queue_id, create_time, upd UPDATE t_ds_schedules as t1 SET tenant_code = COALESCE(t3.tenant_code, 'default') FROM t_ds_process_definition as t2 LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id WHERE t1.process_definition_code = t2.code; UPDATE t_ds_process_instance SET tenant_code = 'default' WHERE tenant_code IS NULL; +-- If the admin account is not associated with a tenant, the admin's tenant will be set to the default tenant. 
+UPDATE t_ds_user SET tenant_id = '-1' WHERE (user_name = 'admin') AND (tenant_id = '0'); + -- data quality support choose database INSERT INTO t_ds_dq_rule_input_entry (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/mysql/dolphinscheduler_ddl.sql index 7a3ab5df6856..000173b39745 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/mysql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/mysql/dolphinscheduler_ddl.sql @@ -25,3 +25,34 @@ CREATE TABLE `t_ds_relation_project_worker_group` ( UNIQUE KEY unique_project_worker_group(project_code,worker_group) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin; +ALTER TABLE t_ds_project_parameter ADD `operator` int(11) DEFAULT NULL COMMENT 'operator user id'; + +-- modify_data_t_ds_audit_log_input_entry behavior change +--DROP PROCEDURE if EXISTS modify_data_t_ds_audit_log_input_entry; +DROP PROCEDURE if EXISTS modify_data_t_ds_audit_log_input_entry; +delimiter d// +CREATE PROCEDURE modify_data_t_ds_audit_log_input_entry() +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_audit_log' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME ='resource_type') + THEN +ALTER TABLE `t_ds_audit_log` +drop resource_type, drop operation, drop resource_id, + add `model_id` bigint(20) DEFAULT NULL COMMENT 'model id', + add `model_name` varchar(100) DEFAULT NULL COMMENT 'model name', + add `model_type` varchar(100) NOT NULL COMMENT 'model type', + add `operation_type` varchar(100) NOT NULL COMMENT 'operation type', + add `description` varchar(100) DEFAULT NULL COMMENT 'api description', + add `latency` int(11) DEFAULT 
NULL COMMENT 'api cost milliseconds', + add `detail` varchar(100) DEFAULT NULL COMMENT 'object change detail', + CHANGE COLUMN `time` `create_time` datetime NULL DEFAULT CURRENT_TIMESTAMP COMMENT "operation time"; +END IF; +END; +d// +delimiter ; +CALL modify_data_t_ds_audit_log_input_entry; +DROP PROCEDURE modify_data_t_ds_audit_log_input_entry; + +ALTER TABLE t_ds_project_parameter ADD `param_data_type` varchar (50) DEFAULT 'VARCHAR' COMMENT 'project parameter data type'; \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/postgresql/dolphinscheduler_ddl.sql index 587bab415963..4b4cb6665176 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/postgresql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.2_schema/postgresql/dolphinscheduler_ddl.sql @@ -27,4 +27,35 @@ CREATE TABLE t_ds_relation_project_worker_group ( DROP SEQUENCE IF EXISTS t_ds_relation_project_worker_group_sequence; CREATE SEQUENCE t_ds_relation_project_worker_group_sequence; -ALTER TABLE t_ds_relation_project_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_worker_group_sequence'); \ No newline at end of file +ALTER TABLE t_ds_relation_project_worker_group ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_relation_project_worker_group_sequence'); + +ALTER TABLE t_ds_project_parameter ADD COLUMN IF NOT EXISTS operator int; + +-- modify_data_t_ds_audit_log_input_entry +delimiter d// +CREATE OR REPLACE FUNCTION modify_data_t_ds_audit_log_input_entry() RETURNS void AS $$ +BEGIN + IF EXISTS (SELECT 1 + FROM information_schema.columns + WHERE table_name = 't_ds_audit_log' + AND column_name = 'resource_type') + THEN + ALTER TABLE t_ds_audit_log + drop resource_type, drop operation, drop resource_id, + add model_id bigint NOT NULL, + add model_name VARCHAR(255) NOT NULL, + 
add model_type VARCHAR(255) NOT NULL, + add operation_type VARCHAR(255) NOT NULL, + add description VARCHAR(255) NOT NULL, + add latency int NOT NULL, + add detail VARCHAR(255) DEFAULT NULL; + ALTER TABLE t_ds_audit_log RENAME COLUMN "time" TO "create_time"; +END IF; +END; +$$ LANGUAGE plpgsql; +d// + +select modify_data_t_ds_audit_log_input_entry(); +DROP FUNCTION IF EXISTS modify_data_t_ds_audit_log_input_entry(); + +ALTER TABLE t_ds_project_parameter ADD COLUMN IF NOT EXISTS param_data_type varchar(50) DEFAULT 'VARCHAR'; \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java index 231c947f6588..6f14eb436e75 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java @@ -22,7 +22,6 @@ import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.annotation.Rollback; -import org.springframework.transaction.annotation.EnableTransactionManagement; import org.springframework.transaction.annotation.Transactional; @ExtendWith(MockitoExtension.class) @@ -30,6 +29,5 @@ @SpringBootApplication(scanBasePackageClasses = DaoConfiguration.class) @Transactional @Rollback -@EnableTransactionManagement public abstract class BaseDaoTest { } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java new file mode 100644 index 000000000000..057d6578e6a4 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/ErrorCommandTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.WarningType; + +import java.util.Date; + +import org.junit.jupiter.api.Test; + +class ErrorCommandTest { + + @Test + void testConstructor() { + Command command = new Command(); + command.setId(1); + command.setCommandType(CommandType.PAUSE); + command.setExecutorId(1); + command.setProcessDefinitionCode(123); + command.setProcessDefinitionVersion(1); + command.setProcessInstanceId(1); + command.setCommandParam("param"); + command.setTaskDependType(TaskDependType.TASK_POST); + command.setFailureStrategy(FailureStrategy.CONTINUE); + command.setWarningType(WarningType.ALL); + command.setWarningGroupId(1); + command.setScheduleTime(new Date()); + command.setStartTime(new Date()); + command.setUpdateTime(new Date()); + command.setProcessInstancePriority(Priority.HIGHEST); + 
command.setWorkerGroup("default"); + command.setTenantCode("root"); + command.setEnvironmentCode(1L); + command.setDryRun(1); + command.setTestFlag(Flag.NO.getCode()); + + ErrorCommand errorCommand = new ErrorCommand(command, "test"); + assertEquals(command.getCommandType(), errorCommand.getCommandType()); + assertEquals(command.getExecutorId(), errorCommand.getExecutorId()); + assertEquals(command.getProcessDefinitionCode(), errorCommand.getProcessDefinitionCode()); + assertEquals(command.getProcessDefinitionVersion(), errorCommand.getProcessDefinitionVersion()); + assertEquals(command.getProcessInstanceId(), errorCommand.getProcessInstanceId()); + assertEquals(command.getCommandParam(), errorCommand.getCommandParam()); + assertEquals(command.getTaskDependType(), errorCommand.getTaskDependType()); + assertEquals(command.getFailureStrategy(), errorCommand.getFailureStrategy()); + assertEquals(command.getWarningType(), errorCommand.getWarningType()); + assertEquals(command.getWarningGroupId(), errorCommand.getWarningGroupId()); + assertEquals(command.getScheduleTime(), errorCommand.getScheduleTime()); + assertEquals(command.getStartTime(), errorCommand.getStartTime()); + assertEquals(command.getUpdateTime(), errorCommand.getUpdateTime()); + assertEquals(command.getProcessInstancePriority(), errorCommand.getProcessInstancePriority()); + assertEquals(command.getWorkerGroup(), errorCommand.getWorkerGroup()); + assertEquals(command.getTenantCode(), errorCommand.getTenantCode()); + assertEquals(command.getEnvironmentCode(), errorCommand.getEnvironmentCode()); + assertEquals(command.getDryRun(), errorCommand.getDryRun()); + assertEquals(command.getTestFlag(), errorCommand.getTestFlag()); + assertEquals("test", errorCommand.getMessage()); + } + +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java index 
cb68cf74948c..1bcc784cbcf0 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java @@ -17,12 +17,7 @@ package org.apache.dolphinscheduler.dao.entity; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_CONDITIONS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DEPENDENT; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SUB_PROCESS; - import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependentRelation; import org.apache.dolphinscheduler.plugin.task.api.model.DependentItem; import org.apache.dolphinscheduler.plugin.task.api.model.DependentTaskModel; import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; @@ -30,35 +25,10 @@ import java.util.ArrayList; import java.util.List; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; public class TaskInstanceTest { - /** - * task instance sub process - */ - @Test - public void testTaskInstanceIsSubProcess() { - TaskInstance taskInstance = new TaskInstance(); - - // sub process - taskInstance.setTaskType(TASK_TYPE_SUB_PROCESS); - Assertions.assertTrue(taskInstance.isSubProcess()); - - // not sub process - taskInstance.setTaskType("HTTP"); - Assertions.assertFalse(taskInstance.isSubProcess()); - - // sub process - taskInstance.setTaskType(TASK_TYPE_CONDITIONS); - Assertions.assertTrue(taskInstance.isConditionsTask()); - - // sub process - taskInstance.setTaskType(TASK_TYPE_DEPENDENT); - Assertions.assertTrue(taskInstance.isDependTask()); - } - /** * test for TaskInstance.getDependence */ @@ -66,7 +36,6 @@ public void testTaskInstanceIsSubProcess() { public void testTaskInstanceGetDependence() { TaskInstance taskInstance = new TaskInstance(); 
taskInstance.setTaskParams(JSONUtils.toJsonString(getDependentParameters())); - taskInstance.getDependency(); } /** @@ -82,8 +51,6 @@ private DependentParameters getDependentParameters() { dependentItem.setDefinitionCode(222L); dependentItem.setCycle("today"); dependentItems.add(dependentItem); - dependentParameters.setDependTaskList(dependTaskList); - dependentParameters.setRelation(DependentRelation.AND); return dependentParameters; } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java deleted file mode 100644 index a4017f356fc3..000000000000 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.dao.entity; - -import org.apache.dolphinscheduler.dao.entity.UdfFunc.UdfFuncDeserializer; - -import java.io.IOException; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class UdfFuncTest { - - /** - * test UdfFuncDeserializer.deserializeKey - * - * @throws IOException - */ - @Test - public void testUdfFuncDeserializer() throws IOException { - - // UdfFuncDeserializer.deserializeKey key is null - UdfFuncDeserializer udfFuncDeserializer = new UdfFuncDeserializer(); - Assertions.assertNull(udfFuncDeserializer.deserializeKey(null, null)); - - // - UdfFunc udfFunc = new UdfFunc(); - udfFunc.setResourceName("dolphin_resource_update"); - udfFunc.setResourceId(2); - udfFunc.setClassName("org.apache.dolphinscheduler.test.mrUpdate"); - - Assertions.assertNotNull(udfFuncDeserializer.deserializeKey(udfFunc.toString(), null)); - } - -} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapperTest.java index cc0532032dc2..92418193b84b 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AuditLogMapperTest.java @@ -17,12 +17,15 @@ package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.common.enums.AuditResourceType; +import org.apache.dolphinscheduler.common.enums.AuditModelType; +import org.apache.dolphinscheduler.common.enums.AuditOperationType; import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.AuditLog; import org.apache.dolphinscheduler.dao.entity.Project; +import java.util.ArrayList; import java.util.Date; +import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -30,6 +33,7 @@ import 
com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.google.common.collect.Lists; public class AuditLogMapperTest extends BaseDaoTest { @@ -39,13 +43,17 @@ public class AuditLogMapperTest extends BaseDaoTest { @Autowired private ProjectMapper projectMapper; - private void insertOne(AuditResourceType resourceType) { + private void insertOne(AuditModelType objectType) { AuditLog auditLog = new AuditLog(); auditLog.setUserId(1); - auditLog.setTime(new Date()); - auditLog.setResourceType(resourceType.getCode()); - auditLog.setOperation(0); - auditLog.setResourceId(0); + auditLog.setModelName("name"); + auditLog.setDetail("detail"); + auditLog.setLatency(1L); + auditLog.setCreateTime(new Date()); + auditLog.setModelType(objectType.getName()); + auditLog.setOperationType(AuditOperationType.CREATE.getName()); + auditLog.setModelId(1L); + auditLog.setDescription("description"); logMapper.insert(auditLog); } @@ -65,25 +73,14 @@ private Project insertProject() { */ @Test public void testQueryAuditLog() { - insertOne(AuditResourceType.USER_MODULE); - insertOne(AuditResourceType.PROJECT_MODULE); + insertOne(AuditModelType.USER); + insertOne(AuditModelType.PROJECT); Page page = new Page<>(1, 3); - int[] resourceType = new int[0]; - int[] operationType = new int[0]; + List objectTypeList = new ArrayList<>(); + List operationTypeList = Lists.newArrayList(AuditOperationType.CREATE.getName()); - IPage logIPage = logMapper.queryAuditLog(page, resourceType, operationType, "", null, null); + IPage logIPage = + logMapper.queryAuditLog(page, objectTypeList, operationTypeList, "", "", null, null); Assertions.assertNotEquals(0, logIPage.getTotal()); } - - @Test - public void testQueryResourceNameByType() { - String resourceNameByUser = logMapper.queryResourceNameByType(AuditResourceType.USER_MODULE.getMsg(), 1); - Assertions.assertEquals("admin", resourceNameByUser); - Project project = insertProject(); - String 
resourceNameByProject = - logMapper.queryResourceNameByType(AuditResourceType.PROJECT_MODULE.getMsg(), project.getId()); - Assertions.assertEquals(project.getName(), resourceNameByProject); - int delete = projectMapper.deleteById(project.getId()); - Assertions.assertEquals(delete, 1); - } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java index 3d45477d858b..3f4b0768aa9f 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java @@ -17,7 +17,8 @@ package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.common.constants.Constants; +import static com.google.common.truth.Truth.assertThat; + import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; @@ -30,6 +31,7 @@ import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CommandCount; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import java.util.Date; import java.util.HashMap; @@ -40,6 +42,9 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.Lists; /** @@ -62,6 +67,18 @@ public void testInsert() { Assertions.assertTrue(command.getId() > 0); } + @Test + public void testQueryCommandPageByIds() { + Command expectedCommand = createCommand(); + Page page = new Page<>(1, 10); 
+ IPage commandIPage = commandMapper.queryCommandPageByIds(page, + Lists.newArrayList(expectedCommand.getProcessDefinitionCode())); + List commandList = commandIPage.getRecords(); + assertThat(commandList).isNotEmpty(); + assertThat(commandIPage.getTotal()).isEqualTo(1); + assertThat(commandList.get(0).getId()).isEqualTo(expectedCommand.getId()); + } + /** * test select by id */ @@ -133,7 +150,7 @@ public void testGetOneToRun() { createCommand(CommandType.START_PROCESS, processDefinition.getCode()); - List actualCommand = commandMapper.queryCommandPage(1, 0); + List actualCommand = commandMapper.selectList(new QueryWrapper<>()); Assertions.assertNotNull(actualCommand); } @@ -173,11 +190,19 @@ public void testQueryCommandPageBySlot() { toTestQueryCommandPageBySlot(masterCount, thisMasterSlot); } + @Test + void deleteByWorkflowInstanceIds() { + Command command = createCommand(); + assertThat(commandMapper.selectList(null)).isNotEmpty(); + commandMapper.deleteByWorkflowInstanceIds(Lists.newArrayList(command.getProcessInstanceId())); + assertThat(commandMapper.selectList(null)).isEmpty(); + } + private boolean toTestQueryCommandPageBySlot(int masterCount, int thisMasterSlot) { Command command = createCommand(); Integer id = command.getId(); boolean hit = id % masterCount == thisMasterSlot; - List commandList = commandMapper.queryCommandPageBySlot(1, masterCount, thisMasterSlot); + List commandList = commandMapper.queryCommandByIdSlot(thisMasterSlot, masterCount, 1, 1); if (hit) { Assertions.assertEquals(id, commandList.get(0).getId()); } else { @@ -191,8 +216,9 @@ private boolean toTestQueryCommandPageBySlot(int masterCount, int thisMasterSlot /** * create command map - * @param count map count - * @param commandType comman type + * + * @param count map count + * @param commandType comman type * @param processDefinitionCode process definition code * @return command map */ @@ -213,7 +239,8 @@ private CommandCount createCommandMap( } /** - * create process definition + * 
create process definition + * * @return process definition */ private ProcessDefinition createProcessDefinition() { @@ -233,6 +260,7 @@ private ProcessDefinition createProcessDefinition() { /** * create command map + * * @param count map count * @return command map */ @@ -248,6 +276,7 @@ private Map createCommandMap(Integer count) { /** * create command + * * @return */ private Command createCommand() { @@ -256,6 +285,7 @@ private Command createCommand() { /** * create command + * * @return Command */ private Command createCommand(CommandType commandType, long processDefinitionCode) { @@ -273,12 +303,11 @@ private Command createCommand(CommandType commandType, long processDefinitionCod command.setProcessInstancePriority(Priority.MEDIUM); command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); - command.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + command.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); command.setProcessInstanceId(0); command.setProcessDefinitionVersion(0); commandMapper.insert(command); return command; } - } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java index 098c001f2d82..07409cf0e5dd 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.dao.mapper; +import static com.google.common.truth.Truth.assertThat; + import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.CommandCount; @@ -31,6 +33,9 @@ import org.junit.jupiter.api.Test; import 
org.springframework.beans.factory.annotation.Autowired; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + public class ErrorCommandMapperTest extends BaseDaoTest { @Autowired @@ -54,6 +59,18 @@ private ErrorCommand insertOne() { return errorCommand; } + @Test + public void testQueryCommandPageByIds() { + ErrorCommand expectedCommand = insertOne(); + Page page = new Page<>(1, 10); + IPage commandIPage = errorCommandMapper.queryErrorCommandPageByIds(page, + Lists.newArrayList(expectedCommand.getProcessDefinitionCode())); + List commandList = commandIPage.getRecords(); + assertThat(commandList).isNotEmpty(); + assertThat(commandIPage.getTotal()).isEqualTo(1); + assertThat(commandList.get(0).getId()).isEqualTo(expectedCommand.getId()); + } + /** * test query */ diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapperTest.java index f3e877aaa3ee..4e239265c424 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ListenerEventMapperTest.java @@ -81,7 +81,7 @@ public void testListingListenerEventByStatus() { ListenerEvent event2 = generateServerDownListenerEvent("192.168.x.2"); listenerEventMapper.batchInsert(Lists.newArrayList(event1, event2)); List listenerEvents = - listenerEventMapper.listingListenerEventByStatus(AlertStatus.WAIT_EXECUTION, 50); + listenerEventMapper.listingListenerEventByStatus(-1, AlertStatus.WAIT_EXECUTION.getCode(), 50); Assertions.assertEquals(listenerEvents.size(), 2); } @@ -111,8 +111,10 @@ public void testDeleteListenerEvent() { ListenerEvent actualAlert = listenerEventMapper.selectById(event.getId()); Assertions.assertNull(actualAlert); } + /** * create server down event + * * 
@param host worker host * @return listener event */ diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java index eca6904654ca..ad55bdd84d90 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; @@ -29,7 +28,6 @@ import java.util.Date; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.junit.jupiter.api.Assertions; @@ -302,22 +300,6 @@ public void testCountDefinitionGroupByUser() { Assertions.assertNotEquals(0, processDefinitions.size()); } - @Test - public void listResourcesTest() { - ProcessDefinition processDefinition = insertOne("def 1"); - processDefinition.setReleaseState(ReleaseState.ONLINE); - List> maps = processDefinitionMapper.listResources(); - Assertions.assertNotNull(maps); - } - - @Test - public void listResourcesByUserTest() { - ProcessDefinition processDefinition = insertOne("def 1"); - processDefinition.setReleaseState(ReleaseState.ONLINE); - List> maps = processDefinitionMapper.listResourcesByUser(processDefinition.getUserId()); - Assertions.assertNotNull(maps); - } - @Test public void listProjectIds() { insertOne("def 1"); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java index 
39b8d04e4d12..0aae0dce2be3 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java @@ -263,7 +263,8 @@ public void testQueryLastSchedulerProcess() { processInstanceMapper.updateById(processInstance); ProcessInstance processInstance1 = - processInstanceMapper.queryLastSchedulerProcess(processInstance.getProcessDefinitionCode(), null, null, + processInstanceMapper.queryLastSchedulerProcess(processInstance.getProcessDefinitionCode(), 0L, null, + null, processInstance.getTestFlag()); Assertions.assertNotEquals(null, processInstance1); processInstanceMapper.deleteById(processInstance.getId()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapperTest.java index 48e51d9ca328..1c13d95a0ee8 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectParameterMapperTest.java @@ -89,7 +89,7 @@ public void testQueryProjectParameterListPaging() { insertOne(2, "name2", 2); Page page = new Page(1, 3); - IPage res = projectParameterMapper.queryProjectParameterListPaging(page, 1, null, null); + IPage res = projectParameterMapper.queryProjectParameterListPaging(page, 1, null, null, null); Assertions.assertEquals(1, res.getRecords().size()); } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java index dfa07c4d1022..5583159dc209 100644 --- 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionMapperTest.java @@ -129,27 +129,6 @@ public void testCountDefinitionGroupByUser() { } - @Test - public void testListResources() { - TaskDefinition taskDefinition = insertOne(); - List> maps = taskDefinitionMapper.listResources(); - Assertions.assertNotEquals(0, maps.size()); - - } - - @Test - public void testListResourcesByUser() { - User user = new User(); - user.setUserName("un"); - userMapper.insert(user); - User un = userMapper.queryByUserNameAccurately("un"); - TaskDefinition taskDefinition = insertOne(un.getId()); - - List> maps = taskDefinitionMapper.listResourcesByUser(taskDefinition.getUserId()); - Assertions.assertNotEquals(0, maps.size()); - - } - @Test public void testDeleteByCode() { TaskDefinition taskDefinition = insertOne(); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapperTest.java index d3f4fcc666bf..7e31b64a23f2 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TriggerRelationMapperTest.java @@ -17,13 +17,13 @@ package org.apache.dolphinscheduler.dao.mapper; +import static com.google.common.truth.Truth.assertThat; + import org.apache.dolphinscheduler.common.enums.ApiTriggerType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.TriggerRelation; -import java.util.List; - import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -67,9 +67,9 @@ public void 
testSelectById() { @Test public void testQueryByTypeAndJobId() { TriggerRelation expectRelation = createTriggerRelation(); - TriggerRelation actualRelation = triggerRelationMapper.queryByTypeAndJobId( - expectRelation.getTriggerType(), expectRelation.getJobId()); - Assertions.assertEquals(expectRelation, actualRelation); + assertThat( + triggerRelationMapper.queryByTypeAndJobId(expectRelation.getTriggerType(), expectRelation.getJobId())) + .containsExactly(expectRelation); } /** @@ -80,9 +80,8 @@ public void testQueryByTypeAndJobId() { @Test public void testQueryByTriggerRelationCode() { TriggerRelation expectRelation = createTriggerRelation(); - List actualRelations = triggerRelationMapper.queryByTriggerRelationCode( - expectRelation.getTriggerCode()); - Assertions.assertEquals(actualRelations.size(), 1); + assertThat(triggerRelationMapper.queryByTriggerRelationCode(expectRelation.getTriggerCode())) + .containsExactly(expectRelation); } /** @@ -93,17 +92,15 @@ public void testQueryByTriggerRelationCode() { @Test public void testQueryByTriggerRelationCodeAndType() { TriggerRelation expectRelation = createTriggerRelation(); - List actualRelations = triggerRelationMapper.queryByTriggerRelationCodeAndType( - expectRelation.getTriggerCode(), expectRelation.getTriggerType()); - Assertions.assertEquals(actualRelations.size(), 1); + assertThat(triggerRelationMapper.queryByTriggerRelationCodeAndType(expectRelation.getTriggerCode(), + expectRelation.getTriggerType())).containsExactly(expectRelation); } @Test public void testUpsert() { TriggerRelation expectRelation = createTriggerRelation(); triggerRelationMapper.upsert(expectRelation); - TriggerRelation actualRelation = triggerRelationMapper.selectById(expectRelation.getId()); - Assertions.assertEquals(expectRelation, actualRelation); + assertThat(triggerRelationMapper.selectById(expectRelation.getId())).isEqualTo(expectRelation); } /** @@ -113,8 +110,7 @@ public void testUpsert() { public void testDelete() { 
TriggerRelation expectRelation = createTriggerRelation(); triggerRelationMapper.deleteById(expectRelation.getId()); - TriggerRelation actualRelation = triggerRelationMapper.selectById(expectRelation.getId()); - Assertions.assertNull(actualRelation); + assertThat(triggerRelationMapper.selectById(expectRelation.getId())).isNull(); } /** diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java deleted file mode 100644 index c726e6138581..000000000000 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.dao.mapper; - -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.BaseDaoTest; -import org.apache.dolphinscheduler.dao.entity.UDFUser; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; -import org.apache.dolphinscheduler.dao.entity.User; - -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; - -public class UDFUserMapperTest extends BaseDaoTest { - - @Autowired - private UDFUserMapper udfUserMapper; - - @Autowired - private UserMapper userMapper; - - @Autowired - private UdfFuncMapper udfFuncMapper; - - /** - * insert - * @return UDFUser - */ - private UDFUser insertOne() { - UDFUser udfUser = new UDFUser(); - udfUser.setUdfId(1); - udfUser.setUserId(1); - udfUser.setCreateTime(new Date()); - udfUser.setUpdateTime(new Date()); - udfUserMapper.insert(udfUser); - return udfUser; - } - - /** - * insert UDFUser - * @param user user - * @param udfFunc udfFunc - * @return UDFUser - */ - private UDFUser insertOne(User user, UdfFunc udfFunc) { - UDFUser udfUser = new UDFUser(); - udfUser.setUdfId(udfFunc.getId()); - udfUser.setUserId(user.getId()); - udfUser.setCreateTime(new Date()); - udfUser.setUpdateTime(new Date()); - udfUserMapper.insert(udfUser); - return udfUser; - } - - /** - * insert one user - * @return User - */ - private User insertOneUser() { - User user = new User(); - user.setUserName("user1"); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(1); - user.setQueue("dolphin"); - user.setUpdateTime(new Date()); - userMapper.insert(user); - return user; - } - - /** - * insert one udf - * @return UdfFunc - */ - private UdfFunc insertOneUdfFunc() { - UdfFunc udfFunc = new 
UdfFunc(); - udfFunc.setFuncName("dolphin_udf_func"); - udfFunc.setClassName("org.apache.dolphinscheduler.test.mr"); - udfFunc.setType(UdfType.HIVE); - udfFunc.setResourceId(1); - udfFunc.setResourceName("dolphin_resource"); - udfFunc.setCreateTime(new Date()); - udfFunc.setUpdateTime(new Date()); - udfFuncMapper.insert(udfFunc); - return udfFunc; - } - - /** - * test update - */ - @Test - public void testUpdate() { - // insertOneUser - User user = insertOneUser(); - // insertOneUdfFunc - UdfFunc udfFunc = insertOneUdfFunc(); - // insertOne - UDFUser udfUser = insertOne(user, udfFunc); - udfUser.setUserId(2); - udfUser.setUdfId(2); - int update = udfUserMapper.updateById(udfUser); - Assertions.assertEquals(update, 1); - - } - - /** - * test delete - */ - @Test - public void testDelete() { - // insertOneUser - User user = insertOneUser(); - // insertOneUdfFunc - UdfFunc udfFunc = insertOneUdfFunc(); - // insertOne - UDFUser udfUser = insertOne(user, udfFunc); - int delete = udfUserMapper.deleteById(udfUser.getId()); - Assertions.assertEquals(delete, 1); - } - - /** - * test query - */ - @Test - public void testQuery() { - // insertOne - UDFUser udfUser = insertOne(); - // query - List udfUserList = udfUserMapper.selectList(null); - Assertions.assertNotEquals(0, udfUserList.size()); - } - - /** - * test delete by userId - */ - @Test - public void testDeleteByUserId() { - // insertOneUser - User user = insertOneUser(); - // insertOneUdfFunc - UdfFunc udfFunc = insertOneUdfFunc(); - // insertOne - UDFUser udfUser = insertOne(user, udfFunc); - int delete = udfUserMapper.deleteByUserId(user.getId()); - Assertions.assertEquals(1, delete); - - } - - /** - * test delete by udffuncId - */ - @Test - public void testDeleteByUdfFuncId() { - // insertOneUser - User user = insertOneUser(); - // insertOneUdfFunc - UdfFunc udfFunc = insertOneUdfFunc(); - // insertOne - UDFUser udfUser = insertOne(user, udfFunc); - int delete = udfUserMapper.deleteByUdfFuncId(udfFunc.getId()); - 
Assertions.assertEquals(1, delete); - } -} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java deleted file mode 100644 index 774449782612..000000000000 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.dao.mapper; - -import static java.util.stream.Collectors.toList; - -import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.BaseDaoTest; -import org.apache.dolphinscheduler.dao.entity.UDFUser; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; -import org.apache.dolphinscheduler.dao.entity.User; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; - -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; - -public class UdfFuncMapperTest extends BaseDaoTest { - - @Autowired - private UserMapper userMapper; - - @Autowired - private UdfFuncMapper udfFuncMapper; - - @Autowired - private UDFUserMapper udfUserMapper; - - /** - * insert one udf - * - * @return UdfFunc - */ - private UdfFunc insertOne(String funcName) { - UdfFunc udfFunc = new UdfFunc(); - udfFunc.setUserId(1); - udfFunc.setFuncName(funcName); - udfFunc.setClassName("org.apache.dolphinscheduler.test.mr"); - udfFunc.setType(UdfType.HIVE); - udfFunc.setResourceId(1); - udfFunc.setResourceName("dolphin_resource"); - udfFunc.setCreateTime(new Date()); - udfFunc.setUpdateTime(new Date()); - udfFuncMapper.insert(udfFunc); - return udfFunc; - } - - /** - * insert one udf - * - * @return - */ - private UdfFunc insertOne(User user) { - UdfFunc udfFunc = new UdfFunc(); - udfFunc.setUserId(user.getId()); - udfFunc.setFuncName("dolphin_udf_func" + user.getUserName()); - udfFunc.setClassName("org.apache.dolphinscheduler.test.mr"); - udfFunc.setType(UdfType.HIVE); - udfFunc.setResourceId(1); - udfFunc.setResourceName("dolphin_resource"); - udfFunc.setCreateTime(new Date()); - 
udfFunc.setUpdateTime(new Date()); - udfFuncMapper.insert(udfFunc); - return udfFunc; - } - - /** - * insert one user - * - * @return User - */ - private User insertOneUser() { - return insertOneUser("user1"); - } - - /** - * insert one user - * - * @return User - */ - private User insertOneUser(String userName) { - return createGeneralUser(userName); - } - - /** - * create general user - * - * @return User - */ - private User createGeneralUser(String userName) { - User user = new User(); - user.setUserName(userName); - user.setUserPassword("1"); - user.setEmail("xx@123.com"); - user.setUserType(UserType.GENERAL_USER); - user.setCreateTime(new Date()); - user.setTenantId(1); - user.setUpdateTime(new Date()); - userMapper.insert(user); - return user; - } - - /** - * insert UDFUser - * - * @param user user - * @param udfFunc udf func - * @return UDFUser - */ - private UDFUser insertOneUDFUser(User user, UdfFunc udfFunc) { - UDFUser udfUser = new UDFUser(); - udfUser.setUdfId(udfFunc.getId()); - udfUser.setUserId(user.getId()); - udfUser.setCreateTime(new Date()); - udfUser.setUpdateTime(new Date()); - udfUserMapper.insert(udfUser); - return udfUser; - } - - /** - * test update - */ - @Test - public void testUpdate() { - // insertOne - UdfFunc udfFunc = insertOne("func1"); - udfFunc.setResourceName("dolphin_resource_update"); - udfFunc.setResourceId(2); - udfFunc.setClassName("org.apache.dolphinscheduler.test.mrUpdate"); - udfFunc.setUpdateTime(new Date()); - // update - int update = udfFuncMapper.updateById(udfFunc); - Assertions.assertEquals(update, 1); - - } - - /** - * test delete - */ - @Test - public void testDelete() { - // insertOne - UdfFunc udfFunc = insertOne("func2"); - // delete - int delete = udfFuncMapper.deleteById(udfFunc.getId()); - Assertions.assertEquals(delete, 1); - } - - /** - * test query udf by ids - */ - @Test - public void testQueryUdfByIdStr() { - // insertOne - UdfFunc udfFunc = insertOne("func3"); - // insertOne - UdfFunc udfFunc1 = 
insertOne("func4"); - Integer[] idArray = new Integer[]{udfFunc.getId(), udfFunc1.getId()}; - // queryUdfByIdStr - List udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray, ""); - Assertions.assertNotEquals(0, udfFuncList.size()); - } - - /** - * test page - */ - @Test - public void testQueryUdfFuncPaging() { - // insertOneUser - User user = insertOneUser(); - // insertOne - UdfFunc udfFunc = insertOne(user); - // queryUdfFuncPaging - Page page = new Page(1, 3); - - IPage udfFuncIPage = - udfFuncMapper.queryUdfFuncPaging(page, Collections.singletonList(udfFunc.getId()), ""); - Assertions.assertNotEquals(0, udfFuncIPage.getTotal()); - - } - - /** - * test get udffunc by type - */ - @Test - public void testGetUdfFuncByType() { - // insertOneUser - User user = insertOneUser(); - // insertOne - UdfFunc udfFunc = insertOne(user); - // getUdfFuncByType - List udfFuncList = - udfFuncMapper.getUdfFuncByType(Collections.singletonList(udfFunc.getId()), udfFunc.getType().ordinal()); - Assertions.assertNotEquals(0, udfFuncList.size()); - - } - - /** - * test query udffunc expect userId - */ - @Test - public void testQueryUdfFuncExceptUserId() { - // insertOneUser - User user1 = insertOneUser(); - User user2 = insertOneUser("user2"); - // insertOne - UdfFunc udfFunc1 = insertOne(user1); - UdfFunc udfFunc2 = insertOne(user2); - List udfFuncList = udfFuncMapper.queryUdfFuncExceptUserId(user1.getId()); - Assertions.assertNotEquals(0, udfFuncList.size()); - - } - - /** - * test query authed udffunc - */ - @Test - public void testQueryAuthedUdfFunc() { - // insertOneUser - User user = insertOneUser(); - - // insertOne - UdfFunc udfFunc = insertOne(user); - - // insertOneUDFUser - UDFUser udfUser = insertOneUDFUser(user, udfFunc); - // queryAuthedUdfFunc - List udfFuncList = udfFuncMapper.queryAuthedUdfFunc(user.getId()); - Assertions.assertNotEquals(0, udfFuncList.size()); - } - - @Test - public void testListAuthorizedUdfFunc() { - // create general user - User generalUser1 = 
createGeneralUser("user1"); - User generalUser2 = createGeneralUser("user2"); - - // create udf function - UdfFunc udfFunc = insertOne(generalUser1); - UdfFunc unauthorizdUdfFunc = insertOne(generalUser2); - - // udf function ids - Integer[] udfFuncIds = new Integer[]{udfFunc.getId(), unauthorizdUdfFunc.getId()}; - - List authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds); - - Assertions.assertEquals(generalUser1.getId().intValue(), udfFunc.getUserId()); - Assertions.assertNotEquals(generalUser1.getId().intValue(), unauthorizdUdfFunc.getUserId()); - Assertions.assertFalse(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()) - .containsAll(Arrays.asList(udfFuncIds))); - - // authorize object unauthorizdUdfFunc to generalUser1 - insertOneUDFUser(generalUser1, unauthorizdUdfFunc); - authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds); - Assertions.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()) - .containsAll(Arrays.asList(udfFuncIds))); - } - - @Test - public void batchUpdateUdfFuncTest() { - // create general user - User generalUser1 = createGeneralUser("user1"); - UdfFunc udfFunc = insertOne(generalUser1); - udfFunc.setResourceName("/updateTest"); - List udfFuncList = new ArrayList<>(); - udfFuncList.add(udfFunc); - Assertions.assertTrue(udfFuncMapper.batchUpdateUdfFunc(udfFuncList) > 0); - - } -} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/AlertDaoTest.java similarity index 68% rename from dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java rename to dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/AlertDaoTest.java index f2cb503d9bfe..a2c2c2ab9856 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java 
+++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/AlertDaoTest.java @@ -15,40 +15,26 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.dao; +package org.apache.dolphinscheduler.dao.repository.impl; import org.apache.dolphinscheduler.common.enums.AlertStatus; -import org.apache.dolphinscheduler.common.enums.ProfileType; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.Alert; import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.annotation.Rollback; -import org.springframework.test.context.ActiveProfiles; -import org.springframework.transaction.annotation.EnableTransactionManagement; -import org.springframework.transaction.annotation.Transactional; -@ActiveProfiles(ProfileType.H2) -@ExtendWith(MockitoExtension.class) -@SpringBootApplication(scanBasePackageClasses = DaoConfiguration.class) -@SpringBootTest(classes = DaoConfiguration.class) -@Transactional -@Rollback -@EnableTransactionManagement -public class AlertDaoTest { +class AlertDaoTest extends BaseDaoTest { @Autowired private AlertDao alertDao; @Test - public void testAlertDao() { + void testAlertDao() { Alert alert = new Alert(); alert.setTitle("Mysql Exception"); alert.setContent("[\"alarm time:2018-02-05\", \"service name:MYSQL_ALTER\", \"alarm name:MYSQL_ALTER_DUMP\", " @@ -57,25 +43,25 @@ public void testAlertDao() { alert.setAlertStatus(AlertStatus.WAIT_EXECUTION); alertDao.addAlert(alert); - List alerts = alertDao.listPendingAlerts(); + List alerts = 
alertDao.listPendingAlerts(-1); Assertions.assertNotNull(alerts); Assertions.assertNotEquals(0, alerts.size()); } @Test - public void testAddAlertSendStatus() { + void testAddAlertSendStatus() { int insertCount = alertDao.addAlertSendStatus(AlertStatus.EXECUTION_SUCCESS, "success", 1, 1); Assertions.assertEquals(1, insertCount); } @Test - public void testSendServerStoppedAlert() { + void testSendServerStoppedAlert() { int alertGroupId = 1; String host = "127.0.0.998165432"; String serverType = "Master"; alertDao.sendServerStoppedAlert(alertGroupId, host, serverType); alertDao.sendServerStoppedAlert(alertGroupId, host, serverType); - long count = alertDao.listPendingAlerts() + long count = alertDao.listPendingAlerts(-1) .stream() .filter(alert -> alert.getContent().contains(host)) .count(); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java new file mode 100644 index 000000000000..9185a965512a --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/CommandDaoImplTest.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.repository.impl; + +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.BaseDaoTest; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.repository.CommandDao; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; + +import org.apache.commons.lang3.RandomUtils; + +import java.util.List; + +import org.junit.jupiter.api.RepeatedTest; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.annotation.DirtiesContext; + +@DirtiesContext(classMode = DirtiesContext.ClassMode.BEFORE_EACH_TEST_METHOD) +class CommandDaoImplTest extends BaseDaoTest { + + @Autowired + private CommandDao commandDao; + + @RepeatedTest(value = 10) + void fetchCommandByIdSlot() { + int totalSlot = RandomUtils.nextInt(1, 10); + int currentSlotIndex = RandomUtils.nextInt(0, totalSlot); + int fetchSize = RandomUtils.nextInt(10, 100); + int idStep = RandomUtils.nextInt(1, 5); + int commandSize = RandomUtils.nextInt(currentSlotIndex, 1000); + // Generate commandSize commands + int id = 1; + for (int j = 0; j < commandSize; j++) { + Command command = generateCommand(CommandType.START_PROCESS, 0); + command.setId(id); + commandDao.insert(command); + id += idStep; + } + + List commands = commandDao.queryCommandByIdSlot(currentSlotIndex, totalSlot, idStep, fetchSize); + assertThat(commands.size()) + .isEqualTo(commandDao.queryAll() + .stream() + 
.filter(command -> (command.getId() / idStep) % totalSlot == currentSlotIndex) + .limit(fetchSize) + .count()); + + } + + private Command generateCommand(CommandType commandType, int processDefinitionCode) { + Command command = new Command(); + command.setCommandType(commandType); + command.setProcessDefinitionCode(processDefinitionCode); + command.setExecutorId(4); + command.setCommandParam("test command param"); + command.setTaskDependType(TaskDependType.TASK_ONLY); + command.setFailureStrategy(FailureStrategy.CONTINUE); + command.setWarningType(WarningType.ALL); + command.setWarningGroupId(1); + command.setScheduleTime(DateUtils.stringToDate("2019-12-29 12:10:00")); + command.setProcessInstancePriority(Priority.MEDIUM); + command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); + command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); + command.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); + command.setProcessInstanceId(0); + command.setProcessDefinitionVersion(0); + return command; + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImplTest.java new file mode 100644 index 000000000000..2574c52f7828 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ListenerEventDaoImplTest.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.repository.impl; + +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.ListenerEventType; +import org.apache.dolphinscheduler.dao.BaseDaoTest; +import org.apache.dolphinscheduler.dao.entity.ListenerEvent; +import org.apache.dolphinscheduler.dao.repository.ListenerEventDao; + +import java.util.Date; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +class ListenerEventDaoImplTest extends BaseDaoTest { + + @Autowired + private ListenerEventDao listenerEventDao; + + @Test + void listingPendingEvents() { + int minId = -1; + int limit = 10; + assertThat(listenerEventDao.listingPendingEvents(minId, limit)).isEmpty(); + + ListenerEvent listenerEvent = ListenerEvent.builder() + .eventType(ListenerEventType.SERVER_DOWN) + .sign("test") + .createTime(new Date()) + .updateTime(new Date()) + .postStatus(AlertStatus.WAIT_EXECUTION) + .build(); + listenerEventDao.insert(listenerEvent); + + listenerEvent = ListenerEvent.builder() + .eventType(ListenerEventType.SERVER_DOWN) + .sign("test") + .createTime(new Date()) + .updateTime(new Date()) + .postStatus(AlertStatus.EXECUTION_SUCCESS) + .build(); + listenerEventDao.insert(listenerEvent); + + assertThat(listenerEventDao.listingPendingEvents(minId, limit)).hasSize(1); + } + + @Test + void updateListenerEvent() { + ListenerEvent listenerEvent = ListenerEvent.builder() + 
.eventType(ListenerEventType.SERVER_DOWN) + .sign("test") + .createTime(new Date()) + .updateTime(new Date()) + .postStatus(AlertStatus.WAIT_EXECUTION) + .build(); + listenerEventDao.insert(listenerEvent); + listenerEventDao.updateListenerEvent(listenerEvent.getId(), AlertStatus.EXECUTION_SUCCESS, "test", new Date()); + assertThat(listenerEventDao.queryById(listenerEvent.getId()).getPostStatus()) + .isEqualTo(AlertStatus.EXECUTION_SUCCESS); + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImplTest.java index 421309130fc9..9f69b7d90fd8 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImplTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/ProcessInstanceDaoImplTest.java @@ -65,11 +65,9 @@ void queryByWorkflowCodeVersionStatus_EXIST_NOT_FINISH_INSTANCE() { WorkflowExecutionStatus.READY_STOP)); processInstanceDao.insert(createWorkflowInstance(workflowDefinitionCode, workflowDefinitionVersion, WorkflowExecutionStatus.SERIAL_WAIT)); - processInstanceDao.insert(createWorkflowInstance(workflowDefinitionCode, workflowDefinitionVersion, - WorkflowExecutionStatus.READY_BLOCK)); processInstanceDao.insert(createWorkflowInstance(workflowDefinitionCode, workflowDefinitionVersion, WorkflowExecutionStatus.WAIT_TO_RUN)); - assertEquals(8, processInstanceDao + assertEquals(7, processInstanceDao .queryByWorkflowCodeVersionStatus(workflowDefinitionCode, workflowDefinitionVersion, status).size()); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java index 17c15371845b..13dcf91f55d1 100644 --- 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/repository/impl/TaskGroupQueueDaoImplTest.java @@ -27,7 +27,11 @@ import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue; import org.apache.dolphinscheduler.dao.repository.TaskGroupQueueDao; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.RandomUtils; + import java.util.Date; +import java.util.List; import org.assertj.core.util.Lists; import org.junit.jupiter.api.Test; @@ -55,6 +59,35 @@ void queryAllInQueueTaskGroupQueue() { assertEquals(1, taskGroupQueueDao.queryAllInQueueTaskGroupQueue().size()); } + @Test + void queryInQueueTaskGroupQueue_withMinId() { + // Insert 1w ~ 10w records + int insertCount = RandomUtils.nextInt(10000, 100000); + List insertTaskGroupQueue = Lists.newArrayList(); + for (int i = 0; i < insertCount; i++) { + TaskGroupQueue taskGroupQueue = createTaskGroupQueue(Flag.NO, TaskGroupQueueStatus.ACQUIRE_SUCCESS); + insertTaskGroupQueue.add(taskGroupQueue); + } + taskGroupQueueDao.insertBatch(insertTaskGroupQueue); + + int minTaskGroupQueueId = -1; + int limit = 1000; + int queryCount = 0; + while (true) { + List taskGroupQueues = + taskGroupQueueDao.queryInQueueTaskGroupQueue(minTaskGroupQueueId, limit); + if (CollectionUtils.isEmpty(taskGroupQueues)) { + break; + } + queryCount += taskGroupQueues.size(); + if (taskGroupQueues.size() < limit) { + break; + } + minTaskGroupQueueId = taskGroupQueues.get(taskGroupQueues.size() - 1).getId(); + } + assertEquals(insertCount, queryCount); + } + @Test void queryAllInQueueTaskGroupQueueByGroupId() { TaskGroupQueue taskGroupQueue = createTaskGroupQueue(Flag.NO, TaskGroupQueueStatus.ACQUIRE_SUCCESS); @@ -91,6 +124,49 @@ void queryUsingTaskGroupQueueByGroupId() { assertEquals(1, taskGroupQueueDao.queryAcquiredTaskGroupQueueByGroupId(1).size()); } + @Test + void 
countUsingTaskGroupQueueByGroupId() { + assertEquals(0, taskGroupQueueDao.countUsingTaskGroupQueueByGroupId(1)); + + TaskGroupQueue taskGroupQueue = createTaskGroupQueue(Flag.NO, TaskGroupQueueStatus.ACQUIRE_SUCCESS); + taskGroupQueueDao.insert(taskGroupQueue); + assertEquals(1, taskGroupQueueDao.countUsingTaskGroupQueueByGroupId(1)); + + taskGroupQueue = createTaskGroupQueue(Flag.YES, TaskGroupQueueStatus.WAIT_QUEUE); + taskGroupQueueDao.insert(taskGroupQueue); + assertEquals(1, taskGroupQueueDao.countUsingTaskGroupQueueByGroupId(1)); + } + + @Test + void queryWaitNotifyForceStartTaskGroupQueue() { + // Insert 1w records + int insertCount = RandomUtils.nextInt(10000, 20000); + List insertTaskGroupQueue = Lists.newArrayList(); + for (int i = 0; i < insertCount; i++) { + TaskGroupQueue taskGroupQueue = createTaskGroupQueue(Flag.YES, TaskGroupQueueStatus.ACQUIRE_SUCCESS); + + insertTaskGroupQueue.add(taskGroupQueue); + } + taskGroupQueueDao.insertBatch(insertTaskGroupQueue); + + int beginTaskGroupQueueId = -1; + int limit = 1000; + int queryCount = 0; + while (true) { + List taskGroupQueues = + taskGroupQueueDao.queryWaitNotifyForceStartTaskGroupQueue(beginTaskGroupQueueId, limit); + if (CollectionUtils.isEmpty(taskGroupQueues)) { + break; + } + queryCount += taskGroupQueues.size(); + if (taskGroupQueues.size() < limit) { + break; + } + beginTaskGroupQueueId = taskGroupQueues.get(taskGroupQueues.size() - 1).getId(); + } + assertEquals(insertCount, queryCount); + } + private TaskGroupQueue createTaskGroupQueue(Flag forceStart, TaskGroupQueueStatus taskGroupQueueStatus) { return TaskGroupQueue.builder() .taskId(1) diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtilsTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtilsTest.java new file mode 100644 index 000000000000..dd5356169d4e --- /dev/null +++ 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/EnvironmentUtilsTest.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.utils; + +import static com.google.common.truth.Truth.assertThat; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; + +class EnvironmentUtilsTest { + + @ParameterizedTest + @ValueSource(longs = {0, -1}) + void testIsEnvironmentCodeEmpty_emptyEnvironmentCode(Long environmentCode) { + assertThat(EnvironmentUtils.isEnvironmentCodeEmpty(environmentCode)).isTrue(); + } + + @ParameterizedTest + @ValueSource(longs = {123}) + void testIsEnvironmentCodeEmpty_nonEmptyEnvironmentCode(Long environmentCode) { + assertThat(EnvironmentUtils.isEnvironmentCodeEmpty(environmentCode)).isFalse(); + } + + @Test + void testGetDefaultEnvironmentCode() { + assertThat(EnvironmentUtils.getDefaultEnvironmentCode()).isEqualTo(-1L); + } + + @ParameterizedTest + @ValueSource(longs = {0, -1}) + void testGetEnvironmentCodeOrDefault_emptyEnvironmentCode(Long environmentCode) { + 
assertThat(EnvironmentUtils.getEnvironmentCodeOrDefault(environmentCode)).isEqualTo(-1L); + } + + @ParameterizedTest + @ValueSource(longs = {123}) + void testGetEnvironmentCodeOrDefault_nonEmptyEnvironmentCode(Long environmentCode) { + assertThat(EnvironmentUtils.getEnvironmentCodeOrDefault(environmentCode)).isEqualTo(environmentCode); + } + + @ParameterizedTest + @CsvSource(value = {",123", "-1,123"}) + void testGetEnvironmentCodeOrDefault_withDefaultValue_emptyEnvironmentCode(Long environmentCode, + Long defaultValue) { + assertThat(EnvironmentUtils.getEnvironmentCodeOrDefault(environmentCode, defaultValue)).isEqualTo(defaultValue); + } + + @ParameterizedTest + @CsvSource(value = {"1,123"}) + void testGetEnvironmentCodeOrDefault_withDefaultValue_nonEmptyEnvironmentCode(Long environmentCode, + Long defaultValue) { + assertThat(EnvironmentUtils.getEnvironmentCodeOrDefault(environmentCode, defaultValue)) + .isEqualTo(environmentCode); + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtilsTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtilsTest.java index 49d5a87bb630..ee88d1cd1a43 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtilsTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtilsTest.java @@ -20,7 +20,7 @@ import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; @@ -46,7 +46,7 @@ class TaskCacheUtilsTest { 
private TaskExecutionContext taskExecutionContext; - private StorageOperate storageOperate; + private StorageOperator storageOperator; @BeforeEach void setUp() { @@ -101,7 +101,7 @@ void setUp() { prepareParamsMap.put("a", property); taskExecutionContext.setPrepareParamsMap(prepareParamsMap); - storageOperate = Mockito.mock(StorageOperate.class); + storageOperator = Mockito.mock(StorageOperator.class); } @Test @@ -128,26 +128,26 @@ void testGetScriptVarInSet() { @Test void TestGetTaskInputVarPoolData() { - TaskCacheUtils.getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperate); + TaskCacheUtils.getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperator); // only a=aa and c=cc will influence the result, // b=bb is a fixed value, will be considered in task version // k=kk is not in task params, will be ignored String except = "[{\"prop\":\"a\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"aa\"},{\"prop\":\"c\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"cc\"}]"; Assertions.assertEquals(except, - TaskCacheUtils.getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperate)); + TaskCacheUtils.getTaskInputVarPoolData(taskInstance, taskExecutionContext, storageOperator)); } @Test void TestGenerateCacheKey() { - String cacheKeyBase = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperate); + String cacheKeyBase = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperator); Property propertyI = new Property(); propertyI.setProp("i"); propertyI.setDirect(Direct.IN); propertyI.setType(DataType.VARCHAR); propertyI.setValue("ii"); taskExecutionContext.getPrepareParamsMap().put("i", propertyI); - String cacheKeyNew = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperate); + String cacheKeyNew = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperator); // i will not influence the result, because task instance 
not use it Assertions.assertEquals(cacheKeyBase, cacheKeyNew); @@ -157,17 +157,17 @@ void TestGenerateCacheKey() { propertyD.setType(DataType.VARCHAR); propertyD.setValue("dd"); taskExecutionContext.getPrepareParamsMap().put("i", propertyD); - String cacheKeyD = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperate); + String cacheKeyD = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperator); // d will influence the result, because task instance use it Assertions.assertNotEquals(cacheKeyBase, cacheKeyD); taskInstance.setTaskDefinitionVersion(100); - String cacheKeyE = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperate); + String cacheKeyE = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperator); // task definition version is changed, so cache key changed Assertions.assertNotEquals(cacheKeyD, cacheKeyE); taskInstance.setEnvironmentConfig("export PYTHON_LAUNCHER=/bin/python3"); - String cacheKeyF = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperate); + String cacheKeyF = TaskCacheUtils.generateCacheKey(taskInstance, taskExecutionContext, storageOperator); // EnvironmentConfig is changed, so cache key changed Assertions.assertNotEquals(cacheKeyE, cacheKeyF); } @@ -193,7 +193,7 @@ void testReplaceWithCheckSum() { taskExecutionContext.setExecutePath("test"); taskExecutionContext.setTenantCode("aaa"); - String crc = TaskCacheUtils.getValCheckSum(property, taskExecutionContext, storageOperate); + String crc = TaskCacheUtils.getValCheckSum(property, taskExecutionContext, storageOperator); Assertions.assertEquals(crc, content); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtilsTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtilsTest.java new file mode 100644 index 000000000000..60bf74695dce --- /dev/null +++ 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/WorkerGroupUtilsTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.utils; + +import static com.google.common.truth.Truth.assertThat; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; + +class WorkerGroupUtilsTest { + + @ParameterizedTest + @ValueSource(strings = {"", "default"}) + void testIsWorkerGroupEmpty_emptyWorkerGroup(String workerGroup) { + assertThat(WorkerGroupUtils.isWorkerGroupEmpty(workerGroup)).isTrue(); + } + + @ParameterizedTest + @ValueSource(strings = {"123", "default1"}) + void testIsWorkerGroupEmpty_nonEmptyWorkerGroup(String workerGroup) { + assertThat(WorkerGroupUtils.isWorkerGroupEmpty(workerGroup)).isFalse(); + } + + @ParameterizedTest + @ValueSource(strings = {"", "default"}) + void testGetWorkerGroupOrDefault_emptyWorkerGroup(String workerGroup) { + assertThat(WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup)) + .isEqualTo(WorkerGroupUtils.getDefaultWorkerGroup()); + } + + @ParameterizedTest + 
@ValueSource(strings = {"test"}) + void testGetWorkerGroupOrDefault_nonEmptyWorkerGroup(String workerGroup) { + assertThat(WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup)).isEqualTo(workerGroup); + } + + @ParameterizedTest + @CsvSource(value = {",test", "default,test"}) + void testGetWorkerGroupOrDefault_withDefaultValue_emptyWorkerGroup(String workerGroup, String defaultValue) { + assertThat(WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup, defaultValue)).isEqualTo(defaultValue); + } + + @ParameterizedTest + @CsvSource(value = {"test1,test"}) + void testGetWorkerGroupOrDefault_withDefaultValue_nonEmptyWorkerGroup(String workerGroup, String defaultValue) { + assertThat(WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup, defaultValue)).isEqualTo(workerGroup); + } + + @Test + void getDefaultWorkerGroup() { + assertThat(WorkerGroupUtils.getDefaultWorkerGroup()).isEqualTo("default"); + } +} diff --git a/dolphinscheduler-dao/src/test/resources/logback.xml b/dolphinscheduler-dao/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-dao/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java index 274d4f793a14..97ae41405146 100644 --- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.data.quality.flow.batch.reader; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE; import static
org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE; import static org.apache.dolphinscheduler.data.quality.Constants.DOTS; @@ -32,17 +33,19 @@ import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils; -import org.apache.dolphinscheduler.data.quality.utils.ParserUtils; import org.apache.spark.sql.DataFrameReader; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; +import java.net.URLDecoder; import java.util.Arrays; import java.util.HashMap; import java.util.Map; +import lombok.SneakyThrows; + /** * AbstractJdbcSource */ @@ -74,6 +77,7 @@ public Dataset read(SparkRuntimeEnvironment env) { return jdbcReader(env.sparkSession()).load(); } + @SneakyThrows private DataFrameReader jdbcReader(SparkSession sparkSession) { DataFrameReader reader = sparkSession.read() @@ -81,7 +85,7 @@ private DataFrameReader jdbcReader(SparkSession sparkSession) { .option(URL, config.getString(URL)) .option(DB_TABLE, config.getString(DATABASE) + "." 
+ config.getString(TABLE)) .option(USER, config.getString(USER)) - .option(PASSWORD, ParserUtils.decode(config.getString(PASSWORD))) + .option(PASSWORD, URLDecoder.decode(config.getString(PASSWORD), UTF_8.name())) .option(DRIVER, config.getString(DRIVER)); Config jdbcConfig = ConfigUtils.extractSubConfig(config, JDBC + DOTS, false); diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java index 07b2bd60d562..b737567f2147 100644 --- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java @@ -33,13 +33,16 @@ import org.apache.dolphinscheduler.data.quality.config.ValidateResult; import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; -import org.apache.dolphinscheduler.data.quality.utils.ParserUtils; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.Arrays; +import lombok.SneakyThrows; + import com.google.common.base.Strings; /** @@ -70,6 +73,7 @@ public void prepare(SparkRuntimeEnvironment prepareEnv) { } } + @SneakyThrows @Override public void write(Dataset data, SparkRuntimeEnvironment env) { if (!Strings.isNullOrEmpty(config.getString(SQL))) { @@ -82,7 +86,7 @@ public void write(Dataset data, SparkRuntimeEnvironment env) { .option(URL, config.getString(URL)) .option(DB_TABLE, config.getString(DATABASE) + "." 
+ config.getString(TABLE)) .option(USER, config.getString(USER)) - .option(PASSWORD, ParserUtils.decode(config.getString(PASSWORD))) + .option(PASSWORD, URLDecoder.decode(config.getString(PASSWORD), StandardCharsets.UTF_8.name())) .mode(config.getString(SAVE_MODE)) .save(); } diff --git a/dolphinscheduler-data-quality/src/test/resources/logback.xml b/dolphinscheduler-data-quality/src/test/resources/logback.xml new file mode 100644 index 000000000000..9a182a18ef12 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDataSourceProcessor.java index db357e9d5ce2..4acf531ddc66 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDataSourceProcessor.java @@ -118,7 +118,7 @@ protected Map transformOtherParamToMap(String other) { @Override public String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { BaseConnectionParam baseConnectionParam = (BaseConnectionParam) connectionParam; - return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getDescp(), baseConnectionParam.getUser(), + return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getName(), baseConnectionParam.getUser(), PasswordUtils.encodePassword(baseConnectionParam.getPassword()), baseConnectionParam.getJdbcUrl()); } @@ -134,6 +134,7 @@ public boolean checkDataSourceConnectivity(ConnectionParam connectionParam) { 
@Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.other); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.other); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.other); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java index 839a4c5d61db..7223fe62a387 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java @@ -69,9 +69,9 @@ public static DataSourceClient getPooledDataSourceClient(DbType dbType, String datasourceUniqueId = DataSourceUtils.getDatasourceUniqueId(baseConnectionParam, dbType); return POOLED_DATASOURCE_CLIENT_CACHE.get(datasourceUniqueId, () -> { Map dataSourceChannelMap = dataSourcePluginManager.getDataSourceChannelMap(); - DataSourceChannel dataSourceChannel = dataSourceChannelMap.get(dbType.getDescp()); + DataSourceChannel dataSourceChannel = dataSourceChannelMap.get(dbType.getName()); if (null == dataSourceChannel) { - throw new RuntimeException(String.format("datasource plugin '%s' is not found", dbType.getDescp())); + throw new RuntimeException(String.format("datasource plugin '%s' is not found", dbType.getName())); } return dataSourceChannel.createPooledDataSourceClient(baseConnectionParam, dbType); }); @@ -85,9 +85,9 @@ public static Connection getPooledConnection(DbType dbType, public static AdHocDataSourceClient 
getAdHocDataSourceClient(DbType dbType, ConnectionParam connectionParam) { BaseConnectionParam baseConnectionParam = (BaseConnectionParam) connectionParam; Map dataSourceChannelMap = dataSourcePluginManager.getDataSourceChannelMap(); - DataSourceChannel dataSourceChannel = dataSourceChannelMap.get(dbType.getDescp()); + DataSourceChannel dataSourceChannel = dataSourceChannelMap.get(dbType.getName()); if (null == dataSourceChannel) { - throw new RuntimeException(String.format("datasource plugin '%s' is not found", dbType.getDescp())); + throw new RuntimeException(String.format("datasource plugin '%s' is not found", dbType.getName())); } return dataSourceChannel.createAdHocDataSourceClient(baseConnectionParam, dbType); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java index 751ac1ba0816..973421615ff0 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java @@ -37,6 +37,10 @@ public class DataSourceProcessorProvider { private DataSourceProcessorProvider() { } + public static void initialize() { + log.info("Initialize DataSourceProcessorProvider"); + } + public static DataSourceProcessor getDataSourceProcessor(@NonNull DbType dbType) { return dataSourcePluginManager.getDataSourceProcessorMap().get(dbType.name()); } diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java index 1c24785c2f90..eb8f2ddef05e 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java @@ -29,7 +29,7 @@ import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RESOURCE_UPLOAD_PATH; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.dolphinscheduler.common.enums.StorageType; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.commons.lang3.StringUtils; @@ -72,9 +72,9 @@ public static boolean isDevelopMode() { */ public static boolean getKerberosStartupState() { String resUploadStartupType = PropertyUtils.getUpperCaseString(RESOURCE_STORAGE_TYPE); - ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + StorageType storageType = StorageType.valueOf(resUploadStartupType); Boolean kerberosStartupState = PropertyUtils.getBoolean(HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); - return resUploadType == ResUploadType.HDFS && kerberosStartupState; + return storageType == StorageType.HDFS && kerberosStartupState; } /** diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-athena/src/main/java/org/apache/dolphinscheduler/plugin/datasource/athena/AthenaDataSourceChannelFactory.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-athena/src/main/java/org/apache/dolphinscheduler/plugin/datasource/athena/AthenaDataSourceChannelFactory.java index 1b2ed367d0a5..b4759db39ab2 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-athena/src/main/java/org/apache/dolphinscheduler/plugin/datasource/athena/AthenaDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-athena/src/main/java/org/apache/dolphinscheduler/plugin/datasource/athena/AthenaDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -32,6 +33,6 @@ public DataSourceChannel create() { @Override public String getName() { - return "athena"; + return DbType.ATHENA.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceChannelFactory.java index 5966848f3303..2b8cdca973a4 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import 
com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class AzureSQLDataSourceChannelFactory implements DataSourceChannelFactor @Override public String getName() { - return "azuresql"; + return DbType.AZURESQL.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickHouseDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickHouseDataSourceChannelFactory.java index d756226522fb..77d0feb1d18e 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickHouseDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickHouseDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class ClickHouseDataSourceChannelFactory implements DataSourceChannelFact @Override public String getName() { - return "clickhouse"; + return DbType.CLICKHOUSE.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/param/ClickHouseDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/param/ClickHouseDataSourceProcessor.java index a613806460ea..81a4415f5d15 100644 --- 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/param/ClickHouseDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/param/ClickHouseDataSourceProcessor.java @@ -129,7 +129,8 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.clickhouse); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.clickhouse); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.clickhouse); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/DamengDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/DamengDataSourceChannelFactory.java index 945f6610c0e7..84ae08013460 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/DamengDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/DamengDataSourceChannelFactory.java @@ -28,7 +28,7 @@ public class DamengDataSourceChannelFactory implements DataSourceChannelFactory @Override public String getName() { - return DbType.DAMENG.getDescp(); + return DbType.DAMENG.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java index 1af61facd37c..bc31bdcd492b 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java @@ -135,7 +135,8 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.dm); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.dm); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.dm); } private String transformOther(Map paramMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/main/java/org/apache/dolphinscheduler/plugin/datasource/databend/DatabendDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/main/java/org/apache/dolphinscheduler/plugin/datasource/databend/DatabendDataSourceChannelFactory.java index 0ea40c3b132e..3c86601dd717 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/main/java/org/apache/dolphinscheduler/plugin/datasource/databend/DatabendDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/main/java/org/apache/dolphinscheduler/plugin/datasource/databend/DatabendDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import 
com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class DatabendDataSourceChannelFactory implements DataSourceChannelFactor @Override public String getName() { - return "databend"; + return DbType.DATABEND.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/test/java/org/apache/dolphinscheduler/plugin/datasource/databend/param/DatabendDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/test/java/org/apache/dolphinscheduler/plugin/datasource/databend/param/DatabendDataSourceProcessorTest.java index f225a2fd3d2b..cb41c6562b20 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/test/java/org/apache/dolphinscheduler/plugin/datasource/databend/param/DatabendDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-databend/src/test/java/org/apache/dolphinscheduler/plugin/datasource/databend/param/DatabendDataSourceProcessorTest.java @@ -151,7 +151,7 @@ public void testGetJdbcUrl() { @Test public void testDbType() { Assertions.assertEquals(19, DbType.DATABEND.getCode()); - Assertions.assertEquals("databend", DbType.DATABEND.getDescp()); + Assertions.assertEquals("databend", DbType.DATABEND.getName()); Assertions.assertEquals(DbType.DATABEND, DbType.of(19)); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java index cda8a2e59259..3bbae238ea3b 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class DB2DataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "db2"; + return DbType.DB2.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/param/Db2DataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/param/Db2DataSourceProcessor.java index 2d67d9184373..1d7c448355e2 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/param/Db2DataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/param/Db2DataSourceProcessor.java @@ -129,7 +129,8 @@ public String getValidationQuery() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.db2); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.db2); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.db2); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-doris/src/main/java/org/apache/dolphinscheduler/plugin/doris/DorisDataSourceChannelFactory.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-doris/src/main/java/org/apache/dolphinscheduler/plugin/doris/DorisDataSourceChannelFactory.java index d663c362f24c..7180a6c6c254 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-doris/src/main/java/org/apache/dolphinscheduler/plugin/doris/DorisDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-doris/src/main/java/org/apache/dolphinscheduler/plugin/doris/DorisDataSourceChannelFactory.java @@ -32,6 +32,6 @@ public DataSourceChannel create() { @Override public String getName() { - return DbType.DORIS.getDescp(); + return DbType.DORIS.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hana/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hana/HanaDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hana/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hana/HanaDataSourceChannelFactory.java index 75aacebaff2e..91d275aab6b5 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hana/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hana/HanaDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hana/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hana/HanaDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class HanaDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "hana"; + return DbType.HANA.getName(); } @Override diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java index 96ee007c8dbc..2caa4092dc11 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class HiveDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "hive"; + return DbType.HIVE.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessor.java index 36330c17c860..09d9f4b9630d 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessor.java @@ -152,7 +152,8 @@ public DataSourceProcessor create() { @Override public List 
splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.hive); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.hive); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.hive); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/security/UserGroupInformationFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/security/UserGroupInformationFactory.java index 168ff3bdca5f..a39cdaf61436 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/security/UserGroupInformationFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/security/UserGroupInformationFactory.java @@ -20,7 +20,7 @@ import static org.apache.dolphinscheduler.common.constants.Constants.JAVA_SECURITY_KRB5_CONF; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.dolphinscheduler.common.enums.StorageType; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; @@ -120,10 +120,10 @@ private static UserGroupInformation createKerberosUser() { public static boolean openKerberos() { String resUploadStartupType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE); - ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + StorageType storageType = StorageType.valueOf(resUploadStartupType); Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, 
false); - return resUploadType == ResUploadType.HDFS && kerberosStartupState; + return storageType == StorageType.HDFS && kerberosStartupState; } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessorTest.java index 1da0cbadda94..ef5e71729cc3 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/param/HiveDataSourceProcessorTest.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.jupiter.api.Assertions; @@ -94,4 +95,23 @@ public void testGetValidationQuery() { Assertions.assertEquals(DataSourceConstants.HIVE_VALIDATION_QUERY, hiveDatasourceProcessor.getValidationQuery()); } + + @Test + void splitAndRemoveComment() { + String sql = "create table if not exists test_ods.tb_test(\n" + + " `id` bigint COMMENT 'id', -- auto increment\n" + + " `user_name` string COMMENT 'username',\n" + + " `birthday` string COMMENT 'birthday',\n" + + " `gender` int COMMENT '1 male 2 female'\n" + + ") COMMENT 'user information table' PARTITIONED BY (`date_id` string);\n" + + "\n" + + "-- insert\n" + + "insert\n" + + " overwrite table test_ods.tb_test partition(date_id = '2024-03-28') -- partition\n" + + "values\n" + + " (1, 'Magic', '1990-10-01', '1');"; + List list = hiveDatasourceProcessor.splitAndRemoveComment(sql); + Assertions.assertEquals(list.size(), 2); + + } } diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/K8sDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/K8sDataSourceChannelFactory.java index 03ec046de84e..6a4428b47b24 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/K8sDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/K8sDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -32,7 +33,7 @@ public DataSourceChannel create() { @Override public String getName() { - return "k8s"; + return DbType.K8S.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/param/K8sDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/param/K8sDataSourceProcessor.java index 9e7342d433d9..fd3b49469fd6 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/param/K8sDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-k8s/src/main/java/org/apache/dolphinscheduler/plugin/datasource/k8s/param/K8sDataSourceProcessor.java @@ -58,7 +58,7 @@ public void checkDatasourceParam(BaseDataSourceParamDTO datasourceParam) { @Override public String getDatasourceUniqueId(ConnectionParam 
connectionParam, DbType dbType) { K8sConnectionParam baseConnectionParam = (K8sConnectionParam) connectionParam; - return MessageFormat.format("{0}@{1}@{2}", dbType.getDescp(), + return MessageFormat.format("{0}@{1}@{2}", dbType.getName(), PasswordUtils.encodePassword(baseConnectionParam.getKubeConfig()), baseConnectionParam.getNamespace()); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/main/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/KyuubiDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/main/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/KyuubiDataSourceChannelFactory.java index 4c67a2098f8f..c60e74ccf846 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/main/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/KyuubiDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/main/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/KyuubiDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class KyuubiDataSourceChannelFactory implements DataSourceChannelFactory @Override public String getName() { - return "kyuubi"; + return DbType.KYUUBI.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/test/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/param/KyuubiDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/test/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/param/KyuubiDataSourceProcessorTest.java index 865565c5dc0c..a18ceb4216d2 100644 --- 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/test/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/param/KyuubiDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-kyuubi/src/test/java/org/apache/dolphinscheduler/plugin/datasource/kyuubi/param/KyuubiDataSourceProcessorTest.java @@ -143,7 +143,7 @@ public void testGetJdbcUrl() { @Test public void testDbType() { Assertions.assertEquals(18, DbType.KYUUBI.getCode()); - Assertions.assertEquals("kyuubi", DbType.KYUUBI.getDescp()); + Assertions.assertEquals("kyuubi", DbType.KYUUBI.getName()); Assertions.assertEquals(DbType.KYUUBI, DbType.of(18)); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MySQLDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MySQLDataSourceChannelFactory.java index e57fc7e61d26..adc3ec79467f 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MySQLDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MySQLDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class MySQLDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "mysql"; + return DbType.MYSQL.getName(); } @Override diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java index b954defdd1c9..0c93b9821211 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java @@ -177,7 +177,8 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.mysql); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.mysql); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.mysql); } private static boolean checkKeyIsLegitimate(String key) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/OceanBaseDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/OceanBaseDataSourceChannelFactory.java index a69d6b3ae5ec..13650679b0bb 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/OceanBaseDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/OceanBaseDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import 
org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class OceanBaseDataSourceChannelFactory implements DataSourceChannelFacto @Override public String getName() { - return "oceanbase"; + return DbType.OCEANBASE.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/param/OceanBaseDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/param/OceanBaseDataSourceProcessor.java index b07b543c4291..4d89f28eab1c 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/param/OceanBaseDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oceanbase/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oceanbase/param/OceanBaseDataSourceProcessor.java @@ -192,6 +192,7 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.oceanbase); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.oceanbase); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.oceanbase); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java index 
dedbce494693..f63aff9a2b49 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class OracleDataSourceChannelFactory implements DataSourceChannelFactory @Override public String getName() { - return "oracle"; + return DbType.ORACLE.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessor.java index 89b872d7f595..5f24b8cc1939 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessor.java @@ -41,8 +41,7 @@ import com.alibaba.druid.sql.ast.SQLStatement; import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser; -import com.alibaba.druid.sql.parser.SQLParserFeature; -import com.alibaba.druid.sql.parser.SQLStatementParser; +import com.alibaba.druid.sql.parser.SQLParserUtils; import com.google.auto.service.AutoService; 
@AutoService(DataSourceProcessor.class) @@ -149,9 +148,11 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - SQLStatementParser parser = new OracleStatementParser(sql, SQLParserFeature.KeepComments); - List statementList = parser.parseStatementList(); - return statementList.stream().map(SQLStatement::toString).collect(Collectors.toList()); + if (sql.toUpperCase().contains("BEGIN") && sql.toUpperCase().contains("END")) { + return new OracleStatementParser(sql).parseStatementList().stream().map(SQLStatement::toString) + .collect(Collectors.toList()); + } + return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.oracle); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessorTest.java index 57e316ed6365..2f9133463ded 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/param/OracleDataSourceProcessorTest.java @@ -155,13 +155,16 @@ void splitAndRemoveComment_PLSQLWithComment() { @Test void splitAndRemoveComment_MultipleSql() { - String plSql = "select * from test;select * from test2;"; + String plSql = + "select a,a-a as b from (select 1 as a,2 as b from dual) union all select 1 as a,2 as b from dual;select * from dual; -- this comment"; List sqls = oracleDatasourceProcessor.splitAndRemoveComment(plSql); // We will not split the plsql Assertions.assertEquals(2, sqls.size()); - 
Assertions.assertEquals("SELECT *\n" + - "FROM test;", sqls.get(0)); - Assertions.assertEquals("SELECT *\n" + - "FROM test2;", sqls.get(1)); + System.out.println(sqls.get(0)); + System.out.println(sqls.get(1)); + Assertions.assertEquals( + "select a,a-a as b from (select 1 as a,2 as b from dual) union all select 1 as a,2 as b from dual", + sqls.get(0)); + Assertions.assertEquals("select * from dual", sqls.get(1)); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgreSQLDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgreSQLDataSourceChannelFactory.java index 8aa6e566b781..e82a2e686052 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgreSQLDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgreSQLDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class PostgreSQLDataSourceChannelFactory implements DataSourceChannelFact @Override public String getName() { - return "postgresql"; + return DbType.POSTGRESQL.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/param/PostgreSQLDataSourceProcessor.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/param/PostgreSQLDataSourceProcessor.java index 2835d357ab04..28a0aa294578 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/param/PostgreSQLDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/param/PostgreSQLDataSourceProcessor.java @@ -131,7 +131,8 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.postgresql); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.postgresql); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.postgresql); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-presto/src/main/java/org/apache/dolphinscheduler/plugin/datasource/presto/PrestoDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-presto/src/main/java/org/apache/dolphinscheduler/plugin/datasource/presto/PrestoDataSourceChannelFactory.java index ed1292ffc99e..76bf9d0808ed 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-presto/src/main/java/org/apache/dolphinscheduler/plugin/datasource/presto/PrestoDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-presto/src/main/java/org/apache/dolphinscheduler/plugin/datasource/presto/PrestoDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import 
org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class PrestoDataSourceChannelFactory implements DataSourceChannelFactory @Override public String getName() { - return "presto"; + return DbType.PRESTO.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceChannelFactory.java index 25a587ae06c4..8c588f0b44e2 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -32,6 +33,6 @@ public DataSourceChannel create() { @Override public String getName() { - return "redshift"; + return DbType.REDSHIFT.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/SagemakerDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/SagemakerDataSourceChannelFactory.java index 04ab93f36f9d..245784361445 100644 --- 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/SagemakerDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/SagemakerDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -32,7 +33,7 @@ public DataSourceChannel create() { @Override public String getName() { - return "sagemaker"; + return DbType.SAGEMAKER.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/param/SagemakerDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/param/SagemakerDataSourceProcessor.java index 4239f45e5c66..7452ef8a14f5 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/param/SagemakerDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sagemaker/param/SagemakerDataSourceProcessor.java @@ -57,7 +57,7 @@ public void checkDatasourceParam(BaseDataSourceParamDTO datasourceParamDTO) { @Override public String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { SagemakerConnectionParam baseConnectionParam = (SagemakerConnectionParam) connectionParam; - return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getDescp(), + return MessageFormat.format("{0}@{1}@{2}@{3}", 
dbType.getName(), PasswordUtils.encodePassword(baseConnectionParam.getUserName()), PasswordUtils.encodePassword(baseConnectionParam.getPassword()), PasswordUtils.encodePassword(baseConnectionParam.getAwsRegion())); diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java index 0d0c97ecd68a..6bbfd7a6fba2 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class SnowflakeDataSourceChannelFactory implements DataSourceChannelFacto @Override public String getName() { - return "snowflake"; + return DbType.SNOWFLAKE.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java index 54c5acf0f225..c60e70576fb8 100644 --- 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java @@ -169,7 +169,7 @@ public void testCreateDatasourceParamDTO() { @Test public void testDbType() { Assertions.assertEquals(20, DbType.SNOWFLAKE.getCode()); - Assertions.assertEquals("snowflake", DbType.SNOWFLAKE.getDescp()); + Assertions.assertEquals("snowflake", DbType.SNOWFLAKE.getName()); Assertions.assertEquals(DbType.of(20), DbType.SNOWFLAKE); Assertions.assertEquals(DbType.ofName("SNOWFLAKE"), DbType.SNOWFLAKE); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceChannelFactory.java index dbda3da5bdef..25f29ff21fe3 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class SparkDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "spark"; + return DbType.SPARK.getName(); 
} @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SQLServerDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SQLServerDataSourceChannelFactory.java index e76f520d1e19..f29cf6415e65 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SQLServerDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SQLServerDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class SQLServerDataSourceChannelFactory implements DataSourceChannelFacto @Override public String getName() { - return "sqlserver"; + return DbType.SQLSERVER.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessor.java index 264f92c2b964..687d853fffae 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessor.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessor.java @@ -35,6 +35,7 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import com.alibaba.druid.sql.parser.SQLParserUtils; import com.google.auto.service.AutoService; @@ -128,7 +129,10 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.sqlserver); + return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.sqlserver) + .stream() + .map(subSql -> subSql.concat(";")) + .collect(Collectors.toList()); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessorTest.java index de30e27e0bf9..7d34755d6539 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/param/SQLServerDataSourceProcessorTest.java @@ -17,12 +17,15 @@ package org.apache.dolphinscheduler.plugin.datasource.sqlserver.param; +import static com.google.common.truth.Truth.assertThat; + import org.apache.dolphinscheduler.common.constants.DataSourceConstants; import org.apache.dolphinscheduler.common.utils.JSONUtils; import 
org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.jupiter.api.Assertions; @@ -95,4 +98,32 @@ public void testGetValidationQuery() { Assertions.assertEquals(DataSourceConstants.SQLSERVER_VALIDATION_QUERY, sqlServerDatasourceProcessor.getValidationQuery()); } + + @Test + void splitAndRemoveComment_singleSelect() { + String sql = "select * from table;"; + List subSqls = sqlServerDatasourceProcessor.splitAndRemoveComment(sql); + assertThat(subSqls).hasSize(1); + assertThat(subSqls.get(0)).isEqualTo("select * from table;"); + } + + @Test + void splitAndRemoveComment_singleMerge() { + String sql = "MERGE\n" + + " [ TOP ( expression ) [ PERCENT ] ]\n" + + " [ INTO ] [ WITH ( ) ] [ [ AS ] table_alias ]\n" + + " USING [ [ AS ] table_alias ]\n" + + " ON \n" + + " [ WHEN MATCHED [ AND ]\n" + + " THEN ] [ ...n ]\n" + + " [ WHEN NOT MATCHED [ BY TARGET ] [ AND ]\n" + + " THEN ]\n" + + " [ WHEN NOT MATCHED BY SOURCE [ AND ]\n" + + " THEN ] [ ...n ]\n" + + " [ ]\n" + + " [ OPTION ( [ ,...n ] ) ];"; + List subSqls = sqlServerDatasourceProcessor.splitAndRemoveComment(sql); + assertThat(subSqls).hasSize(1); + assertThat(subSqls.get(0)).isEqualTo(sql); + } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHDataSourceChannelFactory.java index 3195432703b1..9742c97e9bcc 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHDataSourceChannelFactory.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class SSHDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "ssh"; + return DbType.SSH.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHUtils.java index 42e1175e2efd..7abb18cee891 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/SSHUtils.java @@ -59,7 +59,7 @@ public static ClientSession getSession(SshClient client, SSHConnectionParam conn throw new Exception("Failed to add public key identity", e); } } - session.setSessionHeartbeat(SessionHeartbeatController.HeartbeatType.RESERVED, Duration.ofSeconds(3)); + session.setSessionHeartbeat(SessionHeartbeatController.HeartbeatType.IGNORE, Duration.ofSeconds(3)); return session; } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/param/SSHDataSourceProcessor.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/param/SSHDataSourceProcessor.java index 6bf0bed1b90a..1916edba3531 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/param/SSHDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-ssh/src/main/java/org/apache/dolphinscheduler/plugin/datasource/ssh/param/SSHDataSourceProcessor.java @@ -55,7 +55,7 @@ public void checkDatasourceParam(BaseDataSourceParamDTO datasourceParamDTO) { @Override public String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { SSHConnectionParam baseConnectionParam = (SSHConnectionParam) connectionParam; - return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getDescp(), baseConnectionParam.getHost(), + return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getName(), baseConnectionParam.getHost(), baseConnectionParam.getUser(), PasswordUtils.encodePassword(baseConnectionParam.getPassword())); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-starrocks/src/main/java/org/apache/dolphinscheduler/plugin/datasource/starrocks/StarRocksDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-starrocks/src/main/java/org/apache/dolphinscheduler/plugin/datasource/starrocks/StarRocksDataSourceChannelFactory.java index 50e248395253..82f78cff2102 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-starrocks/src/main/java/org/apache/dolphinscheduler/plugin/datasource/starrocks/StarRocksDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-starrocks/src/main/java/org/apache/dolphinscheduler/plugin/datasource/starrocks/StarRocksDataSourceChannelFactory.java @@ -33,6 +33,6 @@ public DataSourceChannel create() { @Override public String getName() 
{ - return DbType.STARROCKS.getDescp(); + return DbType.STARROCKS.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/TrinoDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/TrinoDataSourceChannelFactory.java index 8c9605d791ef..36a3817fb0a4 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/TrinoDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/TrinoDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class TrinoDataSourceChannelFactory implements DataSourceChannelFactory { @Override public String getName() { - return "trino"; + return DbType.TRINO.getName(); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/param/TrinoDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/param/TrinoDataSourceProcessor.java index 77b10b51621b..bd53acaaf8fd 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/param/TrinoDataSourceProcessor.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-trino/src/main/java/org/apache/dolphinscheduler/plugin/datasource/trino/param/TrinoDataSourceProcessor.java @@ -131,7 +131,8 @@ public DataSourceProcessor create() { @Override public List splitAndRemoveComment(String sql) { - return SQLParserUtils.splitAndRemoveComment(sql, com.alibaba.druid.DbType.trino); + String cleanSQL = SQLParserUtils.removeComment(sql, com.alibaba.druid.DbType.trino); + return SQLParserUtils.split(cleanSQL, com.alibaba.druid.DbType.trino); } private String transformOther(Map otherMap) { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-vertica/src/main/java/org/apache/dolphinscheduler/plugin/datasource/vertica/VerticaDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-vertica/src/main/java/org/apache/dolphinscheduler/plugin/datasource/vertica/VerticaDataSourceChannelFactory.java index b507a207b477..44e151f2f272 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-vertica/src/main/java/org/apache/dolphinscheduler/plugin/datasource/vertica/VerticaDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-vertica/src/main/java/org/apache/dolphinscheduler/plugin/datasource/vertica/VerticaDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -27,7 +28,7 @@ public class VerticaDataSourceChannelFactory implements DataSourceChannelFactory @Override public String getName() { - return "vertica"; + return DbType.VERTICA.getName(); } @Override diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/ZeppelinDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/ZeppelinDataSourceChannelFactory.java index 692819cf788b..559ee558363e 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/ZeppelinDataSourceChannelFactory.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/ZeppelinDataSourceChannelFactory.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; +import org.apache.dolphinscheduler.spi.enums.DbType; import com.google.auto.service.AutoService; @@ -32,7 +33,7 @@ public DataSourceChannel create() { @Override public String getName() { - return "zeppelin"; + return DbType.ZEPPELIN.getName(); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/param/ZeppelinDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/param/ZeppelinDataSourceProcessor.java index 92077275adcf..88a913974e9f 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/param/ZeppelinDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/datasource/zeppelin/param/ZeppelinDataSourceProcessor.java @@ -56,7 +56,7 @@ public 
void checkDatasourceParam(BaseDataSourceParamDTO datasourceParamDTO) { @Override public String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { ZeppelinConnectionParam baseConnectionParam = (ZeppelinConnectionParam) connectionParam; - return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getDescp(), baseConnectionParam.getRestEndpoint(), + return MessageFormat.format("{0}@{1}@{2}@{3}", dbType.getName(), baseConnectionParam.getRestEndpoint(), baseConnectionParam.getUsername(), PasswordUtils.encodePassword(baseConnectionParam.getPassword())); } diff --git a/dolphinscheduler-dist/release-docs/LICENSE b/dolphinscheduler-dist/release-docs/LICENSE index 8856138aa4b5..1fbd3e6e521b 100644 --- a/dolphinscheduler-dist/release-docs/LICENSE +++ b/dolphinscheduler-dist/release-docs/LICENSE @@ -525,10 +525,9 @@ The text of each license is also included at licenses/LICENSE-[project].txt. reactor-netty-core 1.0.22: https://mvnrepository.com/artifact/io.projectreactor.netty/reactor-netty-core/1.0.22, Apache 2.0 reactor-netty-http 1.0.22: https://mvnrepository.com/artifact/io.projectreactor.netty/reactor-netty-http/1.0.22, Apache 2.0 DmJdbcDriver18 8.1.2.79: https://mvnrepository.com/artifact/com.dameng/DmJdbcDriver18/8.1.2.79, Apache 2.0 - nimbus-jose-jwt 9.22: https://mvnrepository.com/artifact/com.nimbusds/nimbus-jose-jwt/9.22, Apache 2.0 woodstox-core 6.4.0: https://mvnrepository.com/artifact/com.fasterxml.woodstox/woodstox-core/6.4.0, Apache 2.0 auto-value 1.10.1: https://mvnrepository.com/artifact/com.google.auto.value/auto-value/1.10.1, Apache 2.0 - auto-value-annotations 1.10.1: https://mvnrepository.com/artifact/com.google.auto.value/auto-value-annotations/1.10.1, Apache 2.0 + auto-value-annotations 1.10.4: https://mvnrepository.com/artifact/com.google.auto.value/auto-value-annotations/1.10.4, Apache 2.0 conscrypt-openjdk-uber 2.5.2: https://mvnrepository.com/artifact/org.conscrypt/conscrypt-openjdk-uber/2.5.2, Apache 2.0 gapic-google-cloud-storage-v2 
2.18.0-alpha: https://mvnrepository.com/artifact/com.google.api.grpc/gapic-google-cloud-storage-v2/2.18.0-alpha, Apache 2.0 google-api-client 2.2.0: https://mvnrepository.com/artifact/com.google.api-client/google-api-client/2.2.0, Apache 2.0 @@ -667,7 +666,6 @@ The text of each license is also included at licenses/LICENSE-[project].txt. animal-sniffer-annotations 1.19 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.19, MIT checker-qual 3.12.0 https://mvnrepository.com/artifact/org.checkerframework/checker-qual/3.12.0, MIT + GPLv2 checker-qual 3.19.0 https://mvnrepository.com/artifact/org.checkerframework/checker-qual/3.19.0, MIT + GPLv2 - Java-WebSocket 1.5.1: https://github.com/TooTallNate/Java-WebSocket, MIT oshi-core 6.1.1: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/6.1.1, MIT unirest-java 3.7.04-standalone: https://mvnrepository.com/artifact/com.konghq/unirest-java/3.7.04, MIT classgraph 4.8.83: https://mvnrepository.com/artifact/io.github.classgraph/classgraph, MIT @@ -748,7 +746,7 @@ MIT licenses axios 0.27.2: https://github.com/axios/axios MIT date-fns 2.29.3: https://github.com/date-fns/date-fns MIT lodash 4.17.21: https://github.com/lodash/lodash MIT - monaco-editor 0.34.0: https://github.com/microsoft/monaco-editor MIT + monaco-editor 0.50.0: https://github.com/microsoft/monaco-editor MIT naive-ui 2.30.7: https://github.com/TuSimple/naive-ui MIT nprogress 0.2.0: https://github.com/rstacruz/nprogress MIT pinia 2.0.22: https://github.com/vuejs/pinia MIT diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-Java-WebSocket.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-Java-WebSocket.txt deleted file mode 100644 index dbf7415b4152..000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-Java-WebSocket.txt +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2010-2020 Nathan Rajlich - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this 
software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/dolphinscheduler-e2e/README.md b/dolphinscheduler-e2e/README.md index f2fbcead77e8..eb332de90e1d 100644 --- a/dolphinscheduler-e2e/README.md +++ b/dolphinscheduler-e2e/README.md @@ -97,3 +97,27 @@ class TenantE2ETest { - For UI tests, it's common that the pages might need some time to load, or the operations might need some time to complete, we can use `await().untilAsserted(() -> {})` to wait for the assertions. +## Local development + +### Mac M1 +Add VM options to the test configuration in IntelliJ IDEA: +``` +# In this mode you need to install docker desktop for mac and run it with locally +-Dm1_chip=true +``` + +### Running locally(without Docker) +``` +# In this mode you need to start frontend and backend services locally +-Dlocal=true +``` + +### Running locally(with Docker) +``` +# In this mode you only need to install docker locally +``` + +- To run the tests locally, you need to have the DolphinScheduler running locally. 
You should add `dolphinscheduler-e2e/pom.xml` to the maven project + Since it does not participate in project compilation, it is not in the main project. +- Running run test class `org.apache.dolphinscheduler.e2e.cases.UserE2ETest` in the IDE. After execution, the test video will be saved as mp4 in a local temporary directory. Such as + `/var/folders/hf/123/T/record-3123/PASSED-[engine:junit-jupiter]/[class:org.apache.dolphinscheduler.e2e.cases.UserE2ETest]-20240606-152333.mp4` diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java index e150e73e98a4..09d8fa4ea824 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClickhouseDataSourceE2ETest.java @@ -1,111 +1,107 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; - -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - - -@DolphinScheduler(composeFiles = "docker/datasource-clickhouse/docker-compose.yaml") -public class ClickhouseDataSourceE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String dataSourceType = "CLICKHOUSE"; - - private static final String dataSourceName = "clickhouse_test"; - - private static final String dataSourceDescription = "clickhouse_test"; - - private static final String ip = "clickhouse"; - - private static final String port = "8123"; - - private static final String userName = "ch_test"; - - private static final String pgPassword = "ch_test"; - - private static final String database = "ch_test"; - - private static final String jdbcParams = ""; - - - @BeforeAll - public static void setup() { - new LoginPage(browser) - .login(user, password) - .goToNav(DataSourcePage.class); - } - - @Test - @Order(10) - void testCreateClickhouseDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, 
userName, pgPassword, database, jdbcParams); - - new WebDriverWait(page.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated( - new By.ByClassName("dialog-create-data-source"))); - - Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) - .as("DataSource list should contain newly-created database") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(dataSourceName))); - } - - @Test - @Order(20) - void testDeleteClickhouseDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.delete(dataSourceName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.dataSourceItemsList() - ).noneMatch( - it -> it.getText().contains(dataSourceName) - ); - }); - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ + +package org.apache.dolphinscheduler.e2e.cases; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.openqa.selenium.By; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.ui.ExpectedConditions; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +@DolphinScheduler(composeFiles = "docker/datasource-clickhouse/docker-compose.yaml") +public class ClickhouseDataSourceE2ETest { + + private static RemoteWebDriver browser; + + private static final String tenant = System.getProperty("user.name"); + + private static final String user = "admin"; + + private static final String password = "dolphinscheduler123"; + + private static final String dataSourceType = "CLICKHOUSE"; + + private static final String dataSourceName = "clickhouse_test"; + + private static final String dataSourceDescription = "clickhouse_test"; + + private static final String ip = "clickhouse"; + + private static final String port = "8123"; + + private static final String userName = "ch_test"; + + private static final String pgPassword = "ch_test"; + + private static final String database = "ch_test"; + + private static final String jdbcParams = ""; + + @BeforeAll + public static void setup() { + new LoginPage(browser) + .login(user, password) + .goToNav(DataSourcePage.class); + } + + @Test + @Order(10) + void testCreateClickhouseDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, pgPassword, + 
database, jdbcParams); + + WebDriverWaitFactory.createWebDriverWait(page.driver()).until(ExpectedConditions.invisibilityOfElementLocated( + new By.ByClassName("dialog-create-data-source"))); + + Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) + .as("DataSource list should contain newly-created database") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(dataSourceName))); + } + + @Test + @Order(20) + void testDeleteClickhouseDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.delete(dataSourceName); + + Awaitility.await().untilAsserted(() -> { + browser.navigate().refresh(); + + assertThat( + page.dataSourceItemsList()).noneMatch( + it -> it.getText().contains(dataSourceName)); + }); + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java index a7e526540bec..b75b496b5e17 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ClusterE2ETest.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; @@ -27,13 +26,13 @@ import org.apache.dolphinscheduler.e2e.pages.security.ClusterPage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; +import 
org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class ClusterE2ETest { @@ -53,8 +52,7 @@ public static void setup() { new LoginPage(browser) .login("admin", "dolphinscheduler123") .goToNav(SecurityPage.class) - .goToTab(ClusterPage.class) - ; + .goToTab(ClusterPage.class); } @Test @@ -78,10 +76,8 @@ void testCreateDuplicateCluster() { final ClusterPage page = new ClusterPage(browser); page.create(clusterName, clusterConfig, clusterDesc); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.createClusterForm().buttonCancel().click(); } @@ -112,12 +108,10 @@ void testDeleteCluster() { browser.navigate().refresh(); assertThat( - page.clusterList() - ) - .as("Cluster list should not contain deleted cluster") - .noneMatch( - it -> it.getText().contains(clusterName) || it.getText().contains(editClusterName) - ); + page.clusterList()) + .as("Cluster list should not contain deleted cluster") + .noneMatch( + it -> it.getText().contains(clusterName) || it.getText().contains(editClusterName)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java index 1e3d84864534..626f2cc44b4a 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/EnvironmentE2ETest.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import 
org.apache.dolphinscheduler.e2e.core.DolphinScheduler; @@ -27,13 +26,13 @@ import org.apache.dolphinscheduler.e2e.pages.security.EnvironmentPage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class EnvironmentE2ETest { @@ -55,8 +54,7 @@ public static void setup() { new LoginPage(browser) .login("admin", "dolphinscheduler123") .goToNav(SecurityPage.class) - .goToTab(EnvironmentPage.class) - ; + .goToTab(EnvironmentPage.class); } @Test @@ -80,10 +78,8 @@ void testCreateDuplicateEnvironment() { final EnvironmentPage page = new EnvironmentPage(browser); page.create(environmentName, environmentConfig, environmentDesc, environmentWorkerGroup); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.createEnvironmentForm().buttonCancel().click(); } @@ -92,7 +88,8 @@ void testCreateDuplicateEnvironment() { @Order(30) void testEditEnvironment() { final EnvironmentPage page = new EnvironmentPage(browser); - page.update(environmentName, editEnvironmentName, editEnvironmentConfig, editEnvironmentDesc, editEnvironmentWorkerGroup); + page.update(environmentName, editEnvironmentName, editEnvironmentConfig, editEnvironmentDesc, + editEnvironmentWorkerGroup); Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); @@ -114,12 +111,11 @@ void testDeleteEnvironment() { browser.navigate().refresh(); assertThat( - 
page.environmentList() - ) - .as("Environment list should not contain deleted environment") - .noneMatch( - it -> it.getText().contains(environmentName) || it.getText().contains(editEnvironmentName) - ); + page.environmentList()) + .as("Environment list should not contain deleted environment") + .noneMatch( + it -> it.getText().contains(environmentName) + || it.getText().contains(editEnvironmentName)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java index 1bfa997f85d8..83d45f4531a1 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FileManageE2ETest.java @@ -19,11 +19,11 @@ */ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.Constants; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.resource.FileManagePage; import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; @@ -36,10 +36,10 @@ import java.io.RandomAccessFile; import java.nio.file.Files; import java.nio.file.Path; -import java.time.Duration; import java.util.Comparator; -import org.testcontainers.shaded.org.awaitility.Awaitility; +import lombok.SneakyThrows; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; @@ -48,12 +48,11 @@ import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import 
org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.SneakyThrows; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml") public class FileManageE2ETest { + private static RemoteWebDriver browser; private static final String tenant = System.getProperty("user.name"); @@ -91,19 +90,19 @@ public static void setup() { .create(tenant); Awaitility.await().untilAsserted(() -> assertThat(tenantPage.tenantList()) - .as("Tenant list should contain newly-created tenant") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(tenant))); + .as("Tenant list should contain newly-created tenant") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(tenant))); UserPage userPage = tenantPage.goToNav(SecurityPage.class) - .goToTab(UserPage.class); + .goToTab(UserPage.class); - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( + WebDriverWaitFactory.createWebDriverWait(userPage.driver()).until(ExpectedConditions.visibilityOfElementLocated( new By.ByClassName("name"))); userPage.update(user, user, email, phone, tenant) - .goToNav(ResourcePage.class) - .goToTab(FileManagePage.class); + .goToNav(ResourcePage.class) + .goToTab(FileManagePage.class); } @AfterAll @@ -112,9 +111,9 @@ public static void cleanup() { Files.deleteIfExists(testUnder1GBFilePath); Files.deleteIfExists(testOver1GBFilePath); Files.walk(Constants.HOST_CHROME_DOWNLOAD_PATH) - .sorted(Comparator.reverseOrder()) - .map(Path::toFile) - .forEach(File::delete); + .sorted(Comparator.reverseOrder()) + .map(Path::toFile) + .forEach(File::delete); } @Test @@ -138,57 +137,57 @@ void testCancelCreateDirectory() { page.cancelCreateDirectory(testDirectoryName); Awaitility.await().untilAsserted(() -> assertThat(page.fileList()) - .as("File list should contain newly-created file") - 
.extracting(WebElement::getText) - .anyMatch(it -> it.contains(testDirectoryName))); + .as("File list should contain newly-created file") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(testDirectoryName))); } -// @Test -// @Order(20) -// void testCreateDuplicateDirectory() { -// final FileManagePage page = new FileManagePage(browser); -// -// page.createDirectory(testDirectoryName, "test_desc"); -// -// await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) -// .contains("resource already exists") -// ); -// -// page.createDirectoryBox().buttonCancel().click(); -// } - -// @Test -// @Order(21) -// void testCreateSubDirectory() { -// final FileManagePage page = new FileManagePage(browser); -// -// page.createSubDirectory(testDirectoryName, testSubDirectoryName, "test_desc"); -// -// await().untilAsserted(() -> assertThat(page.fileList()) -// .as("File list should contain newly-created file") -// .extracting(WebElement::getText) -// .anyMatch(it -> it.contains(testSubDirectoryName))); -// } - -/* -* when the storage is s3,the directory cannot be renamed -* */ -// @Test -// @Order(22) -// void testRenameDirectory() { -// final FileManagePage page = new FileManagePage(browser); -// -// page.rename(testDirectoryName, testRenameDirectoryName); -// -// await().untilAsserted(() -> { -// browser.navigate().refresh(); -// -// assertThat(page.fileList()) -// .as("File list should contain newly-created file") -// .extracting(WebElement::getText) -// .anyMatch(it -> it.contains(testRenameDirectoryName)); -// }); -// } + // @Test + // @Order(20) + // void testCreateDuplicateDirectory() { + // final FileManagePage page = new FileManagePage(browser); + // + // page.createDirectory(testDirectoryName, "test_desc"); + // + // await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + // .contains("resource already exists") + // ); + // + // page.createDirectoryBox().buttonCancel().click(); + // } + 
+ // @Test + // @Order(21) + // void testCreateSubDirectory() { + // final FileManagePage page = new FileManagePage(browser); + // + // page.createSubDirectory(testDirectoryName, testSubDirectoryName, "test_desc"); + // + // await().untilAsserted(() -> assertThat(page.fileList()) + // .as("File list should contain newly-created file") + // .extracting(WebElement::getText) + // .anyMatch(it -> it.contains(testSubDirectoryName))); + // } + + /* + * when the storage is s3,the directory cannot be renamed + */ + // @Test + // @Order(22) + // void testRenameDirectory() { + // final FileManagePage page = new FileManagePage(browser); + // + // page.rename(testDirectoryName, testRenameDirectoryName); + // + // await().untilAsserted(() -> { + // browser.navigate().refresh(); + // + // assertThat(page.fileList()) + // .as("File list should contain newly-created file") + // .extracting(WebElement::getText) + // .anyMatch(it -> it.contains(testRenameDirectoryName)); + // }); + // } @Test @Order(30) @@ -196,17 +195,15 @@ void testDeleteDirectory() { final FileManagePage page = new FileManagePage(browser); page.goToNav(ResourcePage.class) - .goToTab(FileManagePage.class) - .delete(testDirectoryName); + .goToTab(FileManagePage.class) + .delete(testDirectoryName); Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); assertThat( - page.fileList() - ).noneMatch( - it -> it.getText().contains(testDirectoryName) - ); + page.fileList()).noneMatch( + it -> it.getText().contains(testDirectoryName)); }); } @@ -219,9 +216,9 @@ void testCreateFile() { page.createFile(testFileName, scripts); Awaitility.await().untilAsserted(() -> assertThat(page.fileList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testFileName))); + .as("File list should contain newly-created file") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(testFileName))); } @Test @@ -235,9 +232,9 @@ void testRenameFile() { 
browser.navigate().refresh(); assertThat(page.fileList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testRenameFileName)); + .as("File list should contain newly-created file") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(testRenameFileName)); }); } @@ -250,9 +247,9 @@ void testEditFile() { page.editFile(testRenameFileName, scripts); Awaitility.await().untilAsserted(() -> assertThat(page.fileList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testRenameFileName))); + .as("File list should contain newly-created file") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(testRenameFileName))); } @Test @@ -266,10 +263,8 @@ void testDeleteFile() { browser.navigate().refresh(); assertThat( - page.fileList() - ).noneMatch( - it -> it.getText().contains(testRenameFileName) - ); + page.fileList()).noneMatch( + it -> it.getText().contains(testRenameFileName)); }); } @@ -285,13 +280,14 @@ void testUploadUnder1GBFile() throws IOException { page.uploadFile(testUnder1GBFilePath.toFile().getAbsolutePath()); - new WebDriverWait(browser, Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated(By.id("fileUpdateDialog"))); + WebDriverWaitFactory.createWebDriverWait(browser) + .until(ExpectedConditions.invisibilityOfElementLocated(By.id("fileUpdateDialog"))); Awaitility.await().untilAsserted(() -> { assertThat(page.fileList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testUnder1GBFileName)); + .as("File list should contain newly-created file") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(testUnder1GBFileName)); }); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FunctionManageE2ETest.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FunctionManageE2ETest.java deleted file mode 100644 index 144c430eeeab..000000000000 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/FunctionManageE2ETest.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import lombok.SneakyThrows; -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.Constants; -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.resource.FunctionManagePage; -import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; -import org.apache.dolphinscheduler.e2e.pages.resource.UdfManagePage; -import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; -import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; -import org.apache.dolphinscheduler.e2e.pages.security.UserPage; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.net.URL; -import java.net.URLConnection; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Comparator; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -@DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml") -public class FunctionManageE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String email = "admin@gmail.com"; - - private static final String phone = "15800000000"; - - private static final String testUdfFunctionName = "test_function"; - - private 
static final String testRenameUdfFunctionName = "test_rename_function"; - - private static final String testUploadUdfFileName = "hive-jdbc-3.1.2.jar"; - - private static final String testClassName = "org.dolphinscheduler.UdfTest"; - - private static final String testDescription = "test_description"; - - private static final Path testUploadUdfFilePath = Constants.HOST_TMP_PATH.resolve(testUploadUdfFileName); - - @BeforeAll - @SneakyThrows - public static void setup() { - TenantPage tenantPage = new LoginPage(browser) - .login(user, password) - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .create(tenant); - - Awaitility.await().untilAsserted(() -> assertThat(tenantPage.tenantList()) - .as("Tenant list should contain newly-created tenant") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(tenant))); - - downloadFile("https://repo1.maven.org/maven2/org/apache/hive/hive-jdbc/3.1.2/hive-jdbc-3.1.2.jar", testUploadUdfFilePath.toFile().getAbsolutePath()); - - UserPage userPage = tenantPage.goToNav(SecurityPage.class) - .goToTab(UserPage.class); - - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( - new By.ByClassName("name"))); - - UdfManagePage udfManagePage = userPage.update(user, user, email, phone, tenant) - .goToNav(ResourcePage.class) - .goToTab(UdfManagePage.class) - .uploadFile(testUploadUdfFilePath.toFile().getAbsolutePath()); - - udfManagePage.goToNav(ResourcePage.class) - .goToTab(FunctionManagePage.class); - } - - @AfterAll - @SneakyThrows - public static void cleanup() { - Files.walk(Constants.HOST_CHROME_DOWNLOAD_PATH) - .sorted(Comparator.reverseOrder()) - .map(Path::toFile) - .forEach(File::delete); - - Files.deleteIfExists(testUploadUdfFilePath); - } - - static void downloadFile(String downloadUrl, String filePath) throws Exception { - int byteRead; - - URL url = new URL(downloadUrl); - - URLConnection conn = url.openConnection(); - InputStream inStream = 
conn.getInputStream(); - FileOutputStream fs = new FileOutputStream(filePath); - - byte[] buffer = new byte[1024]; - while ((byteRead = inStream.read(buffer)) != -1) { - fs.write(buffer, 0, byteRead); - } - - inStream.close(); - fs.close(); - } - - @Test - @Order(10) - void testCreateUdfFunction() { - FunctionManagePage page = new FunctionManagePage(browser); - - page.createUdfFunction(testUdfFunctionName, testClassName, testUploadUdfFileName, testDescription); - - Awaitility.await().untilAsserted(() -> assertThat(page.functionList()) - .as("Function list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testUdfFunctionName))); - } - - @Test - @Order(20) - void testRenameUdfFunction() { - FunctionManagePage page = new FunctionManagePage(browser); - - browser.navigate().refresh(); - - page.renameUdfFunction(testUdfFunctionName, testRenameUdfFunctionName); - - Awaitility.await().pollDelay(Duration.ofSeconds(2)).untilAsserted(() -> assertThat(page.functionList()) - .as("Function list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testRenameUdfFunctionName))); - } - - @Test - @Order(30) - void testDeleteUdfFunction() { - FunctionManagePage page = new FunctionManagePage(browser); - - page.deleteUdfFunction(testRenameUdfFunctionName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.functionList() - ).noneMatch( - it -> it.getText().contains(testRenameUdfFunctionName) - ); - }); - } -} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java index 9b6e661f52d1..fa9fb809139f 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java 
+++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/HiveDataSourceE2ETest.java @@ -23,11 +23,10 @@ import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; -import java.time.Duration; - import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -35,11 +34,11 @@ import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/datasource-hive/docker-compose.yaml") public class HiveDataSourceE2ETest { + private static RemoteWebDriver browser; private static final String tenant = System.getProperty("user.name"); @@ -69,9 +68,8 @@ public class HiveDataSourceE2ETest { @BeforeAll public static void setup() { new LoginPage(browser) - .login(user, password) - .goToNav(DataSourcePage.class); - + .login(user, password) + .goToNav(DataSourcePage.class); } @@ -80,15 +78,16 @@ public static void setup() { void testCreateHiveDataSource() { final DataSourcePage page = new DataSourcePage(browser); - page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, hivePassword, database, jdbcParams); + page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, hivePassword, + database, jdbcParams); - new WebDriverWait(page.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated( + WebDriverWaitFactory.createWebDriverWait(page.driver()).until(ExpectedConditions.invisibilityOfElementLocated( 
new By.ByClassName("dialog-create-data-source"))); Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) - .as("DataSource list should contain newly-created database") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(dataSourceName))); + .as("DataSource list should contain newly-created database") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(dataSourceName))); } @Test @@ -102,10 +101,8 @@ void testDeleteHiveDataSource() { browser.navigate().refresh(); assertThat( - page.dataSourceItemsList() - ).noneMatch( - it -> it.getText().contains(dataSourceName) - ); + page.dataSourceItemsList()).noneMatch( + it -> it.getText().contains(dataSourceName)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java index 5078f55e3527..faf033f0d0d8 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/MysqlDataSourceE2ETest.java @@ -1,112 +1,108 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; - -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - - -@DolphinScheduler(composeFiles = "docker/datasource-mysql/docker-compose.yaml") -public class MysqlDataSourceE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String dataSourceType = "MYSQL"; - - private static final String dataSourceName = "mysql_test"; - - private static final String dataSourceDescription = "mysql_test"; - - private static final String ip = "mysql"; - - private static final String port = "3306"; - - private static final String userName = "root"; - - private static final String mysqlPassword = "123456"; - - private static final String database = "mysql"; - - private 
static final String jdbcParams = "{\"useSSL\": false}"; - - - @BeforeAll - public static void setup() { - new LoginPage(browser) - .login(user, password) - .goToNav(DataSourcePage.class); - } - - @Test - @Order(10) - void testCreateMysqlDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, mysqlPassword, database, jdbcParams); - - new WebDriverWait(page.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated( - new By.ByClassName("dialog-create-data-source"))); - - Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) - .as("DataSource list should contain newly-created database") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(dataSourceName))); - } - - @Test - @Order(20) - void testDeleteMysqlDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.delete(dataSourceName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.dataSourceItemsList() - ).noneMatch( - it -> it.getText().contains(dataSourceName) - ); - }); - } - -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.dolphinscheduler.e2e.cases; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.openqa.selenium.By; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.ui.ExpectedConditions; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +@DolphinScheduler(composeFiles = "docker/datasource-mysql/docker-compose.yaml") +public class MysqlDataSourceE2ETest { + + private static RemoteWebDriver browser; + + private static final String tenant = System.getProperty("user.name"); + + private static final String user = "admin"; + + private static final String password = "dolphinscheduler123"; + + private static final String dataSourceType = "MYSQL"; + + private static final String dataSourceName = "mysql_test"; + + private static final String dataSourceDescription = "mysql_test"; + + private static final String ip = "mysql"; + + private static final String port = "3306"; + + private static final String userName = "root"; + + private static final String mysqlPassword = "123456"; + + private static final String database = "mysql"; + + private static final String jdbcParams = "{\"useSSL\": false}"; + + @BeforeAll + public static void setup() { + new LoginPage(browser) + .login(user, password) + .goToNav(DataSourcePage.class); + } + + @Test + @Order(10) + void testCreateMysqlDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + 
page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, mysqlPassword, + database, jdbcParams); + + WebDriverWaitFactory.createWebDriverWait(page.driver()).until(ExpectedConditions.invisibilityOfElementLocated( + new By.ByClassName("dialog-create-data-source"))); + + Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) + .as("DataSource list should contain newly-created database") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(dataSourceName))); + } + + @Test + @Order(20) + void testDeleteMysqlDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.delete(dataSourceName); + + Awaitility.await().untilAsserted(() -> { + browser.navigate().refresh(); + + assertThat( + page.dataSourceItemsList()).noneMatch( + it -> it.getText().contains(dataSourceName)); + }); + } + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java index 647c6677416d..a8a1db9e3d11 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/PostgresDataSourceE2ETest.java @@ -1,111 +1,107 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; - -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - - -@DolphinScheduler(composeFiles = "docker/datasource-postgresql/docker-compose.yaml") -public class PostgresDataSourceE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String dataSourceType = "POSTGRESQL"; - - private static final String dataSourceName = "postgres_test"; - - private static final String dataSourceDescription = "postgres_test"; - - private static final String ip = "postgres"; - - private static final String port = "5432"; - - private static final String userName = "postgres"; - - private static final String pgPassword = "postgres"; - - private static final String database = 
"postgres"; - - private static final String jdbcParams = ""; - - - @BeforeAll - public static void setup() { - new LoginPage(browser) - .login(user, password) - .goToNav(DataSourcePage.class); - } - - @Test - @Order(10) - void testCreatePostgresDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, pgPassword, database, jdbcParams); - - new WebDriverWait(page.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated( - new By.ByClassName("dialog-create-data-source"))); - - Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) - .as("DataSource list should contain newly-created database") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(dataSourceName))); - } - - @Test - @Order(20) - void testDeletePostgresDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.delete(dataSourceName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.dataSourceItemsList() - ).noneMatch( - it -> it.getText().contains(dataSourceName) - ); - }); - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.dolphinscheduler.e2e.cases; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.openqa.selenium.By; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.ui.ExpectedConditions; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +@DolphinScheduler(composeFiles = "docker/datasource-postgresql/docker-compose.yaml") +public class PostgresDataSourceE2ETest { + + private static RemoteWebDriver browser; + + private static final String tenant = System.getProperty("user.name"); + + private static final String user = "admin"; + + private static final String password = "dolphinscheduler123"; + + private static final String dataSourceType = "POSTGRESQL"; + + private static final String dataSourceName = "postgres_test"; + + private static final String dataSourceDescription = "postgres_test"; + + private static final String ip = "postgres"; + + private static final String port = "5432"; + + private static final String userName = "postgres"; + + private static final String pgPassword = "postgres"; + + private static final String database = "postgres"; + + private static final String jdbcParams = ""; + + @BeforeAll + public static void setup() { + new LoginPage(browser) + .login(user, password) + .goToNav(DataSourcePage.class); + } + + @Test + @Order(10) + void testCreatePostgresDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + 
page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, pgPassword, + database, jdbcParams); + + WebDriverWaitFactory.createWebDriverWait(page.driver()).until(ExpectedConditions.invisibilityOfElementLocated( + new By.ByClassName("dialog-create-data-source"))); + + Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) + .as("DataSource list should contain newly-created database") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(dataSourceName))); + } + + @Test + @Order(20) + void testDeletePostgresDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.delete(dataSourceName); + + Awaitility.await().untilAsserted(() -> { + browser.navigate().refresh(); + + assertThat( + page.dataSourceItemsList()).noneMatch( + it -> it.getText().contains(dataSourceName)); + }); + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java index f9f54299e8d4..e23a1000b1c2 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/ProjectE2ETest.java @@ -25,14 +25,15 @@ import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.remote.RemoteWebDriver; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class ProjectE2ETest { + private static final String project = 
"test-project-1"; private static RemoteWebDriver browser; @@ -59,10 +60,8 @@ void testDeleteProject() { Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); assertThat( - page.projectList() - ).noneMatch( - it -> it.getText().contains(project) - ); + page.projectList()).noneMatch( + it -> it.getText().contains(project)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java index 0ad3ee3a357f..c9c73d87149c 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/QueueE2ETest.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; @@ -27,13 +26,13 @@ import org.apache.dolphinscheduler.e2e.pages.security.QueuePage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class QueueE2ETest { @@ -50,8 +49,7 @@ public static void setup() { new LoginPage(browser) .login("admin", "dolphinscheduler123") .goToNav(SecurityPage.class) - .goToTab(QueuePage.class) - ; + .goToTab(QueuePage.class); } @Test @@ -75,10 +73,8 @@ void testCreateDuplicateQueue() { final QueuePage page = new QueuePage(browser); page.create(queueName, 
queueValue); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.createQueueForm().buttonCancel().click(); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java index 7777b9a44158..c01452eaecd6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/SqlServerDataSourceE2ETest.java @@ -1,111 +1,107 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; - -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - - -@DolphinScheduler(composeFiles = "docker/datasource-sqlserver/docker-compose.yaml") -public class SqlServerDataSourceE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String dataSourceType = "SQLSERVER"; - - private static final String dataSourceName = "sqlserver_test"; - - private static final String dataSourceDescription = "sqlserver_test"; - - private static final String ip = "sqlserver"; - - private static final String port = "1433"; - - private static final String userName = "sa"; - - private static final String pgPassword = "OcP2020123"; - - private static final String database = "master"; - - private static final String jdbcParams = ""; - - - @BeforeAll - public static void setup() { - new LoginPage(browser) - .login(user, password) - .goToNav(DataSourcePage.class); - } - - @Test - @Order(10) - void testCreateSqlServerDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, 
pgPassword, database, jdbcParams); - - new WebDriverWait(page.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.invisibilityOfElementLocated( - new By.ByClassName("dialog-create-data-source"))); - - Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) - .as("DataSource list should contain newly-created database") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(dataSourceName))); - } - - @Test - @Order(20) - void testDeleteSqlServerDataSource() { - final DataSourcePage page = new DataSourcePage(browser); - - page.delete(dataSourceName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.dataSourceItemsList() - ).noneMatch( - it -> it.getText().contains(dataSourceName) - ); - }); - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ + +package org.apache.dolphinscheduler.e2e.cases; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.openqa.selenium.By; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.ui.ExpectedConditions; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +@DolphinScheduler(composeFiles = "docker/datasource-sqlserver/docker-compose.yaml") +public class SqlServerDataSourceE2ETest { + + private static RemoteWebDriver browser; + + private static final String tenant = System.getProperty("user.name"); + + private static final String user = "admin"; + + private static final String password = "dolphinscheduler123"; + + private static final String dataSourceType = "SQLSERVER"; + + private static final String dataSourceName = "sqlserver_test"; + + private static final String dataSourceDescription = "sqlserver_test"; + + private static final String ip = "sqlserver"; + + private static final String port = "1433"; + + private static final String userName = "sa"; + + private static final String pgPassword = "OcP2020123"; + + private static final String database = "master"; + + private static final String jdbcParams = ""; + + @BeforeAll + public static void setup() { + new LoginPage(browser) + .login(user, password) + .goToNav(DataSourcePage.class); + } + + @Test + @Order(10) + void testCreateSqlServerDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.createDataSource(dataSourceType, dataSourceName, dataSourceDescription, ip, port, userName, pgPassword, + database, 
jdbcParams); + + WebDriverWaitFactory.createWebDriverWait(page.driver()).until(ExpectedConditions.invisibilityOfElementLocated( + new By.ByClassName("dialog-create-data-source"))); + + Awaitility.await().untilAsserted(() -> assertThat(page.dataSourceItemsList()) + .as("DataSource list should contain newly-created database") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(dataSourceName))); + } + + @Test + @Order(20) + void testDeleteSqlServerDataSource() { + final DataSourcePage page = new DataSourcePage(browser); + + page.delete(dataSourceName); + + Awaitility.await().untilAsserted(() -> { + browser.navigate().refresh(); + + assertThat( + page.dataSourceItemsList()).noneMatch( + it -> it.getText().contains(dataSourceName)); + }); + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java index d80eae7b89d9..696c8f172f6f 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TenantE2ETest.java @@ -26,16 +26,17 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class TenantE2ETest { + private static final String tenant = System.getProperty("user.name"); private static final 
String editDescription = "This is a test"; @@ -44,10 +45,9 @@ class TenantE2ETest { @BeforeAll public static void setup() { new LoginPage(browser) - .login("admin", "dolphinscheduler123") - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - ; + .login("admin", "dolphinscheduler123") + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class); } @Test @@ -57,9 +57,9 @@ void testCreateTenant() { page.create(tenant); Awaitility.await().untilAsserted(() -> assertThat(page.tenantList()) - .as("Tenant list should contain newly-created tenant") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(tenant))); + .as("Tenant list should contain newly-created tenant") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(tenant))); } @Test @@ -69,10 +69,8 @@ void testCreateDuplicateTenant() { page.create(tenant); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.tenantForm().buttonCancel().click(); } @@ -87,9 +85,9 @@ void testUpdateTenant() { Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); assertThat(page.tenantList()) - .as("Tenant list should contain newly-modified tenant") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(tenant)); + .as("Tenant list should contain newly-modified tenant") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(tenant)); }); } @@ -103,10 +101,8 @@ void testDeleteTenant() { browser.navigate().refresh(); assertThat( - page.tenantList() - ).noneMatch( - it -> it.getText().contains(tenant) - ); + page.tenantList()).noneMatch( + it -> it.getText().contains(tenant)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java index 8d287637b0de..7a23f7625c2e 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/TokenE2ETest.java @@ -26,12 +26,12 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TokenPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") public class TokenE2ETest { @@ -43,10 +43,9 @@ public class TokenE2ETest { @BeforeAll public static void setup() { new LoginPage(browser) - .login("admin", "dolphinscheduler123") - .goToNav(SecurityPage.class) - .goToTab(TokenPage.class) - ; + .login("admin", "dolphinscheduler123") + .goToNav(SecurityPage.class) + .goToTab(TokenPage.class); } @Test @@ -59,9 +58,9 @@ void testCreateToken() { browser.navigate().refresh(); assertThat(page.tokenList()) - .as("Token list should contain newly-created token") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(userName)); + .as("Token list should contain newly-created token") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(userName)); }); } @@ -76,9 +75,9 @@ void testEditToken() { browser.navigate().refresh(); assertThat(page.tokenList()) - .as("Token list should contain newly-modified token") - .extracting(WebElement::getText) - .isNotEqualTo(oldToken); + .as("Token list should contain newly-modified token") + .extracting(WebElement::getText) + 
.isNotEqualTo(oldToken); }); } @@ -92,7 +91,7 @@ void testDeleteToken() { browser.navigate().refresh(); assertThat(page.tokenList()) - .noneMatch(it -> it.getText().contains(userName)); + .noneMatch(it -> it.getText().contains(userName)); }); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UdfManageE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UdfManageE2ETest.java deleted file mode 100644 index 07610f9b1226..000000000000 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UdfManageE2ETest.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.cases; - -import static org.assertj.core.api.Assertions.assertThat; - -import org.apache.dolphinscheduler.e2e.core.Constants; -import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; -import org.apache.dolphinscheduler.e2e.pages.LoginPage; -import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; -import org.apache.dolphinscheduler.e2e.pages.resource.UdfManagePage; -import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; -import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; -import org.apache.dolphinscheduler.e2e.pages.security.UserPage; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.net.URL; -import java.net.URLConnection; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Comparator; - -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Order; -import org.junit.jupiter.api.Test; -import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.SneakyThrows; - -@DolphinScheduler(composeFiles = "docker/file-manage/docker-compose.yaml") -public class UdfManageE2ETest { - private static RemoteWebDriver browser; - - private static final String tenant = System.getProperty("user.name"); - - private static final String user = "admin"; - - private static final String password = "dolphinscheduler123"; - - private static final String email = "admin@gmail.com"; - - private static final String phone = "15800000000"; - - private static final String testDirectoryName = "test_directory"; - - private static final String testRenameDirectoryName = "test_rename_directory"; - - private 
static final String testUploadUdfFileName = "hive-jdbc-3.1.2.jar"; - - private static final Path testUploadUdfFilePath = Constants.HOST_TMP_PATH.resolve(testUploadUdfFileName); - - private static final String testUploadUdfRenameFileName = "hive-jdbc.jar"; - - @BeforeAll - public static void setup() { - TenantPage tenantPage = new LoginPage(browser) - .login(user, password) - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .create(tenant); - - Awaitility.await().untilAsserted(() -> assertThat(tenantPage.tenantList()) - .as("Tenant list should contain newly-created tenant") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(tenant))); - - UserPage userPage = tenantPage.goToNav(SecurityPage.class) - .goToTab(UserPage.class); - - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( - new By.ByClassName("name"))); - - userPage.update(user, user, email, phone, tenant) - .goToNav(ResourcePage.class) - .goToTab(UdfManagePage.class); - } - - @AfterAll - @SneakyThrows - public static void cleanup() { - Files.walk(Constants.HOST_CHROME_DOWNLOAD_PATH) - .sorted(Comparator.reverseOrder()) - .map(Path::toFile) - .forEach(File::delete); - - Files.deleteIfExists(testUploadUdfFilePath); - } - - @Test - @Order(10) - void testCreateDirectory() { - final UdfManagePage page = new UdfManagePage(browser); - - new WebDriverWait(page.driver(), Duration.ofSeconds(20)) - .until(ExpectedConditions.urlContains("/resource-manage")); - page.createDirectory(testDirectoryName); - Awaitility.await().untilAsserted(() -> assertThat(page.udfList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testDirectoryName))); - } - -//when s3 the directory cannot be renamed -// @Test -// @Order(20) -// void testRenameDirectory() { -// final UdfManagePage page = new UdfManagePage(browser); -// -// page.rename(testDirectoryName, testRenameDirectoryName); -// 
-// await().untilAsserted(() -> { -// browser.navigate().refresh(); -// -// assertThat(page.udfList()) -// .as("File list should contain newly-created file") -// .extracting(WebElement::getText) -// .anyMatch(it -> it.contains(testRenameDirectoryName)); -// }); -// } - - @Test - @Order(30) - void testDeleteDirectory() { - final UdfManagePage page = new UdfManagePage(browser); - page.delete(testDirectoryName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.udfList() - ).noneMatch( - it -> it.getText().contains(testDirectoryName) - ); - }); - } - - @Test - @Order(40) - @SneakyThrows - void testUploadUdf() { - final UdfManagePage page = new UdfManagePage(browser); - - downloadFile("https://repo1.maven.org/maven2/org/apache/hive/hive-jdbc/3.1.2/hive-jdbc-3.1.2.jar", testUploadUdfFilePath.toFile().getAbsolutePath()); - page.uploadFile(testUploadUdfFilePath.toFile().getAbsolutePath()); - Awaitility.await().untilAsserted(() -> { - assertThat(page.udfList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(testUploadUdfFileName)); - }); - } - - void downloadFile(String downloadUrl, String filePath) throws Exception { - int byteRead; - - URL url = new URL(downloadUrl); - - URLConnection conn = url.openConnection(); - InputStream inStream = conn.getInputStream(); - FileOutputStream fs = new FileOutputStream(filePath); - - byte[] buffer = new byte[1024]; - while ((byteRead = inStream.read(buffer)) != -1) { - fs.write(buffer, 0, byteRead); - } - - inStream.close(); - fs.close(); - } - - @Test - @Order(60) - void testRenameUdf() { - final UdfManagePage page = new UdfManagePage(browser); - page.rename(testUploadUdfFileName, testUploadUdfRenameFileName); - - Awaitility.await().untilAsserted(() -> { - assertThat(page.udfList()) - .as("File list should contain newly-created file") - .extracting(WebElement::getText) - .anyMatch(it -> 
it.contains(testUploadUdfRenameFileName)); - }); - } - - @Test - @Order(70) - void testDeleteUdf() { - final UdfManagePage page = new UdfManagePage(browser); - page.delete(testUploadUdfRenameFileName); - - Awaitility.await().untilAsserted(() -> { - browser.navigate().refresh(); - - assertThat( - page.udfList() - ).noneMatch( - it -> it.getText().contains(testUploadUdfRenameFileName) - ); - }); - } -} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java index 3ed263ec0e7c..59afb550ae73 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/UserE2ETest.java @@ -19,19 +19,16 @@ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; import org.apache.dolphinscheduler.e2e.pages.security.UserPage; -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; @@ -40,10 +37,11 @@ import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; +import org.testcontainers.shaded.org.awaitility.Awaitility; 
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class UserE2ETest { + private static final String tenant = System.getProperty("user.name"); private static final String user = "test_user"; private static final String password = "testUser123"; @@ -77,9 +75,9 @@ public static void setup() { @AfterAll public static void cleanup() { new NavBarPage(browser) - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .delete(tenant); + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class) + .delete(tenant); } @Test @@ -93,9 +91,9 @@ void testCreateUser() { browser.navigate().refresh(); assertThat(page.userList()) - .as("User list should contain newly-created user") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(user)); + .as("User list should contain newly-created user") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(user)); }); } @@ -106,10 +104,8 @@ void testCreateDuplicateUser() { page.create(user, password, email, phone, tenant); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.createUserForm().buttonCancel().click(); } @@ -119,7 +115,7 @@ void testCreateDuplicateUser() { void testEditUser() { UserPage page = new UserPage(browser); - new WebDriverWait(browser, Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( + WebDriverWaitFactory.createWebDriverWait(browser).until(ExpectedConditions.visibilityOfElementLocated( new By.ByClassName("name"))); browser.navigate().refresh(); @@ -129,12 +125,12 @@ void testEditUser() { Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); assertThat(page.userList()) - .as("User list should contain newly-modified User") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(editUser)); + .as("User 
list should contain newly-modified User") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(editUser)); }); } - + @Test @Order(40) void testDeleteUser() { @@ -146,10 +142,8 @@ void testDeleteUser() { browser.navigate().refresh(); assertThat( - page.userList() - ).noneMatch( - it -> it.getText().contains(user) || it.getText().contains(editUser) - ); + page.userList()).noneMatch( + it -> it.getText().contains(user) || it.getText().contains(editUser)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java index b7f2a9474cad..5b6aae81050d 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkerGroupE2ETest.java @@ -19,17 +19,14 @@ package org.apache.dolphinscheduler.e2e.cases; - import static org.assertj.core.api.Assertions.assertThat; import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.WorkerGroupPage; -import java.time.Duration; - -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -37,10 +34,11 @@ import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; +import org.testcontainers.shaded.org.awaitility.Awaitility; 
@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class WorkerGroupE2ETest { + private static final String workerGroupName = "test_worker_group"; private static final String editWorkerGroupName = "edit_worker_group"; @@ -49,9 +47,9 @@ class WorkerGroupE2ETest { @BeforeAll public static void setup() { new LoginPage(browser) - .login("admin", "dolphinscheduler123") - .goToNav(SecurityPage.class) - .goToTab(WorkerGroupPage.class); + .login("admin", "dolphinscheduler123") + .goToNav(SecurityPage.class) + .goToTab(WorkerGroupPage.class); } @Test @@ -59,8 +57,8 @@ public static void setup() { void testCreateWorkerGroup() { final WorkerGroupPage page = new WorkerGroupPage(browser); - new WebDriverWait(page.driver(), Duration.ofSeconds(20)) - .until(ExpectedConditions.urlContains("/security/worker-group-manage")); + WebDriverWaitFactory.createWebDriverWait(page.driver()) + .until(ExpectedConditions.urlContains("/security/worker-group-manage")); page.create(workerGroupName); @@ -68,9 +66,9 @@ void testCreateWorkerGroup() { browser.navigate().refresh(); assertThat(page.workerGroupList()) - .as("workerGroup list should contain newly-created workerGroup") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(workerGroupName)); + .as("workerGroup list should contain newly-created workerGroup") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(workerGroupName)); }); } @@ -81,10 +79,8 @@ void testCreateDuplicateWorkerGroup() { page.create(workerGroupName); - Awaitility.await().untilAsserted(() -> - assertThat(browser.findElement(By.tagName("body")).getText()) - .contains("already exists") - ); + Awaitility.await().untilAsserted(() -> assertThat(browser.findElement(By.tagName("body")).getText()) + .contains("already exists")); page.createWorkerForm().buttonCancel().click(); } @@ -98,13 +94,12 @@ void testEditWorkerGroup() { Awaitility.await().untilAsserted(() -> { browser.navigate().refresh(); assertThat(page.workerGroupList()) - 
.as("workerGroup list should contain newly-modified workerGroup") - .extracting(WebElement::getText) - .anyMatch(it -> it.contains(editWorkerGroupName)); + .as("workerGroup list should contain newly-modified workerGroup") + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(editWorkerGroupName)); }); } - @Test @Order(40) void testDeleteWorkerGroup() { @@ -116,10 +111,8 @@ void testDeleteWorkerGroup() { browser.navigate().refresh(); assertThat( - page.workerGroupList() - ).noneMatch( - it -> it.getText().contains(workerGroupName) || it.getText().contains(editWorkerGroupName) - ); + page.workerGroupList()).noneMatch( + it -> it.getText().contains(workerGroupName) || it.getText().contains(editWorkerGroupName)); }); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java index 0b97ab02af77..11543d709ce6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowE2ETest.java @@ -19,7 +19,10 @@ package org.apache.dolphinscheduler.e2e.cases; +import static org.assertj.core.api.Assertions.assertThat; + import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectDetailPage; @@ -34,7 +37,6 @@ import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; import org.apache.dolphinscheduler.e2e.pages.security.UserPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; import org.junit.jupiter.api.AfterAll; import 
org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; @@ -42,14 +44,11 @@ import org.openqa.selenium.By; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.time.Duration; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class WorkflowE2ETest { + private static final String project = "test-workflow-1"; private static final String workflow = "test-workflow-1"; @@ -76,63 +75,60 @@ public static void setup() { .goToNav(SecurityPage.class) .goToTab(UserPage.class); - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( + WebDriverWaitFactory.createWebDriverWait(userPage.driver()).until(ExpectedConditions.visibilityOfElementLocated( new By.ByClassName("name"))); userPage.update(user, user, email, phone, tenant) .goToNav(ProjectPage.class) - .create(project) - ; + .create(project); } @AfterAll public static void cleanup() { new NavBarPage(browser) - .goToNav(ProjectPage.class) - .goTo(project) - .goToTab(WorkflowDefinitionTab.class) - .delete(workflow); + .goToNav(ProjectPage.class) + .goTo(project) + .goToTab(WorkflowDefinitionTab.class) + .delete(workflow); new NavBarPage(browser) - .goToNav(ProjectPage.class) - .delete(project); + .goToNav(ProjectPage.class) + .delete(project); browser.navigate().refresh(); new NavBarPage(browser) - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .delete(tenant); + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class) + .delete(tenant); } @Test @Order(1) void testCreateWorkflow() { WorkflowDefinitionTab workflowDefinitionPage = - new ProjectPage(browser) - .goTo(project) - .goToTab(WorkflowDefinitionTab.class); + new ProjectPage(browser) + .goTo(project) + 
.goToTab(WorkflowDefinitionTab.class); workflowDefinitionPage - .createWorkflow() + .createWorkflow() - . addTask(TaskType.SHELL) - .script("echo ${today}\necho ${global_param}\n") - .name("test-1") - .addParam("today", "${system.datetime}") - .submit() + .addTask(TaskType.SHELL) + .script("echo ${today}\necho ${global_param}\n") + .name("test-1") + .addParam("today", "${system.datetime}") + .submit() - .submit() - .name(workflow) - .addGlobalParam("global_param", "hello world") - .submit() - ; + .submit() + .name(workflow) + .addGlobalParam("global_param", "hello world") + .submit(); Awaitility.await().untilAsserted(() -> assertThat(workflowDefinitionPage.workflowList()) .as("Workflow list should contain newly-created workflow") .anyMatch( - it -> it.getText().contains(workflow) - )); + it -> it.getText().contains(workflow))); workflowDefinitionPage.publish(workflow); } @@ -141,28 +137,26 @@ void testCreateWorkflow() { void testCreateSubWorkflow() { final String workflow = "test-sub-workflow-1"; WorkflowDefinitionTab workflowDefinitionPage = - new ProjectPage(browser) - .goToNav(ProjectPage.class) - .goTo(project) - .goToTab(WorkflowDefinitionTab.class); + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(project) + .goToTab(WorkflowDefinitionTab.class); workflowDefinitionPage - .createSubProcessWorkflow() + .createSubProcessWorkflow() - . 
addTask(TaskType.SUB_PROCESS) - .childNode("test-workflow-1") - .name("test-sub-1") - .submit() + .addTask(TaskType.SUB_PROCESS) + .childNode("test-workflow-1") + .name("test-sub-1") + .submit() - .submit() - .name(workflow) - .addGlobalParam("global_param", "hello world") - .submit() - ; + .submit() + .name(workflow) + .addGlobalParam("global_param", "hello world") + .submit(); Awaitility.await().untilAsserted(() -> assertThat( - workflowDefinitionPage.workflowList() - ).anyMatch(it -> it.getText().contains(workflow))); + workflowDefinitionPage.workflowList()).anyMatch(it -> it.getText().contains(workflow))); workflowDefinitionPage.publish(workflow); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java index 0993e61b813a..e29c0f1f87b7 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowHttpTaskE2ETest.java @@ -19,7 +19,10 @@ package org.apache.dolphinscheduler.e2e.cases; +import static org.assertj.core.api.Assertions.assertThat; + import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectDetailPage; @@ -31,6 +34,7 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; import org.apache.dolphinscheduler.e2e.pages.security.UserPage; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import 
org.junit.jupiter.api.Order; @@ -38,16 +42,10 @@ import org.openqa.selenium.By; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; import org.testcontainers.shaded.org.awaitility.Awaitility; - -import java.time.Duration; - -import static org.assertj.core.api.Assertions.assertThat; @DolphinScheduler(composeFiles = "docker/workflow-http/docker-compose.yaml") public class WorkflowHttpTaskE2ETest { - private static final String project = "test-workflow-1"; private static final String workflow = "test-workflow-1"; @@ -76,13 +74,12 @@ public static void setup() { .goToNav(SecurityPage.class) .goToTab(UserPage.class); - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( + WebDriverWaitFactory.createWebDriverWait(userPage.driver()).until(ExpectedConditions.visibilityOfElementLocated( new By.ByClassName("name"))); userPage.update(user, user, email, phone, tenant) .goToNav(ProjectPage.class) - .create(project) - ; + .create(project); } @AfterAll @@ -115,7 +112,7 @@ void testCreateWorkflow() { workflowDefinitionPage .createWorkflow() - . 
addTask(WorkflowForm.TaskType.HTTP) + .addTask(WorkflowForm.TaskType.HTTP) .url(mockServerUrl) .name("test-1") .addParam("today", "${system.datetime}") @@ -124,18 +121,15 @@ void testCreateWorkflow() { .submit() .name(workflow) .addGlobalParam("global_param", "hello world") - .submit() - ; + .submit(); Awaitility.await().untilAsserted(() -> assertThat(workflowDefinitionPage.workflowList()) .as("Workflow list should contain newly-created workflow") .anyMatch( - it -> it.getText().contains(workflow) - )); + it -> it.getText().contains(workflow))); workflowDefinitionPage.publish(workflow); } - @Test @Order(30) void testRunWorkflow() { diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java index d61a9ddd2a40..332bb4ff6fb6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowJavaTaskE2ETest.java @@ -19,7 +19,10 @@ package org.apache.dolphinscheduler.e2e.cases; +import static org.assertj.core.api.Assertions.assertThat; + import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectDetailPage; @@ -32,23 +35,19 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; import org.apache.dolphinscheduler.e2e.pages.security.UserPage; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import 
org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; -import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; import org.testcontainers.shaded.org.awaitility.Awaitility; -import java.time.Duration; - -import static org.assertj.core.api.Assertions.assertThat; - @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") public class WorkflowJavaTaskE2ETest { + private static final String project = "test-workflow-1"; private static final String workflow = "test-workflow-1"; @@ -92,13 +91,12 @@ public static void setup() { .goToNav(SecurityPage.class) .goToTab(UserPage.class); - new WebDriverWait(userPage.driver(), Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated( - new By.ByClassName("name"))); + WebDriverWaitFactory.createWebDriverWait(userPage.driver()) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName("name"))); userPage.update(user, user, email, phone, tenant) .goToNav(ProjectPage.class) - .create(project) - ; + .create(project); } @AfterAll @@ -121,8 +119,6 @@ public static void cleanup() { .delete(tenant); } - - @Test @Order(1) void testCreateWorkflow() { @@ -133,7 +129,7 @@ void testCreateWorkflow() { workflowDefinitionPage .createWorkflow() - . 
addTask(WorkflowForm.TaskType.JAVA) + .addTask(WorkflowForm.TaskType.JAVA) .script(javaContent) .name("test-1") .addParam("today", "${system.datetime}") @@ -142,18 +138,15 @@ void testCreateWorkflow() { .submit() .name(workflow) .addGlobalParam("global_param", "hello world") - .submit() - ; + .submit(); Awaitility.await().untilAsserted(() -> assertThat(workflowDefinitionPage.workflowList()) .as("Workflow list should contain newly-created workflow") .anyMatch( - it -> it.getText().contains(workflow) - )); + it -> it.getText().contains(workflow))); workflowDefinitionPage.publish(workflow); } - @Test @Order(30) void testRunWorkflow() { diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java index edf4bc59e2b4..685d5fdc523b 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/WorkflowSwitchE2ETest.java @@ -19,6 +19,8 @@ package org.apache.dolphinscheduler.e2e.cases; +import static org.assertj.core.api.Assertions.assertThat; + import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; import org.apache.dolphinscheduler.e2e.pages.LoginPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; @@ -35,19 +37,18 @@ import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; -import org.testcontainers.shaded.org.awaitility.Awaitility; +import java.util.List; + import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.openqa.selenium.remote.RemoteWebDriver; - -import java.util.List; - -import 
static org.assertj.core.api.Assertions.assertThat; +import org.testcontainers.shaded.org.awaitility.Awaitility; @DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") class WorkflowSwitchE2ETest { + private static final String project = "test-workflow-1"; private static final String workflow = "test-workflow-1"; private static final String ifBranchName = "key==1"; @@ -60,31 +61,28 @@ class WorkflowSwitchE2ETest { @BeforeAll public static void setup() { new LoginPage(browser) - .login("admin", "dolphinscheduler123") - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .create(tenant) - .goToNav(ProjectPage.class) - .create(project) - ; + .login("admin", "dolphinscheduler123") + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class) + .create(tenant) + .goToNav(ProjectPage.class) + .create(project); } @AfterAll public static void cleanup() { new NavBarPage(browser) - .goToNav(ProjectPage.class) - .goTo(project) - .goToTab(WorkflowDefinitionTab.class) - .cancelPublishAll() - .deleteAll() - ; + .goToNav(ProjectPage.class) + .goTo(project) + .goToTab(WorkflowDefinitionTab.class) + .cancelPublishAll() + .deleteAll(); new NavBarPage(browser) - .goToNav(ProjectPage.class) - .delete(project) - .goToNav(SecurityPage.class) - .goToTab(TenantPage.class) - .delete(tenant) - ; + .goToNav(ProjectPage.class) + .delete(project) + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class) + .delete(tenant); } @Test @@ -92,33 +90,33 @@ public static void cleanup() { void testCreateSwitchWorkflow() { final WorkflowDefinitionTab workflowDefinitionPage = - new ProjectPage(browser) - .goTo(project) - .goToTab(WorkflowDefinitionTab.class); + new ProjectPage(browser) + .goTo(project) + .goToTab(WorkflowDefinitionTab.class); WorkflowForm workflowForm = workflowDefinitionPage.createWorkflow(); - workflowForm. 
addTask(TaskType.SHELL) - .script("echo ${today}\necho ${global_param}\n") - .name("pre-task") - .submit(); + workflowForm.addTask(TaskType.SHELL) + .script("echo ${today}\necho ${global_param}\n") + .name("pre-task") + .submit(); SwitchTaskForm switchTaskForm = workflowForm.addTask(TaskType.SWITCH); switchTaskForm.preTask("pre-task") - .name("switch") - .submit(); + .name("switch") + .submit(); workflowForm.addTask(TaskType.SHELL) - .script("echo ${key}") - .preTask("switch") - .name(ifBranchName) - .submit(); + .script("echo ${key}") + .preTask("switch") + .name(ifBranchName) + .submit(); workflowForm.addTask(TaskType.SHELL) - .script("echo ${key}") - .preTask("switch") - .name(elseBranchName) - .submit(); + .script("echo ${key}") + .preTask("switch") + .name(elseBranchName) + .submit(); // format dag workflowForm.formatDAG().confirm(); @@ -130,13 +128,12 @@ workflowForm. addTask(TaskType.SHELL) switchTaskForm.submit(); workflowForm.submit() - .name(workflow) - .addGlobalParam("key", "1") - .submit(); + .name(workflow) + .addGlobalParam("key", "1") + .submit(); Awaitility.await().untilAsserted(() -> assertThat( - workflowDefinitionPage.workflowList() - ).anyMatch(it -> it.getText().contains(workflow))); + workflowDefinitionPage.workflowList()).anyMatch(it -> it.getText().contains(workflow))); workflowDefinitionPage.publish(workflow); } @@ -178,8 +175,10 @@ void testRunWorkflow() { Awaitility.await().untilAsserted(() -> { assertThat(taskInstances.size()).isEqualTo(3); - assertThat(taskInstances.stream().filter(row -> row.name().contains(ifBranchName)).count()).isEqualTo(1); - assertThat(taskInstances.stream().noneMatch(row -> row.name().contains(elseBranchName))).isTrue(); + assertThat(taskInstances.stream().filter(row -> row.taskInstanceName().contains(ifBranchName)).count()) + .isEqualTo(1); + assertThat(taskInstances.stream().noneMatch(row -> row.taskInstanceName().contains(elseBranchName))) + .isTrue(); }); } } diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java new file mode 100644 index 000000000000..239da7dfeb0e --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/PythonTaskE2ETest.java @@ -0,0 +1,338 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.e2e.cases.tasks; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.cases.workflow.BaseWorkflowE2ETest; +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverHolder; +import org.apache.dolphinscheduler.e2e.models.environment.PythonEnvironment; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.TaskInstanceTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowDefinitionTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowInstanceTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.PythonTaskForm; +import org.apache.dolphinscheduler.e2e.pages.resource.FileManagePage; +import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; +import org.apache.dolphinscheduler.e2e.pages.security.EnvironmentPage; +import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; +import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; +import org.apache.dolphinscheduler.e2e.pages.security.UserPage; + +import java.util.Date; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; + +@DolphinScheduler(composeFiles = "docker/python-task/docker-compose.yaml") +public class PythonTaskE2ETest extends BaseWorkflowE2ETest { + + private static final PythonEnvironment pythonEnvironment = new PythonEnvironment(); + + @BeforeAll + public static void setup() { + browser = WebDriverHolder.getWebDriver(); + + TenantPage tenantPage = new LoginPage(browser) + .login(adminUser) + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class); + + if (tenantPage.tenants().stream().noneMatch(tenant 
-> tenant.tenantCode().equals(adminUser.getTenant()))) { + tenantPage + .create(adminUser.getTenant()) + .goToNav(SecurityPage.class) + .goToTab(UserPage.class) + .update(adminUser); + } + tenantPage + .goToNav(SecurityPage.class) + .goToTab(EnvironmentPage.class) + .createEnvironmentUntilSuccess(pythonEnvironment.getEnvironmentName(), + pythonEnvironment.getEnvironmentConfig(), + pythonEnvironment.getEnvironmentDesc(), + pythonEnvironment.getEnvironmentWorkerGroup()); + + tenantPage + .goToNav(ProjectPage.class) + .createProjectUntilSuccess(projectName); + } + + @Test + @Order(10) + void testRunPythonTasks_SuccessCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + // todo: use yaml to define the workflow + String workflowName = "PythonSuccessCase"; + String taskName = "PythonSuccessTask"; + String pythonScripts = "print(\"success\")"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + @Order(20) + void testRunPythonTasks_WorkflowParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + // todo: use yaml to define the workflow + String workflowName = "PythonWorkflowParamsCase"; + String taskName = 
"PythonWorkflowParamsTask"; + String pythonScripts = "import sys\n" + + "\n" + + "if '${name}' == 'tom':\n" + + " print('success')\n" + + "else:\n" + + " sys.exit(2)"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .addGlobalParam("name", "tom") + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + @Order(30) + void testRunPythonTasks_LocalParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "PythonLocalParamsCase"; + String taskName = "PythonLocalParamsSuccess"; + String pythonScripts = "import sys\n" + + "\n" + + "if '${name}' == 'tom':\n" + + " print('success')\n" + + "else:\n" + + " sys.exit(2)"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .addParam("name", "tom") + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + 
assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + @Order(40) + void testRunPythonTasks_GlobalParamsOverrideLocalParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "PythonLocalParamsOverrideWorkflowParamsCase"; + String taskName = "PythonLocalParamsOverrideWorkflowParamsSuccess"; + String pythonScripts = "import sys\n" + + "\n" + + "if '${name}' == 'jerry':\n" + + " print('success')\n" + + "else:\n" + + " sys.exit(2)"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .addParam("name", "tom") + .submit() + + .submit() + .name(workflowName) + .addGlobalParam("name", "jerry") + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + @Order(50) + void testRunPythonTasks_UsingResourceFile() { + long current_timestamp = new Date().getTime(); + String testFileName = String.format("echo_%s", current_timestamp); + new ResourcePage(browser) + .goToNav(ResourcePage.class) + .goToTab(FileManagePage.class) + .createFileUntilSuccess(testFileName, "echo 123"); + + final WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "PythonUsingResourceFileWorkflowCase"; + String taskName = "PythonUsingResourceFileSuccessTask"; + String pythonScripts = "import sys\n" + + 
"\n" + + "file_content = \"\"\n" + + "\n" + + "with open('${file_name}', 'r', encoding='UTF8') as f:\n" + + " file_content = f.read()\n" + + "\n" + + "if len(file_content) != 0:\n" + + " print(f'file_content: {file_content}')\n" + + "else:\n" + + " sys.exit(2)\n" + + " "; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .selectResource(testFileName) + .addParam("file_name", String.format("%s.sh", testFileName)) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + @Order(60) + void testRunPythonTasks_FailedCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "PythonFailedWorkflowCase"; + String taskName = "PythonFailedTask"; + String pythonScripts = "import sys\n" + + "sys.exit(1)"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.PYTHON) + .script(pythonScripts) + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceFailed(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = 
untilTaskInstanceFailed(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java new file mode 100644 index 000000000000..12f8a4b14ef4 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/tasks/ShellTaskE2ETest.java @@ -0,0 +1,286 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.e2e.cases.tasks; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.dolphinscheduler.e2e.cases.workflow.BaseWorkflowE2ETest; +import org.apache.dolphinscheduler.e2e.core.DolphinScheduler; +import org.apache.dolphinscheduler.e2e.core.WebDriverHolder; +import org.apache.dolphinscheduler.e2e.pages.LoginPage; +import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.TaskInstanceTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowDefinitionTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowInstanceTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.ShellTaskForm; +import org.apache.dolphinscheduler.e2e.pages.resource.FileManagePage; +import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; +import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; +import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; +import org.apache.dolphinscheduler.e2e.pages.security.UserPage; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; + +@TestMethodOrder(MethodOrderer.MethodName.class) +@DolphinScheduler(composeFiles = "docker/basic/docker-compose.yaml") +public class ShellTaskE2ETest extends BaseWorkflowE2ETest { + + @BeforeAll + public static void setup() { + browser = WebDriverHolder.getWebDriver(); + + TenantPage tenantPage = new LoginPage(browser) + .login(adminUser) + .goToNav(SecurityPage.class) + .goToTab(TenantPage.class); + + if (tenantPage.tenants().stream().noneMatch(tenant -> tenant.tenantCode().equals(adminUser.getTenant()))) { + tenantPage + .create(adminUser.getTenant()) + .goToNav(SecurityPage.class) + .goToTab(UserPage.class) + 
.update(adminUser); + } + + tenantPage + .goToNav(ProjectPage.class) + .createProjectUntilSuccess(projectName); + } + + @Test + void testRunShellTasks_SuccessCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + // todo: use yaml to define the workflow + String workflowName = "SuccessCase"; + String taskName = "ShellSuccess"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("echo hello world\n") + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + void testRunShellTasks_WorkflowParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + // todo: use yaml to define the workflow + String workflowName = "WorkflowParamsCase"; + String taskName = "ShellSuccess"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("[ \"${name}\" = \"tom\" ] && echo \"success\" || { echo \"failed\"; exit 1; }") + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .addGlobalParam("name", "tom") + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = 
untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + void testRunShellTasks_LocalParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "LocalParamsCase"; + String taskName = "ShellSuccess"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("[ \"${name}\" = \"tom\" ] && echo \"success\" || { echo \"failed\"; exit 1; }") + .name(taskName) + .addParam("name", "tom") + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + void testRunShellTasks_GlobalParamsOverrideLocalParamsCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "LocalParamsOverrideWorkflowParamsCase"; + String taskName = "ShellSuccess"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("[ \"${name}\" = \"jerry\" ] && echo \"success\" || { echo \"failed\"; exit 1; }") + .name(taskName) + .addParam("name", "tom") + .submit() + + .submit() + .name(workflowName) + .addGlobalParam("name", "jerry") + .submit(); + + 
untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + void testRunShellTasks_UsingResourceFile() { + String testFileName = "echo"; + new ResourcePage(browser) + .goToNav(ResourcePage.class) + .goToTab(FileManagePage.class) + .createFileUntilSuccess(testFileName, "echo 123"); + + final WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "UsingResourceFile"; + String taskName = "ShellSuccess"; + workflowDefinitionPage + .createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("cat " + testFileName + ".sh") + .name(taskName) + .selectResource(testFileName) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceSuccess(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceSuccess(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + + @Test + void testRunShellTasks_FailedCase() { + WorkflowDefinitionTab workflowDefinitionPage = + new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + String workflowName = "FailedCase"; + String taskName = "ShellFailed"; + workflowDefinitionPage + 
.createWorkflow() + .addTask(WorkflowForm.TaskType.SHELL) + .script("echo 'I am failed'\n exit1\n") + .name(taskName) + .submit() + + .submit() + .name(workflowName) + .submit(); + + untilWorkflowDefinitionExist(workflowName); + + workflowDefinitionPage.publish(workflowName); + + runWorkflow(workflowName); + untilWorkflowInstanceExist(workflowName); + WorkflowInstanceTab.Row workflowInstance = untilWorkflowInstanceFailed(workflowName); + assertThat(workflowInstance.executionTime()).isEqualTo(1); + + TaskInstanceTab.Row taskInstance = untilTaskInstanceFailed(workflowName, taskName); + assertThat(taskInstance.retryTimes()).isEqualTo(0); + } + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/workflow/BaseWorkflowE2ETest.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/workflow/BaseWorkflowE2ETest.java new file mode 100644 index 000000000000..550f83d7191c --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/cases/workflow/BaseWorkflowE2ETest.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.e2e.cases.workflow; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +import org.apache.dolphinscheduler.e2e.models.users.AdminUser; +import org.apache.dolphinscheduler.e2e.pages.project.ProjectDetailPage; +import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.TaskInstanceTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowDefinitionTab; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowInstanceTab; + +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + +import lombok.extern.slf4j.Slf4j; + +import org.openqa.selenium.remote.RemoteWebDriver; + +@Slf4j +public abstract class BaseWorkflowE2ETest { + + protected static final String projectName = UUID.randomUUID().toString(); + + protected static final AdminUser adminUser = new AdminUser(); + + protected static RemoteWebDriver browser; + + protected void untilWorkflowDefinitionExist(String workflowName) { + WorkflowDefinitionTab workflowDefinitionPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName) + .goToTab(WorkflowDefinitionTab.class); + + await().untilAsserted(() -> assertThat(workflowDefinitionPage.workflowList()) + .as("Workflow list should contain newly-created workflow: %s", workflowName) + .anyMatch( + it -> it.getText().contains(workflowName))); + } + + protected void runWorkflow(String workflowName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName); + + projectPage + .goToTab(WorkflowDefinitionTab.class) + .run(workflowName) + .submit(); + + } + + protected WorkflowInstanceTab.Row untilWorkflowInstanceExist(String workflowName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + 
.goToNav(ProjectPage.class) + .goTo(projectName); + + return await() + .until(() -> { + browser.navigate().refresh(); + return projectPage + .goToTab(WorkflowInstanceTab.class) + .instances() + .stream() + .filter(it -> it.workflowInstanceName().startsWith(workflowName)) + .findFirst() + .orElse(null); + }, Objects::nonNull); + } + + protected WorkflowInstanceTab.Row untilWorkflowInstanceSuccess(String workflowName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName); + return await() + .until(() -> { + browser.navigate().refresh(); + return projectPage + .goToTab(WorkflowInstanceTab.class) + .instances() + .stream() + .filter(it -> it.workflowInstanceName().startsWith(workflowName)) + .filter(WorkflowInstanceTab.Row::isSuccess) + .findFirst() + .orElse(null); + }, Objects::nonNull); + } + + protected WorkflowInstanceTab.Row untilWorkflowInstanceFailed(String workflowName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName); + return await() + .until(() -> { + browser.navigate().refresh(); + List workflowInstances = projectPage + .goToTab(WorkflowInstanceTab.class) + .instances() + .stream() + .filter(it -> it.workflowInstanceName().startsWith(workflowName)) + .filter(WorkflowInstanceTab.Row::isFailed) + .collect(Collectors.toList()); + if (workflowInstances.isEmpty()) { + return null; + } + if (workflowInstances.size() > 1) { + throw new RuntimeException("More than one failed workflow instance found: " + + workflowInstances.stream() + .map(WorkflowInstanceTab.Row::workflowInstanceName) + .collect(Collectors.joining(", "))); + } + return workflowInstances.get(0); + }, Objects::nonNull); + } + + protected TaskInstanceTab.Row untilTaskInstanceSuccess(String workflowName, String taskName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName); + return await() + .until(() -> { 
+ browser.navigate().refresh(); + List taskInstances = projectPage + .goToTab(TaskInstanceTab.class) + .instances() + .stream() + .filter(it -> it.taskInstanceName().startsWith(taskName)) + .filter(it -> it.workflowInstanceName().startsWith(workflowName)) + .filter(TaskInstanceTab.Row::isSuccess) + .collect(Collectors.toList()); + + if (taskInstances.isEmpty()) { + return null; + } + if (taskInstances.size() > 1) { + throw new RuntimeException("More than one failed task instance found: " + + taskInstances.stream() + .map(TaskInstanceTab.Row::taskInstanceName).collect(Collectors.joining(", "))); + } + return taskInstances.get(0); + }, Objects::nonNull); + } + + protected TaskInstanceTab.Row untilTaskInstanceFailed(String workflowName, String taskName) { + final ProjectDetailPage projectPage = new ProjectPage(browser) + .goToNav(ProjectPage.class) + .goTo(projectName); + return await() + .until(() -> { + browser.navigate().refresh(); + List taskInstances = projectPage + .goToTab(TaskInstanceTab.class) + .instances() + .stream() + .filter(it -> it.taskInstanceName().startsWith(taskName)) + .filter(it -> it.workflowInstanceName().startsWith(workflowName)) + .filter(TaskInstanceTab.Row::isFailed) + .collect(Collectors.toList()); + + if (taskInstances.isEmpty()) { + return null; + } + if (taskInstances.size() > 1) { + throw new RuntimeException("More than one failed task instance found: " + + taskInstances.stream() + .map(TaskInstanceTab.Row::taskInstanceName).collect(Collectors.joining(", "))); + } + return taskInstances.get(0); + }, Objects::nonNull); + } + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/IEnvironment.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/IEnvironment.java new file mode 100644 index 000000000000..7469dfa271e0 --- /dev/null +++ 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/IEnvironment.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.e2e.models.environment; + +public interface IEnvironment { + + String getEnvironmentName(); + + String getEnvironmentConfig(); + + String getEnvironmentDesc(); + + String getEnvironmentWorkerGroup(); + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/PythonEnvironment.java similarity index 55% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java rename to dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/PythonEnvironment.java index 44901a797c2f..63bf7e3cbba3 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UDFUser.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/environment/PythonEnvironment.java @@ -14,48 +14,39 
@@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.entity; -import java.util.Date; +package org.apache.dolphinscheduler.e2e.models.environment; import lombok.Data; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.annotation.TableId; -import com.baomidou.mybatisplus.annotation.TableName; - @Data -@TableName("t_ds_relation_udfs_user") -public class UDFUser { - - /** - * id - */ - @TableId(value = "id", type = IdType.AUTO) - private Integer id; - - /** - * id - */ - private int userId; - - /** - * udf id - */ - private int udfId; - - /** - * permission - */ - private int perm; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; +public class PythonEnvironment implements IEnvironment { + + private String environmentName; + + private String environmentConfig; + + private String environmentDesc; + + private String environmentWorkerGroup; + + @Override + public String getEnvironmentName() { + return "python-e2e"; + } + + @Override + public String getEnvironmentConfig() { + return "export PYTHON_LAUNCHER=/usr/bin/python3"; + } + + @Override + public String getEnvironmentDesc() { + return "pythonEnvDesc"; + } + + @Override + public String getEnvironmentWorkerGroup() { + return "default"; + } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/BootstrapTenant.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/BootstrapTenant.java new file mode 100644 index 000000000000..15af3197e200 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/BootstrapTenant.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.e2e.models.tenant; + +public class BootstrapTenant implements ITenant { + + @Override + public String getTenantCode() { + return System.getProperty("user.name"); + } + + @Override + public String getDescription() { + return "bootstrap tenant"; + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParamsConstants.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/DefaultTenant.java similarity index 75% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParamsConstants.java rename to dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/DefaultTenant.java index e50755a72812..afd046e4b4e6 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParamsConstants.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/DefaultTenant.java @@ -15,13 +15,17 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.pigeon; +package org.apache.dolphinscheduler.e2e.models.tenant; -public class PigeonParamsConstants { +public class DefaultTenant implements ITenant { - public static String NAME_TARGET_JOB_NAME = "targetJobName"; - public static String TARGET_JOB_NAME = NAME_TARGET_JOB_NAME; + @Override + public String getTenantCode() { + return "default"; + } - private PigeonParamsConstants() { + @Override + public String getDescription() { + return ""; } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/ITenant.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/ITenant.java new file mode 100644 index 000000000000..641acbf9d776 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/tenant/ITenant.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.e2e.models.tenant; + +public interface ITenant { + + String getTenantCode(); + + String getDescription(); + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/AdminUser.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/AdminUser.java new file mode 100644 index 000000000000..50189759e15e --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/AdminUser.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.e2e.models.users; + +import org.apache.dolphinscheduler.e2e.models.tenant.BootstrapTenant; +import org.apache.dolphinscheduler.e2e.models.tenant.ITenant; + +import lombok.Data; + +@Data +public class AdminUser implements IUser { + + private String userName; + + private String password; + + private String email; + + private String phone; + + private ITenant tenant; + + @Override + public String getUserName() { + return "admin"; + } + + @Override + public String getPassword() { + return "dolphinscheduler123"; + } + + @Override + public String getEmail() { + return "admin@gmail.com"; + } + + @Override + public String getPhone() { + return "15800000000"; + } + + @Override + public String getTenant() { + return new BootstrapTenant().getTenantCode(); + } + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/IUser.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/IUser.java new file mode 100644 index 000000000000..1782478a62e4 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/models/users/IUser.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.e2e.models.users; + +public interface IUser { + + String getUserName(); + + String getPassword(); + + String getEmail(); + + String getPhone(); + + String getTenant(); + +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/LoginPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/LoginPage.java index cde8c9d7780e..65f0706e1945 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/LoginPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/LoginPage.java @@ -19,32 +19,31 @@ package org.apache.dolphinscheduler.e2e.pages; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.models.users.IUser; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; -import org.apache.dolphinscheduler.e2e.pages.security.TenantPage; + +import lombok.Getter; +import lombok.SneakyThrows; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; -import lombok.SneakyThrows; - -import java.time.Duration; @Getter public final class LoginPage extends NavBarPage { + @FindBys({ - @FindBy(className = "input-user-name"), - @FindBy(tagName = "input"), + @FindBy(className = "input-user-name"), + @FindBy(tagName = "input"), }) private WebElement inputUsername; - @FindBys( { - @FindBy(className = "input-password"), - @FindBy(tagName = "input"), + @FindBys({ + @FindBy(className = "input-password"), + 
@FindBy(tagName = "input"), }) private WebElement inputPassword; @@ -59,17 +58,21 @@ public LoginPage(RemoteWebDriver driver) { } @SneakyThrows - public NavBarPage login(String username, String password) { - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.elementToBeClickable(buttonSwitchLanguage)); + public NavBarPage login(IUser user) { + return login(user.getUserName(), user.getPassword()); + } + @SneakyThrows + public NavBarPage login(String username, String password) { + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(buttonSwitchLanguage)); buttonSwitchLanguage().click(); inputUsername().sendKeys(username); inputPassword().sendKeys(password); buttonLogin().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)) - .until(ExpectedConditions.urlContains("/home")); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/home")); return new NavBarPage(driver); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/CodeEditor.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/CodeEditor.java index e55751c367a4..19954bc8a19f 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/CodeEditor.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/CodeEditor.java @@ -19,29 +19,38 @@ */ package org.apache.dolphinscheduler.e2e.pages.common; -import org.openqa.selenium.By; +import org.apache.dolphinscheduler.e2e.core.Constants; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; + +import java.util.List; + +import lombok.Getter; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; + +import org.junit.platform.commons.util.StringUtils; +import org.openqa.selenium.JavascriptExecutor; 
+import org.openqa.selenium.Keys; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Actions; -import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; - -import lombok.Getter; - -import java.time.Duration; - import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; @Getter +@Slf4j public final class CodeEditor { + @FindBys({ - @FindBy(className = "monaco-editor"), - @FindBy(className = "view-line"), + @FindBy(className = "monaco-editor"), + @FindBy(className = "view-line"), }) - private WebElement editor; + private List editor; + + @FindBy(className = "pre-tasks-model") + private WebElement scrollBar; private WebDriver driver; @@ -50,14 +59,72 @@ public CodeEditor(WebDriver driver) { this.driver = driver; } + @SneakyThrows public CodeEditor content(String content) { - new WebDriverWait(this.driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(editor)); - - editor.click(); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.elementToBeClickable(editor.get(0))); Actions actions = new Actions(this.driver); - actions.moveToElement(editor).sendKeys(content).perform(); + + List contentList = List.of(content.split(Constants.LINE_SEPARATOR)); + + try { + ((JavascriptExecutor) driver).executeScript("arguments[0].scrollIntoView();", scrollBar); + } catch (org.openqa.selenium.NoSuchElementException ignored) { + log.warn("scroll bar not found, skipping..."); + } + + for (int i = 0; i < contentList.size(); i++) { + String editorLineText; + String inputContent = contentList.get(i); + if (i == 0) { + actions.moveToElement(editor.get(i)) + .click() + .sendKeys(inputContent) + .sendKeys(Constants.LINE_SEPARATOR) + .perform(); + continue; + } else { + editorLineText = 
editor.get(i).getText(); + } + + if (StringUtils.isNotBlank(inputContent)) { + if (editorLineText.isEmpty()) { + actions.moveToElement(editor.get(i)) + .click() + .sendKeys(inputContent) + .sendKeys(Constants.LINE_SEPARATOR) + .perform(); + Thread.sleep(Constants.DEFAULT_SLEEP_MILLISECONDS); + } else { + for (int p = 0; p < editorLineText.strip().length(); p++) { + clearLine(actions, editor.get(i)); + } + if (!editorLineText.isEmpty()) { + clearLine(actions, editor.get(i)); + } + actions.moveToElement(editor.get(i)) + .click() + .sendKeys(inputContent) + .sendKeys(Constants.LINE_SEPARATOR) + .perform(); + Thread.sleep(Constants.DEFAULT_SLEEP_MILLISECONDS); + } + } else { + actions.moveToElement(editor.get(i)) + .click() + .sendKeys(Constants.LINE_SEPARATOR) + .perform(); + Thread.sleep(Constants.DEFAULT_SLEEP_MILLISECONDS); + } + } return this; } + + private void clearLine(Actions actions, WebElement element) { + actions.moveToElement(element) + .click() + .sendKeys(Keys.BACK_SPACE) + .perform(); + } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/HttpInput.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/HttpInput.java index ce3f07c81965..2a200a4cdae8 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/HttpInput.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/HttpInput.java @@ -18,22 +18,22 @@ * */ - package org.apache.dolphinscheduler.e2e.pages.common; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; + import lombok.Getter; + import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; import 
org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.time.Duration; @Getter public class HttpInput { + @FindBys({ @FindBy(className = "input-url-name"), @FindBy(tagName = "input") @@ -42,15 +42,13 @@ public class HttpInput { private WebDriver driver; - - public HttpInput(WebDriver driver) { PageFactory.initElements(driver, this); this.driver = driver; } public HttpInput content(String content) { - new WebDriverWait(this.driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(urlInput)); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.elementToBeClickable(urlInput)); urlInput().sendKeys(content); return this; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java index 0a6373977a79..86ebb54d174b 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java @@ -20,40 +20,39 @@ package org.apache.dolphinscheduler.e2e.pages.common; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.datasource.DataSourcePage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectPage; import org.apache.dolphinscheduler.e2e.pages.resource.ResourcePage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage; +import lombok.Getter; + import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; import 
org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; - -import java.time.Duration; @Getter public class NavBarPage { + protected final RemoteWebDriver driver; - @FindBy(css = ".tab-horizontal .n-menu-item:nth-child(2) > .n-menu-item-content") + @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Project')]") private WebElement projectTab; - @FindBy(css = ".tab-horizontal .n-menu-item:nth-child(3) > .n-menu-item-content") + @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Resources')]") private WebElement resourceTab; - @FindBy(css = ".tab-horizontal .n-menu-item:nth-child(4) > .n-menu-item-content") + @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Data Quality')]") private WebElement dataQualityTab; - @FindBy(css = ".tab-horizontal .n-menu-item:nth-child(5) > .n-menu-item-content") + @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Datasource')]") private WebElement dataSourceTab; - @FindBy(css = ".tab-horizontal .n-menu-item:nth-child(7) > .n-menu-item-content") + @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Security')]") private WebElement securityTab; public NavBarPage(RemoteWebDriver driver) { @@ -64,26 +63,35 @@ public NavBarPage(RemoteWebDriver driver) { public T goToNav(Class nav) { if (nav == ProjectPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(projectTab)); - projectTab.click(); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.elementToBeClickable(projectTab)); + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", projectTab()); + 
WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/projects/list")); return nav.cast(new ProjectPage(driver)); } if (nav == SecurityPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(securityTab)); - securityTab.click(); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(securityTab)); + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", securityTab()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/tenant-manage")); return nav.cast(new SecurityPage(driver)); } if (nav == ResourcePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(resourceTab)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(resourceTab)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", resourceTab()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/resource/file-manage")); return nav.cast(new ResourcePage(driver)); } if (nav == DataSourcePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(dataSourceTab)); - dataSourceTab.click(); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(dataSourceTab)); + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", dataSourceTab()); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/datasource")); return nav.cast(new DataSourcePage(driver)); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/datasource/DataSourcePage.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/datasource/DataSourcePage.java index bd2f7e795bd7..16613e25b367 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/datasource/DataSourcePage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/datasource/DataSourcePage.java @@ -1,203 +1,200 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.pages.datasource; - -import lombok.Getter; - -import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; - -import java.security.Key; -import java.time.Duration; -import java.util.List; - -import org.openqa.selenium.By; -import org.openqa.selenium.JavascriptExecutor; -import org.openqa.selenium.Keys; -import org.openqa.selenium.WebDriver; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; -import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedCondition; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - - -@Getter -public class DataSourcePage extends NavBarPage implements NavBarPage.NavBarItem { - - @FindBy(className = "btn-create-data-source") - private WebElement buttonCreateDataSource; - - @FindBy(className = "data-source-items") - private List dataSourceItemsList; - - @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), - }) - private WebElement buttonConfirm; - - @FindBys({ - @FindBy(className = "dialog-source-modal"), - }) - private WebElement dataSourceModal; - - private final CreateDataSourceForm createDataSourceForm; - - public DataSourcePage(RemoteWebDriver driver) { - super(driver); - - createDataSourceForm = new CreateDataSourceForm(); - } - - public DataSourcePage createDataSource(String dataSourceType, String dataSourceName, String dataSourceDescription, String ip, String port, String userName, String password, String database, - String jdbcParams) { - buttonCreateDataSource().click(); - - new WebDriverWait(driver, Duration.ofSeconds(10)).until(ExpectedConditions.visibilityOfElementLocated( - new By.ByClassName("dialog-source-modal"))); - - 
dataSourceModal().findElement(By.className(dataSourceType.toUpperCase()+"-box")).click(); - - new WebDriverWait(driver, Duration.ofSeconds(10)).until(ExpectedConditions.textToBePresentInElement(driver.findElement(By.className("dialog-create-data-source")), dataSourceType.toUpperCase())); - - createDataSourceForm().inputDataSourceName().sendKeys(dataSourceName); - createDataSourceForm().inputDataSourceDescription().sendKeys(dataSourceDescription); - createDataSourceForm().inputIP().sendKeys(ip); - createDataSourceForm().inputPort().sendKeys(Keys.CONTROL + "a"); - createDataSourceForm().inputPort().sendKeys(Keys.BACK_SPACE); - createDataSourceForm().inputPort().sendKeys(port); - createDataSourceForm().inputUserName().sendKeys(userName); - createDataSourceForm().inputPassword().sendKeys(password); - createDataSourceForm().inputDataBase().sendKeys(database); - - if (!"".equals(jdbcParams)) { - createDataSourceForm().inputJdbcParams().sendKeys(jdbcParams); - } - - createDataSourceForm().buttonSubmit().click(); - - return this; - } - - public DataSourcePage delete(String name) { - dataSourceItemsList() - .stream() - .filter(it -> it.getText().contains(name)) - .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in data source list")) - .click(); - - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); - - return this; - } - - @Getter - public class CreateDataSourceForm { - CreateDataSourceForm() { - PageFactory.initElements(driver, this); - } - - @FindBy(className = "n-base-select-option__content") - private List selectDataSourceType; - - @FindBys({ - @FindBy(className = "btn-data-source-type-drop-down"), - @FindBy(className = "n-base-selection"), - }) - private WebElement btnDataSourceTypeDropdown; - - @FindBys({ - @FindBy(className = "input-data-source-name"), - @FindBy(tagName = "input"), - }) - private 
WebElement inputDataSourceName; - - @FindBys({ - @FindBy(className = "input-data-source-description"), - @FindBy(tagName = "textarea"), - }) - private WebElement inputDataSourceDescription; - - @FindBys({ - @FindBy(className = "input-ip"), - @FindBy(tagName = "input"), - }) - private WebElement inputIP; - - @FindBys({ - @FindBy(className = "input-port"), - @FindBy(tagName = "input"), - }) - private WebElement inputPort; - - @FindBys({ - @FindBy(className = "input-username"), - @FindBy(tagName = "input"), - }) - private WebElement inputUserName; - - @FindBys({ - @FindBy(className = "input-password"), - @FindBy(tagName = "input"), - }) - private WebElement inputPassword; - - @FindBys({ - @FindBy(className = "input-data-base"), - @FindBy(tagName = "input"), - }) - private WebElement inputDataBase; - - @FindBys({ - @FindBy(className = "input-jdbc-params"), - @FindBy(tagName = "textarea"), - }) - private WebElement inputJdbcParams; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - - @FindBy(className = "btn-test-connection") - private WebElement btnTestConnection; - - @FindBys({ - @FindBy(className = "input-zeppelin_rest_endpoint"), - @FindBy(tagName = "input"), - }) - private WebElement inputZeppelinRestEndpoint; - - @FindBys({ - @FindBy(className = "input-kubeConfig"), - @FindBy(tagName = "textarea"), - }) - private WebElement inputKubeConfig; - - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.dolphinscheduler.e2e.pages.datasource; + +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; + +import java.util.List; + +import lombok.Getter; + +import org.openqa.selenium.By; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.Keys; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.FindBy; +import org.openqa.selenium.support.FindBys; +import org.openqa.selenium.support.PageFactory; +import org.openqa.selenium.support.ui.ExpectedConditions; + +@Getter +public class DataSourcePage extends NavBarPage implements NavBarPage.NavBarItem { + + @FindBy(className = "btn-create-data-source") + private WebElement buttonCreateDataSource; + + @FindBy(className = "data-source-items") + private List dataSourceItemsList; + + @FindBys({ + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), + }) + private WebElement buttonConfirm; + + @FindBy(className = "dialog-source-modal") + private WebElement dataSourceModal; + + private final CreateDataSourceForm createDataSourceForm; + + public DataSourcePage(RemoteWebDriver driver) { + super(driver); + + createDataSourceForm = new CreateDataSourceForm(); + } + + public DataSourcePage createDataSource(String dataSourceType, String dataSourceName, String dataSourceDescription, + String ip, String port, String userName, String password, String database, + String jdbcParams) { 
+ buttonCreateDataSource().click(); + + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.visibilityOf(dataSourceModal)); + WebElement dataSourceTypeButton = By.className(dataSourceType.toUpperCase() + "-box").findElement(driver); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(dataSourceTypeButton)); + dataSourceTypeButton.click(); + + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.textToBePresentInElement( + driver.findElement(By.className("dialog-create-data-source")), dataSourceType.toUpperCase())); + + createDataSourceForm().inputDataSourceName().sendKeys(dataSourceName); + createDataSourceForm().inputDataSourceDescription().sendKeys(dataSourceDescription); + createDataSourceForm().inputIP().sendKeys(ip); + createDataSourceForm().inputPort().sendKeys(Keys.CONTROL + "a"); + createDataSourceForm().inputPort().sendKeys(Keys.BACK_SPACE); + createDataSourceForm().inputPort().sendKeys(port); + createDataSourceForm().inputUserName().sendKeys(userName); + createDataSourceForm().inputPassword().sendKeys(password); + createDataSourceForm().inputDataBase().sendKeys(database); + + if (!"".equals(jdbcParams)) { + createDataSourceForm().inputJdbcParams().sendKeys(jdbcParams); + } + + createDataSourceForm().buttonSubmit().click(); + + return this; + } + + public DataSourcePage delete(String name) { + dataSourceItemsList() + .stream() + .filter(it -> it.getText().contains(name)) + .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in data source list")) + .click(); + + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); + + return this; + } + + @Getter + public class CreateDataSourceForm { + + CreateDataSourceForm() { + PageFactory.initElements(driver, this); + } + + @FindBy(className = 
"n-base-select-option__content") + private List selectDataSourceType; + + @FindBys({ + @FindBy(className = "btn-data-source-type-drop-down"), + @FindBy(className = "n-base-selection"), + }) + private WebElement btnDataSourceTypeDropdown; + + @FindBys({ + @FindBy(className = "input-data-source-name"), + @FindBy(tagName = "input"), + }) + private WebElement inputDataSourceName; + + @FindBys({ + @FindBy(className = "input-data-source-description"), + @FindBy(tagName = "textarea"), + }) + private WebElement inputDataSourceDescription; + + @FindBys({ + @FindBy(className = "input-ip"), + @FindBy(tagName = "input"), + }) + private WebElement inputIP; + + @FindBys({ + @FindBy(className = "input-port"), + @FindBy(tagName = "input"), + }) + private WebElement inputPort; + + @FindBys({ + @FindBy(className = "input-username"), + @FindBy(tagName = "input"), + }) + private WebElement inputUserName; + + @FindBys({ + @FindBy(className = "input-password"), + @FindBy(tagName = "input"), + }) + private WebElement inputPassword; + + @FindBys({ + @FindBy(className = "input-data-base"), + @FindBy(tagName = "input"), + }) + private WebElement inputDataBase; + + @FindBys({ + @FindBy(className = "input-jdbc-params"), + @FindBy(tagName = "textarea"), + }) + private WebElement inputJdbcParams; + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + + @FindBy(className = "btn-test-connection") + private WebElement btnTestConnection; + + @FindBys({ + @FindBy(className = "input-zeppelin_rest_endpoint"), + @FindBy(tagName = "input"), + }) + private WebElement inputZeppelinRestEndpoint; + + @FindBys({ + @FindBy(className = "input-kubeConfig"), + @FindBy(tagName = "textarea"), + }) + private WebElement inputKubeConfig; + + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java index 2ad24507ab1d..79c46ba1f4e3 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectDetailPage.java @@ -19,19 +19,23 @@ */ package org.apache.dolphinscheduler.e2e.pages.project; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.project.workflow.TaskInstanceTab; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowDefinitionTab; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowInstanceTab; +import lombok.Getter; +import lombok.SneakyThrows; + import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; - -import lombok.Getter; +import org.openqa.selenium.support.ui.ExpectedConditions; @Getter public final class ProjectDetailPage extends NavBarPage { + @FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) .n-menu-item:nth-of-type(2) > .n-menu-item-content") private WebElement menuProcessDefinition; @@ -45,17 +49,23 @@ public ProjectDetailPage(RemoteWebDriver driver) { super(driver); } + @SneakyThrows public T goToTab(Class tab) { if (tab == WorkflowDefinitionTab.class) { menuProcessDefinition().click(); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/workflow-definition")); return tab.cast(new WorkflowDefinitionTab(driver)); } if (tab == WorkflowInstanceTab.class) { menuProcessInstances().click(); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/workflow/instances")); return tab.cast(new 
WorkflowInstanceTab(driver)); } if (tab == TaskInstanceTab.class) { menuTaskInstances().click(); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/task/instances")); return tab.cast(new TaskInstanceTab(driver)); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectPage.java index 6219fab099bf..94ca6ea5f8eb 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/ProjectPage.java @@ -19,11 +19,16 @@ */ package org.apache.dolphinscheduler.e2e.pages.project; +import static org.assertj.core.api.Assertions.assertThat; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage.NavBarItem; import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; @@ -31,13 +36,10 @@ import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; @Getter public final class ProjectPage extends NavBarPage implements NavBarItem { + @FindBy(className = "btn-create-project") private WebElement buttonCreateProject; @@ -64,17 +66,24 @@ public ProjectPage create(String project) { buttonCreateProject().click(); createProjectForm().inputProjectName().sendKeys(project); createProjectForm().buttonSubmit().click(); + return 
this; + } + public ProjectPage createProjectUntilSuccess(String project) { + create(project); + await().untilAsserted(() -> assertThat(projectList()) + .as("project list should contain newly-created project") + .anyMatch(it -> it.getText().contains(project))); return this; } public ProjectPage delete(String project) { projectList() - .stream() - .filter(it -> it.getText().contains(project)) - .findFirst() - .orElseThrow(() -> new RuntimeException("Cannot find project: " + project)) - .findElement(By.className("delete")).click(); + .stream() + .filter(it -> it.getText().contains(project)) + .findFirst() + .orElseThrow(() -> new RuntimeException("Cannot find project: " + project)) + .findElement(By.className("delete")).click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -83,17 +92,18 @@ public ProjectPage delete(String project) { public ProjectDetailPage goTo(String project) { projectList().stream() - .filter(it -> it.getText().contains(project)) - .map(it -> it.findElement(By.className("project-name")).findElement(new By.ByTagName("button"))) - .findFirst() - .orElseThrow(() -> new RuntimeException("Cannot click the project item")) - .click(); + .filter(it -> it.getText().contains(project)) + .map(it -> it.findElement(By.className("project-name")).findElement(new By.ByTagName("button"))) + .findFirst() + .orElseThrow(() -> new RuntimeException("Cannot click the project item")) + .click(); return new ProjectDetailPage(driver); } @Getter public class CreateProjectForm { + CreateProjectForm() { PageFactory.initElements(driver, this); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java index 06f76676b819..592fd0e0b2cd 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/TaskInstanceTab.java @@ -20,22 +20,24 @@ package org.apache.dolphinscheduler.e2e.pages.project.workflow; -import lombok.Getter; -import lombok.RequiredArgsConstructor; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.project.ProjectDetailPage; + +import java.util.List; +import java.util.stream.Collectors; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; + import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; - -import java.util.List; -import java.util.stream.Collectors; @Getter public final class TaskInstanceTab extends NavBarPage implements ProjectDetailPage.Tab { - @FindBy(className = "items-task-instances") + + @FindBy(className = "batch-task-instance-items") private List instanceList; public TaskInstanceTab(RemoteWebDriver driver) { @@ -44,23 +46,36 @@ public TaskInstanceTab(RemoteWebDriver driver) { public List instances() { return instanceList() - .stream() - .filter(WebElement::isDisplayed) - .map(Row::new) - .filter(row -> !row.name().isEmpty()) - .collect(Collectors.toList()); + .stream() + .filter(WebElement::isDisplayed) + .map(Row::new) + .collect(Collectors.toList()); } @RequiredArgsConstructor public static class Row { + private final WebElement row; - public String state() { - return row.findElement(By.className("task-instance-state")).getText(); + public String taskInstanceName() { + return row.findElement(By.cssSelector("td[data-col-key=name]")).getText(); + } + + public String workflowInstanceName() { + return 
row.findElement(By.cssSelector("td[data-col-key=processInstanceName]")).getText(); } - public String name() { - return row.findElement(By.className("task-instance-name")).getText(); + public int retryTimes() { + return Integer.parseInt(row.findElement(By.cssSelector("td[data-col-key=retryTimes]")).getText()); } + + public boolean isSuccess() { + return !row.findElements(By.className("success")).isEmpty(); + } + + public boolean isFailed() { + return !row.findElements(By.className("failed")).isEmpty(); + } + } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java index d5a7b386c55b..ee38a0ff7dc5 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowDefinitionTab.java @@ -27,6 +27,8 @@ import java.util.List; import java.util.stream.Collectors; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; @@ -35,10 +37,9 @@ import org.openqa.selenium.support.FindBys; import org.testcontainers.shaded.org.awaitility.Awaitility; -import lombok.Getter; - @Getter public final class WorkflowDefinitionTab extends NavBarPage implements ProjectDetailPage.Tab { + @FindBy(className = "btn-create-process") private WebElement buttonCreateProcess; @@ -61,8 +62,8 @@ public final class WorkflowDefinitionTab extends NavBarPage implements ProjectDe private WebElement buttonConfirm; @FindBys({ - @FindBy(className = "n-dialog__action"), - @FindBy(className = "n-button--default-type"), + @FindBy(className = "n-dialog__action"), + 
@FindBy(className = "n-button--default-type"), }) private WebElement publishSuccessButtonCancel; @@ -91,13 +92,13 @@ public WorkflowForm createSubProcessWorkflow() { public WorkflowDefinitionTab publish(String workflow) { workflowList() - .stream() - .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) - .flatMap(it -> it.findElements(By.className("btn-publish")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("Can not find publish button in workflow definition")) - .click(); + .stream() + .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) + .flatMap(it -> it.findElements(By.className("btn-publish")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("Can not find publish button in workflow definition")) + .click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -108,13 +109,13 @@ public WorkflowDefinitionTab publish(String workflow) { public WorkflowRunDialog run(String workflow) { workflowList() - .stream() - .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) - .flatMap(it -> it.findElements(By.className("btn-run")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("Can not find run button in workflow definition")) - .click(); + .stream() + .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) + .flatMap(it -> it.findElements(By.className("btn-run")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("Can not find run button in workflow definition")) + .click(); return new WorkflowRunDialog(this); } @@ -136,19 +137,18 @@ public WorkflowDefinitionTab cancelPublishAll() { public WorkflowDefinitionTab delete(String 
workflow) { Awaitility.await().untilAsserted(() -> assertThat(workflowList()) - .as("Workflow list should contain newly-created workflow") - .anyMatch( - it -> it.getText().contains(workflow) - )); + .as("Workflow list should contain newly-created workflow") + .anyMatch( + it -> it.getText().contains(workflow))); workflowList() - .stream() - .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) - .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("Can not find delete button in workflow definition")) - .click(); + .stream() + .filter(it -> it.findElement(By.className("workflow-name")).getAttribute("innerText").equals(workflow)) + .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("Can not find delete button in workflow definition")) + .click(); return this; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java index 58c5c96051ce..319a59d37dac 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowForm.java @@ -19,16 +19,19 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.HttpTaskForm; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.JavaTaskForm; +import 
org.apache.dolphinscheduler.e2e.pages.project.workflow.task.PythonTaskForm; import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.ShellTaskForm; import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.SubWorkflowTaskForm; import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.SwitchTaskForm; -import org.apache.dolphinscheduler.e2e.pages.project.workflow.task.JavaTaskForm; import java.nio.charset.StandardCharsets; -import java.time.Duration; import java.util.List; -import java.util.concurrent.TimeUnit; + +import lombok.Getter; +import lombok.SneakyThrows; import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; @@ -37,17 +40,14 @@ import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; +import org.openqa.selenium.support.ui.ExpectedConditions; import com.google.common.io.Resources; -import lombok.Getter; -import lombok.SneakyThrows; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - @SuppressWarnings("UnstableApiUsage") @Getter public final class WorkflowForm { + private WebDriver driver; private final WorkflowSaveDialog saveForm; private final WorkflowFormatDialog formatDialog; @@ -76,6 +76,8 @@ public T addTask(TaskType type) { final String dragAndDrop = String.join("\n", Resources.readLines(Resources.getResource("dragAndDrop.js"), StandardCharsets.UTF_8)); js.executeScript(dragAndDrop, task, canvas); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions + .visibilityOfElementLocated(By.xpath("//*[contains(text(), 'Current node settings')]"))); switch (type) { case SHELL: @@ -88,13 +90,16 @@ public T addTask(TaskType type) { return (T) new HttpTaskForm(this); case JAVA: return (T) new JavaTaskForm(this); + case PYTHON: + return (T) new PythonTaskForm(this); } throw new UnsupportedOperationException("Unknown task type"); } public WebElement 
getTask(String taskName) { - List tasks = new WebDriverWait(driver, Duration.ofSeconds(20)) - .until(ExpectedConditions.visibilityOfAllElementsLocatedBy(By.cssSelector("svg > g > g[class^='x6-graph-svg-stage'] > g[data-shape^='dag-task']"))); + List tasks = WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfAllElementsLocatedBy( + By.cssSelector("svg > g > g[class^='x6-graph-svg-stage'] > g[data-shape^='dag-task']"))); WebElement task = tasks.stream() .filter(t -> t.getText().contains(taskName)) @@ -109,7 +114,8 @@ public WebElement getTask(String taskName) { public WorkflowSaveDialog submit() { buttonSave().click(); - + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[contains(.,'Basic Information')]"))); return new WorkflowSaveDialog(this); } @@ -125,5 +131,6 @@ public enum TaskType { SWITCH, HTTP, JAVA, + PYTHON } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowFormatDialog.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowFormatDialog.java index f0ecf53b4655..19778fe61648 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowFormatDialog.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowFormatDialog.java @@ -19,22 +19,19 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow; +import java.util.List; + import lombok.Getter; -import org.openqa.selenium.By; + import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import 
org.openqa.selenium.support.pagefactory.ByChained; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.util.List; -import java.util.stream.Stream; @Getter public final class WorkflowFormatDialog { + private final WebDriver driver; private final WorkflowForm parent; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowInstanceTab.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowInstanceTab.java index f55c5d0263cc..e853311f33b8 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowInstanceTab.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowInstanceTab.java @@ -25,25 +25,26 @@ import java.util.List; import java.util.stream.Collectors; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; - -import lombok.Getter; -import lombok.RequiredArgsConstructor; import org.openqa.selenium.support.pagefactory.ByChained; @Getter public final class WorkflowInstanceTab extends NavBarPage implements ProjectDetailPage.Tab { + @FindBy(className = "items-workflow-instances") private List instanceList; @FindBys({ - @FindBy(className = "btn-selected"), - @FindBy(className = "n-checkbox-box"), + @FindBy(className = "btn-selected"), + @FindBy(className = "n-checkbox-box"), }) private WebElement checkBoxSelectAll; @@ -62,10 +63,10 @@ public WorkflowInstanceTab(RemoteWebDriver driver) { public List instances() { return 
instanceList() - .stream() - .filter(WebElement::isDisplayed) - .map(Row::new) - .collect(Collectors.toList()); + .stream() + .filter(WebElement::isDisplayed) + .map(Row::new) + .collect(Collectors.toList()); } public WorkflowInstanceTab deleteAll() { @@ -84,8 +85,13 @@ public WorkflowInstanceTab deleteAll() { @RequiredArgsConstructor public static class Row { + private final WebElement row; + public String workflowInstanceName() { + return row.findElement(By.className("workflow-name")).getText(); + } + public WebElement rerunButton() { return row.findElement(By.className("btn-rerun")); } @@ -94,17 +100,21 @@ public boolean isSuccess() { return !row.findElements(By.className("success")).isEmpty(); } + public boolean isFailed() { + return !row.findElements(By.className("failed")).isEmpty(); + } + public int executionTime() { return Integer.parseInt(row.findElement(By.className("workflow-run-times")).getText()); } public Row rerun() { row.findElements(new ByChained(By.className("btn-rerun"), By.className("n-button__content"))) - .stream() - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("Cannot find rerun button")) - .click(); + .stream() + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("Cannot find rerun button")) + .click(); return this; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowRunDialog.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowRunDialog.java index 9a3e24fb8a64..4d5ea9329646 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowRunDialog.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowRunDialog.java @@ -19,22 +19,26 @@ */ package 
org.apache.dolphinscheduler.e2e.pages.project.workflow; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.PageFactory; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import lombok.Getter; -import java.time.Duration; - +import org.openqa.selenium.By; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.support.FindBy; +import org.openqa.selenium.support.FindBys; +import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; @Getter public final class WorkflowRunDialog { + private final WorkflowDefinitionTab parent; - @FindBy(className = "btn-submit") + @FindBys({ + @FindBy(xpath = "//div[contains(text(), 'Please set the parameters before starting')]/../.."), + @FindBy(className = "btn-submit") + }) private WebElement buttonSubmit; public WorkflowRunDialog(WorkflowDefinitionTab parent) { @@ -44,10 +48,16 @@ public WorkflowRunDialog(WorkflowDefinitionTab parent) { } public WorkflowDefinitionTab submit() { - new WebDriverWait(parent().driver(), Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(buttonSubmit())); + By runDialogTitleXpath = + By.xpath(String.format("//*[contains(text(), '%s')]", "Please set the parameters before starting")); + WebDriverWaitFactory.createWebDriverWait(parent.driver()) + .until(ExpectedConditions.visibilityOfElementLocated(runDialogTitleXpath)); + WebDriverWaitFactory.createWebDriverWait(parent.driver()) + .until(ExpectedConditions.elementToBeClickable(buttonSubmit())); buttonSubmit().click(); - + WebDriverWaitFactory.createWebDriverWait(parent.driver()) + .until(ExpectedConditions.invisibilityOfElementLocated(runDialogTitleXpath)); return parent(); } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowSaveDialog.java 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowSaveDialog.java index 6f7b8470ae6a..f29d71e27ec1 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowSaveDialog.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/WorkflowSaveDialog.java @@ -19,22 +19,21 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; + import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.pagefactory.ByChained; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.util.List; -import java.util.stream.Stream; @Getter public final class WorkflowSaveDialog { + private final WebDriver driver; private final WorkflowForm parent; @@ -44,7 +43,7 @@ public final class WorkflowSaveDialog { }) private WebElement inputName; - @FindBy(className = "btn-submit") + @FindBy(xpath = "//div[contains(text(), 'Basic Information')]/../following-sibling::div[contains(@class, 'n-card__footer')]//button[contains(@class, 'btn-submit')]") private WebElement buttonSubmit; @FindBys({ @@ -72,26 +71,25 @@ public WorkflowSaveDialog name(String name) { public WorkflowSaveDialog addGlobalParam(String key, String value) { final int len = globalParamsItems().findElements(By.tagName("input")).size(); - final WebDriver driver = parent().driver(); - if (len == 0) { buttonGlobalCustomParameters().click(); globalParamsItems().findElements(By.tagName("input")).get(0).sendKeys(key); 
globalParamsItems().findElements(By.tagName("input")).get(1).sendKeys(value); } else { - globalParamsItems().findElements(By.tagName("button")).get(len-1).click(); + globalParamsItems().findElements(By.tagName("button")).get(len - 1).click(); globalParamsItems().findElements(By.tagName("input")).get(len).sendKeys(key); - globalParamsItems().findElements(By.tagName("input")).get(len+1).sendKeys(value); + globalParamsItems().findElements(By.tagName("input")).get(len + 1).sendKeys(value); } return this; } public WorkflowForm submit() { - buttonSubmit().click(); - + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.elementToBeClickable(buttonSubmit)); + buttonSubmit.click(); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("workflow-definition")); return parent; } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/HttpTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/HttpTaskForm.java index 5869c6de9cc9..ab77ac3f516e 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/HttpTaskForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/HttpTaskForm.java @@ -22,14 +22,15 @@ import org.apache.dolphinscheduler.e2e.pages.common.HttpInput; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; + import org.openqa.selenium.WebDriver; -public class HttpTaskForm extends TaskNodeForm{ +public class HttpTaskForm extends TaskNodeForm { + private WebDriver driver; private HttpInput httpInput; - public HttpTaskForm(WorkflowForm parent) { super(parent); this.httpInput = new HttpInput(parent.driver()); diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/JavaTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/JavaTaskForm.java index 960f7672fdf2..528c8b80d6d2 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/JavaTaskForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/JavaTaskForm.java @@ -22,9 +22,11 @@ import org.apache.dolphinscheduler.e2e.pages.common.CodeEditor; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; + import org.openqa.selenium.WebDriver; -public class JavaTaskForm extends TaskNodeForm{ +public class JavaTaskForm extends TaskNodeForm { + private CodeEditor codeEditor; private WebDriver driver; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/PythonTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/PythonTaskForm.java new file mode 100644 index 000000000000..0ebabda411aa --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/PythonTaskForm.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ +package org.apache.dolphinscheduler.e2e.pages.project.workflow.task; + +import org.apache.dolphinscheduler.e2e.pages.common.CodeEditor; +import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; + +import lombok.Getter; + +import org.openqa.selenium.WebDriver; + +@Getter +public final class PythonTaskForm extends TaskNodeForm { + + private CodeEditor codeEditor; + + private WebDriver driver; + + public PythonTaskForm(WorkflowForm parent) { + super(parent); + + this.codeEditor = new CodeEditor(parent.driver()); + + this.driver = parent.driver(); + } + + public PythonTaskForm script(String script) { + codeEditor.content(script); + + return this; + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/ShellTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/ShellTaskForm.java index fb91bb7e9604..9f11f1f85ceb 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/ShellTaskForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/ShellTaskForm.java @@ -23,11 +23,12 @@ import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; import lombok.Getter; -import org.openqa.selenium.JavascriptExecutor; + import org.openqa.selenium.WebDriver; @Getter public final class ShellTaskForm extends 
TaskNodeForm { + private CodeEditor codeEditor; private WebDriver driver; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SubWorkflowTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SubWorkflowTaskForm.java index d89037feaa56..3cf288e1eb0b 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SubWorkflowTaskForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SubWorkflowTaskForm.java @@ -19,22 +19,23 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow.task; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; +import java.util.List; + import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.time.Duration; -import java.util.List; @Getter public final class SubWorkflowTaskForm extends TaskNodeForm { + @FindBys({ @FindBy(className = "select-child-node"), @FindBy(className = "n-base-selection"), @@ -46,7 +47,6 @@ public final class SubWorkflowTaskForm extends TaskNodeForm { private WebDriver driver; - public SubWorkflowTaskForm(WorkflowForm parent) { super(parent); @@ -54,12 +54,14 @@ public SubWorkflowTaskForm(WorkflowForm parent) { } public SubWorkflowTaskForm childNode(String node) { - new WebDriverWait(driver, Duration.ofSeconds(5)).until(ExpectedConditions.elementToBeClickable(btnSelectChildNodeDropdown)); - + 
WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(btnSelectChildNodeDropdown)); + btnSelectChildNodeDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(5)).until(ExpectedConditions.visibilityOfElementLocated(By.className( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(By.className( + "n-base-select-option__content"))); selectChildNode() .stream() diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskForm.java index 988a00c7bd95..0948be7d9912 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskForm.java @@ -19,18 +19,19 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow.task; -import lombok.Getter; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; + +import java.util.List; + +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.time.Duration; -import java.util.List; @Getter public final class SwitchTaskForm extends TaskNodeForm { @@ -50,14 +51,14 @@ public SwitchTaskForm(WorkflowForm parent) { } public SwitchTaskForm elseBranch(String 
elseBranchName) { - ((JavascriptExecutor)parent().driver()).executeScript("arguments[0].click();", inputElseBranch()); + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", inputElseBranch()); final By optionsLocator = By.className("option-else-branches"); - new WebDriverWait(parent().driver(), Duration.ofSeconds(10)) + WebDriverWaitFactory.createWebDriverWait(parent().driver()) .until(ExpectedConditions.visibilityOfElementLocated(optionsLocator)); - List webElements = parent().driver().findElements(optionsLocator); + List webElements = parent().driver().findElements(optionsLocator); webElements.stream() .filter(it -> it.getText().contains(elseBranchName)) .findFirst() @@ -70,19 +71,20 @@ public SwitchTaskForm elseBranch(String elseBranchName) { } public SwitchTaskForm addIfBranch(String switchScript, String ifBranchName) { - ((JavascriptExecutor)parent().driver()).executeScript("arguments[0].click();", buttonAddBranch); + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", buttonAddBranch); SwitchTaskIfBranch switchTaskIfBranch = new SwitchTaskIfBranch(this); switchTaskIfBranch.codeEditor().content(switchScript); - ((JavascriptExecutor)parent().driver()).executeScript("arguments[0].click();", switchTaskIfBranch.inputIfBranch()); + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", + switchTaskIfBranch.inputIfBranch()); final By optionsLocator = By.className("option-if-branches"); - new WebDriverWait(parent().driver(), Duration.ofSeconds(10)) + WebDriverWaitFactory.createWebDriverWait(parent().driver()) .until(ExpectedConditions.visibilityOfElementLocated(optionsLocator)); - List webElements = parent().driver().findElements(optionsLocator); + List webElements = parent().driver().findElements(optionsLocator); webElements.stream() .filter(it -> it.getText().contains(ifBranchName)) .findFirst() diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskIfBranch.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskIfBranch.java index 593778f4319c..c3675768c9de 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskIfBranch.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/SwitchTaskIfBranch.java @@ -19,8 +19,10 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow.task; -import lombok.Getter; import org.apache.dolphinscheduler.e2e.pages.common.CodeEditor; + +import lombok.Getter; + import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; @@ -29,6 +31,7 @@ @Getter public final class SwitchTaskIfBranch { + private final WebDriver driver; private final SwitchTaskForm parent; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/TaskNodeForm.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/TaskNodeForm.java index fadfd6c38b04..de4bac356758 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/TaskNodeForm.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/project/workflow/task/TaskNodeForm.java @@ -19,25 +19,26 @@ */ package org.apache.dolphinscheduler.e2e.pages.project.workflow.task; -import lombok.Getter; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.project.workflow.WorkflowForm; + +import 
java.util.List; + +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.Keys; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.pagefactory.ByChained; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.time.Duration; -import java.util.List; -import java.util.stream.Stream; @Getter public abstract class TaskNodeForm { + @FindBys({ @FindBy(className = "input-node-name"), @FindBy(tagName = "input") @@ -48,14 +49,14 @@ public abstract class TaskNodeForm { private WebElement buttonSubmit; @FindBys({ - @FindBy(className = "input-param-key"), - @FindBy(tagName = "input"), + @FindBy(className = "input-param-key"), + @FindBy(tagName = "input"), }) private List inputParamKey; @FindBys({ - @FindBy(className = "input-param-value"), - @FindBy(tagName = "input"), + @FindBy(className = "input-param-value"), + @FindBy(tagName = "input"), }) private List inputParamValue; @@ -80,6 +81,12 @@ public abstract class TaskNodeForm { @FindBy(className = "btn-create-custom-parameter") private WebElement buttonCreateCustomParameters; + @FindBys({ + @FindBy(className = "resource-select"), + @FindBy(className = "n-base-selection"), + }) + private WebElement selectResource; + private final WorkflowForm parent; TaskNodeForm(WorkflowForm parent) { @@ -118,15 +125,15 @@ public TaskNodeForm addParam(String key, String value) { return this; } - public TaskNodeForm selectEnv(String envName){ - ((JavascriptExecutor)parent().driver()).executeScript("arguments[0].click();", selectEnv); + public TaskNodeForm selectEnv(String envName) { + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", selectEnv); final By optionsLocator = 
By.className("n-base-selection-input__content"); - new WebDriverWait(parent.driver(), Duration.ofSeconds(20)) + WebDriverWaitFactory.createWebDriverWait(parent().driver()) .until(ExpectedConditions.visibilityOfElementLocated(optionsLocator)); - List webElements = parent.driver().findElements(optionsLocator); + List webElements = parent.driver().findElements(optionsLocator); webElements.stream() .filter(it -> it.getText().contains(envName)) @@ -138,14 +145,14 @@ public TaskNodeForm selectEnv(String envName){ } public TaskNodeForm preTask(String preTaskName) { - ((JavascriptExecutor)parent().driver()).executeScript("arguments[0].click();", selectPreTasks); + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", selectPreTasks); final By optionsLocator = By.className("option-pre-tasks"); - new WebDriverWait(parent.driver(), Duration.ofSeconds(20)) + WebDriverWaitFactory.createWebDriverWait(parent.driver()) .until(ExpectedConditions.visibilityOfElementLocated(optionsLocator)); - List webElements = parent.driver().findElements(optionsLocator); + List webElements = parent.driver().findElements(optionsLocator); webElements.stream() .filter(it -> it.getText().contains(preTaskName)) .findFirst() @@ -157,6 +164,26 @@ public TaskNodeForm preTask(String preTaskName) { return this; } + public TaskNodeForm selectResource(String resourceName) { + ((JavascriptExecutor) parent().driver()).executeScript("arguments[0].click();", selectResource); + + final By optionsLocator = By.className("n-tree-node-content__text"); + + WebDriverWaitFactory.createWebDriverWait(parent().driver()) + .until(ExpectedConditions.visibilityOfElementLocated(optionsLocator)); + + parent().driver() + .findElements(optionsLocator) + .stream() + .filter(it -> it.getText().startsWith(resourceName)) + .findFirst() + .orElseThrow(() -> new RuntimeException("No such resource: " + resourceName)) + .click(); + + parent.driver().switchTo().activeElement().sendKeys(Keys.ESCAPE); + return this; 
+ } + public WorkflowForm submit() { buttonSubmit.click(); diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FileManagePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FileManagePage.java index a2a780be9d78..5f373b8190c5 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FileManagePage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FileManagePage.java @@ -1,311 +1,333 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ - -package org.apache.dolphinscheduler.e2e.pages.resource; - -import lombok.Getter; - -import org.apache.dolphinscheduler.e2e.pages.common.CodeEditor; -import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; - -import org.openqa.selenium.By; -import org.openqa.selenium.JavascriptExecutor; -import org.openqa.selenium.Keys; -import org.openqa.selenium.WebDriver; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.LocalFileDetector; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; -import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedCondition; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import java.io.File; -import java.time.Duration; -import java.util.List; - - -@Getter -public class FileManagePage extends NavBarPage implements ResourcePage.Tab { - @FindBy(className = "btn-create-directory") - private WebElement buttonCreateDirectory; - - @FindBy(className = "btn-create-file") - private WebElement buttonCreateFile; - - @FindBy(className = "btn-upload-resource") - private WebElement buttonUploadFile; - - private final CreateDirectoryBox createDirectoryBox; - - private final RenameBox renameBox; - - private final CreateFileBox createFileBox; - - private final UploadFileBox uploadFileBox; - - private final EditFileBox editFileBox; - - @FindBy(className = "items") - private List fileList; - - @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), - }) - private WebElement buttonConfirm; - - @FindBys({ - @FindBy(className = "monaco-editor"), - @FindBy(className = "view-line"), - }) - private WebElement editor; - - public FileManagePage(RemoteWebDriver driver) { - super(driver); - - createDirectoryBox = new CreateDirectoryBox(); - - renameBox = new RenameBox(); - - createFileBox 
= new CreateFileBox(); - - uploadFileBox = new UploadFileBox(); - - editFileBox = new EditFileBox(); - } - - public FileManagePage createDirectory(String name) { - buttonCreateDirectory().click(); - - createDirectoryBox().inputDirectoryName().sendKeys(name); - createDirectoryBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage cancelCreateDirectory(String name) { - buttonCreateDirectory().click(); - - createDirectoryBox().inputDirectoryName().sendKeys(name); - createDirectoryBox().buttonCancel().click(); - - return this; - } - - public FileManagePage rename(String currentName, String AfterName) { - fileList() - .stream() - .filter(it -> it.getText().contains(currentName)) - .flatMap(it -> it.findElements(By.className("btn-rename")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No rename button in file manage list")) - .click(); - - renameBox().inputName().sendKeys(Keys.CONTROL + "a"); - renameBox().inputName().sendKeys(Keys.BACK_SPACE); - renameBox().inputName().sendKeys(AfterName); - renameBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage createSubDirectory(String directoryName, String subDirectoryName) { - fileList() - .stream() - .filter(it -> it.getText().contains(directoryName)) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No %s in file manage list", directoryName))) - .click(); - - buttonCreateDirectory().click(); - - createDirectoryBox().inputDirectoryName().sendKeys(subDirectoryName); - createDirectoryBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage delete(String name) { - fileList() - .stream() - .filter(it -> it.getText().contains(name)) - .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in file manage list")) - .click(); - - 
((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); - - return this; - } - - public FileManagePage createFile(String fileName, String scripts) { - buttonCreateFile().click(); - - createFileBox().inputFileName().sendKeys(fileName); - createFileBox().codeEditor().content(scripts); - createFileBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage editFile(String fileName, String scripts) { - fileList() - .stream() - .filter(it -> it.getText().contains(fileName)) - .flatMap(it -> it.findElements(By.className("btn-edit")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No edit button in file manage list")) - .click(); - - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/edit")); - - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.textToBePresentInElement(driver.findElement(By.tagName("body")), fileName)); - - editFileBox().codeEditor().content(scripts); - editFileBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage uploadFile(String filePath) { - buttonUploadFile().click(); - - driver.setFileDetector(new LocalFileDetector()); - - uploadFileBox().buttonUpload().sendKeys(filePath); - uploadFileBox().buttonSubmit().click(); - - return this; - } - - public FileManagePage downloadFile(String fileName) { - fileList() - .stream() - .filter(it -> it.getText().contains(fileName)) - .flatMap(it -> it.findElements(By.className("btn-download")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No download button in file manage list")) - .click(); - - return this; - } - - @Getter - public class CreateDirectoryBox { - CreateDirectoryBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-directory-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputDirectoryName; - - 
@FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class RenameBox { - RenameBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputName; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class CreateFileBox { - CreateFileBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-file-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputFileName; - - private final CodeEditor codeEditor = new CodeEditor(driver); - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class EditFileBox { - EditFileBox() { - PageFactory.initElements(driver, this); - } - - CodeEditor codeEditor = new CodeEditor(driver); - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class UploadFileBox { - UploadFileBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "btn-upload"), - @FindBy(tagName = "input"), - }) - private WebElement buttonUpload; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.dolphinscheduler.e2e.pages.resource; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.common.CodeEditor; +import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; + +import java.util.List; + +import lombok.Getter; + +import org.openqa.selenium.By; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.Keys; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.LocalFileDetector; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.FindBy; +import org.openqa.selenium.support.FindBys; +import org.openqa.selenium.support.PageFactory; +import org.openqa.selenium.support.ui.ExpectedConditions; + +@Getter +public class FileManagePage extends NavBarPage implements ResourcePage.Tab { + + @FindBy(className = "btn-create-directory") + private WebElement buttonCreateDirectory; + + @FindBy(className = "btn-create-file") + private WebElement buttonCreateFile; + + @FindBy(className = "btn-upload-resource") + private WebElement buttonUploadFile; + + private final CreateDirectoryBox createDirectoryBox; + + private final RenameBox renameBox; + + private final UploadFileBox uploadFileBox; + + 
private final EditFileBox editFileBox; + + @FindBy(className = "items") + private List fileList; + + @FindBys({ + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), + }) + private WebElement buttonConfirm; + + @FindBys({ + @FindBy(className = "monaco-editor"), + @FindBy(className = "view-line"), + }) + private WebElement editor; + + public FileManagePage(RemoteWebDriver driver) { + super(driver); + + createDirectoryBox = new CreateDirectoryBox(); + + renameBox = new RenameBox(); + + uploadFileBox = new UploadFileBox(); + + editFileBox = new EditFileBox(); + + } + + public FileManagePage createDirectory(String name) { + buttonCreateDirectory().click(); + + createDirectoryBox().inputDirectoryName().sendKeys(name); + createDirectoryBox().buttonSubmit().click(); + + return this; + } + + public FileManagePage cancelCreateDirectory(String name) { + buttonCreateDirectory().click(); + + createDirectoryBox().inputDirectoryName().sendKeys(name); + createDirectoryBox().buttonCancel().click(); + + return this; + } + + public FileManagePage rename(String currentName, String AfterName) { + fileList() + .stream() + .filter(it -> it.getText().contains(currentName)) + .flatMap(it -> it.findElements(By.className("btn-rename")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No rename button in file manage list")) + .click(); + + renameBox().inputName().sendKeys(Keys.CONTROL + "a"); + renameBox().inputName().sendKeys(Keys.BACK_SPACE); + renameBox().inputName().sendKeys(AfterName); + renameBox().buttonSubmit().click(); + + return this; + } + + public FileManagePage createSubDirectory(String directoryName, String subDirectoryName) { + fileList() + .stream() + .filter(it -> it.getText().contains(directoryName)) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException(String.format("No %s in file manage list", directoryName))) + .click(); + + 
buttonCreateDirectory().click(); + + createDirectoryBox().inputDirectoryName().sendKeys(subDirectoryName); + createDirectoryBox().buttonSubmit().click(); + + return this; + } + + public FileManagePage delete(String name) { + fileList() + .stream() + .filter(it -> it.getText().contains(name)) + .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in file manage list")) + .click(); + + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); + + return this; + } + + // todo: add file type + public FileManagePage createFile(String fileName, String scripts) { + + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(buttonCreateFile())); + + buttonCreateFile().click(); + + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/resource/file/create")); + + CreateFileBox createFileBox = new CreateFileBox(); + createFileBox.inputFileName().sendKeys(fileName); + createFileBox.codeEditor().content(scripts); + createFileBox.buttonSubmit().click(); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/resource/file-manage")); + return this; + } + + public FileManagePage createFileUntilSuccess(String fileName, String scripts) { + + createFile(fileName, scripts); + + await() + .untilAsserted(() -> assertThat(fileList()) + .as("File list should contain newly-created file: " + fileName) + .extracting(WebElement::getText) + .anyMatch(it -> it.contains(fileName))); + return this; + } + + public FileManagePage editFile(String fileName, String scripts) { + fileList() + .stream() + .filter(it -> it.getText().contains(fileName)) + .flatMap(it -> it.findElements(By.className("btn-edit")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No edit button in file manage 
list")) + .click(); + + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/edit")); + + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.textToBePresentInElement(driver.findElement(By.tagName("body")), fileName)); + + editFileBox().codeEditor().content(scripts); + editFileBox().buttonSubmit().click(); + + return this; + } + + public FileManagePage uploadFile(String filePath) { + buttonUploadFile().click(); + + driver.setFileDetector(new LocalFileDetector()); + + uploadFileBox().buttonUpload().sendKeys(filePath); + uploadFileBox().buttonSubmit().click(); + + return this; + } + + public FileManagePage downloadFile(String fileName) { + fileList() + .stream() + .filter(it -> it.getText().contains(fileName)) + .flatMap(it -> it.findElements(By.className("btn-download")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No download button in file manage list")) + .click(); + + return this; + } + + @Getter + public class CreateDirectoryBox { + + CreateDirectoryBox() { + PageFactory.initElements(driver, this); + } + + @FindBys({ + @FindBy(className = "input-directory-name"), + @FindBy(tagName = "input"), + }) + private WebElement inputDirectoryName; + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + } + + @Getter + public class RenameBox { + + RenameBox() { + PageFactory.initElements(driver, this); + } + + @FindBys({ + @FindBy(className = "input-name"), + @FindBy(tagName = "input"), + }) + private WebElement inputName; + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + } + + @Getter + public class CreateFileBox { + + CreateFileBox() { + PageFactory.initElements(driver, this); + } + + @FindBys({ + @FindBy(className = "input-file-name"), + @FindBy(tagName = "input"), + 
}) + private WebElement inputFileName; + + private final CodeEditor codeEditor = new CodeEditor(driver); + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + } + + @Getter + public class EditFileBox { + + EditFileBox() { + PageFactory.initElements(driver, this); + } + + CodeEditor codeEditor = new CodeEditor(driver); + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + } + + @Getter + public class UploadFileBox { + + UploadFileBox() { + PageFactory.initElements(driver, this); + } + + @FindBys({ + @FindBy(className = "btn-upload"), + @FindBy(tagName = "input"), + }) + private WebElement buttonUpload; + + @FindBy(className = "btn-submit") + private WebElement buttonSubmit; + + @FindBy(className = "btn-cancel") + private WebElement buttonCancel; + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FunctionManagePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FunctionManagePage.java deleted file mode 100644 index fd9596e420f5..000000000000 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/FunctionManagePage.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - */ - -package org.apache.dolphinscheduler.e2e.pages.resource; - -import lombok.Getter; - -import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; - -import java.util.List; - -import org.openqa.selenium.By; -import org.openqa.selenium.JavascriptExecutor; -import org.openqa.selenium.Keys; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; -import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -@Getter -public class FunctionManagePage extends NavBarPage implements ResourcePage.Tab { - @FindBy(className = "btn-create-udf-function") - private WebElement buttonCreateUdfFunction; - - @FindBy(className = "items") - private List functionList; - - @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), - }) - private WebElement buttonConfirm; - - private CreateUdfFunctionBox createUdfFunctionBox; - - private RenameUdfFunctionBox renameUdfFunctionBox; - - public FunctionManagePage(RemoteWebDriver driver) { - super(driver); - - createUdfFunctionBox = new CreateUdfFunctionBox(); - - renameUdfFunctionBox = new RenameUdfFunctionBox(); - } - - public FunctionManagePage createUdfFunction(String udfFunctionName, String className, String udfResourceName, String description) { - buttonCreateUdfFunction().click(); - - ((JavascriptExecutor) 
driver).executeScript("arguments[0].click();", createUdfFunctionBox().radioFunctionType()); - - createUdfFunctionBox().inputFunctionName().sendKeys(udfFunctionName); - - createUdfFunctionBox().inputClassName().sendKeys(className); - - createUdfFunctionBox().inputDescription().sendKeys(description); - - createUdfFunctionBox().buttonUdfResourceDropDown().click(); - - createUdfFunctionBox().selectUdfResource() - .stream() - .filter(it -> it.getAttribute("innerHTML").contains(udfResourceName)) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No %s in udf resource list", udfResourceName))) - .click(); - - createUdfFunctionBox().buttonSubmit().click(); - - return this; - } - - public FunctionManagePage renameUdfFunction(String currentName, String afterName) { - functionList() - .stream() - .filter(it -> it.getText().contains(currentName)) - .flatMap(it -> it.findElements(By.className("btn-edit")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No rename button in function manage list")) - .click(); - - renameUdfFunctionBox().inputFunctionName().sendKeys(Keys.CONTROL + "a"); - renameUdfFunctionBox().inputFunctionName().sendKeys(Keys.BACK_SPACE); - renameUdfFunctionBox().inputFunctionName().sendKeys(afterName); - - renameUdfFunctionBox.buttonSubmit().click(); - - return this; - } - - public FunctionManagePage deleteUdfFunction(String udfFunctionName) { - functionList() - .stream() - .filter(it -> it.getText().contains(udfFunctionName)) - .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in udf resource list")) - .click(); - - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); - - return this; - } - - @Getter - public class CreateUdfFunctionBox { - CreateUdfFunctionBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - 
@FindBy(className = "radio-function-type"), - @FindBy(tagName = "input"), - }) - private WebElement radioFunctionType; - - @FindBys({ - @FindBy(className = "input-function-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputFunctionName; - - @FindBys({ - @FindBy(className = "input-class-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputClassName; - - @FindBys({ - @FindBy(className = "btn-udf-resource-dropdown"), - @FindBy(className = "n-base-selection"), - }) - private WebElement buttonUdfResourceDropDown; - - @FindBy(className = "n-tree-node-content__text") - private List selectUdfResource; - - @FindBys({ - @FindBy(className = "input-description"), - @FindBy(tagName = "textarea"), - }) - private WebElement inputDescription; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class RenameUdfFunctionBox { - RenameUdfFunctionBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-function-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputFunctionName; - - @FindBys({ - @FindBy(className = "input-class-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputClassName; - - @FindBys({ - @FindBy(className = "input-description"), - @FindBy(tagName = "textarea"), - }) - private WebElement inputDescription; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } -} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java index 377af2e1aca1..bd79de08297b 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/ResourcePage.java @@ -1,81 +1,61 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * - */ -package org.apache.dolphinscheduler.e2e.pages.resource; - -import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; - -import java.time.Duration; - -import org.openqa.selenium.JavascriptExecutor; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; - - -@Getter -public class ResourcePage extends NavBarPage implements NavBarPage.NavBarItem { - @FindBy(css = ".tab-vertical > .n-menu-item:nth-child(1) > .n-menu-item-content") - private WebElement fileManageTab; - - @FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) > .n-submenu-children > .n-menu-item:nth-of-type(1) > .n-menu-item-content") - private WebElement udfManageTab; - - @FindBy(css = ".tab-vertical .n-submenu:nth-of-type(2) > .n-submenu-children > .n-menu-item:nth-of-type(2) > .n-menu-item-content") - private WebElement functionManageTab; - - public ResourcePage(RemoteWebDriver driver) { - super(driver); - } - - public T goToTab(Class tab) { - if (tab == FileManagePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource")); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(fileManageTab)); - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", fileManageTab()); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/file-manage")); - return tab.cast(new FileManagePage(driver)); - } - - if (tab == UdfManagePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource")); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(udfManageTab)); - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", udfManageTab()); 
- new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource-manage")); - return tab.cast(new UdfManagePage(driver)); - } - - if (tab == FunctionManagePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/resource")); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(functionManageTab)); - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", functionManageTab()); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.urlContains("/function-manage")); - return tab.cast(new FunctionManagePage(driver)); - } - - throw new UnsupportedOperationException("Unknown tab: " + tab.getName()); - } - - public interface Tab { - } -} +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * + */ +package org.apache.dolphinscheduler.e2e.pages.resource; + +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; + +import lombok.Getter; + +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.WebElement; +import org.openqa.selenium.remote.RemoteWebDriver; +import org.openqa.selenium.support.FindBy; +import org.openqa.selenium.support.PageFactory; +import org.openqa.selenium.support.ui.ExpectedConditions; + +@Getter +public class ResourcePage extends NavBarPage implements NavBarPage.NavBarItem { + + @FindBy(css = ".tab-vertical > .n-menu-item:nth-child(1) > .n-menu-item-content") + private WebElement fileManageTab; + + public ResourcePage(RemoteWebDriver driver) { + super(driver); + + PageFactory.initElements(driver, this); + } + + public T goToTab(Class tab) { + if (tab == FileManagePage.class) { + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/resource")); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(fileManageTab)); + ((JavascriptExecutor) driver).executeScript("arguments[0].click();", fileManageTab()); + WebDriverWaitFactory.createWebDriverWait(driver).until(ExpectedConditions.urlContains("/file-manage")); + return tab.cast(new FileManagePage(driver)); + } + + throw new UnsupportedOperationException("Unknown tab: " + tab.getName()); + } + + public interface Tab { + } +} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/UdfManagePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/UdfManagePage.java deleted file mode 100644 index 37e07dec756b..000000000000 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/resource/UdfManagePage.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - 
* Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - */ - -package org.apache.dolphinscheduler.e2e.pages.resource; - -import lombok.Getter; - -import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; - -import java.time.Duration; -import java.util.List; - -import org.openqa.selenium.By; -import org.openqa.selenium.JavascriptExecutor; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.remote.LocalFileDetector; -import org.openqa.selenium.remote.RemoteWebDriver; -import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; -import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -@Getter -public class UdfManagePage extends NavBarPage implements ResourcePage.Tab { - @FindBy(className = "btn-create-directory") - private WebElement buttonCreateDirectory; - - @FindBy(className = "btn-upload-resource") - private WebElement buttonUploadUdf; - - @FindBy(className = "items") - private List udfList; - - @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), - }) - private WebElement buttonConfirm; - - private final 
UploadFileBox uploadFileBox; - - private final RenameBox renameBox; - - private final CreateDirectoryBox createDirectoryBox; - - public UdfManagePage(RemoteWebDriver driver) { - super(driver); - - uploadFileBox = new UploadFileBox(); - - renameBox = new RenameBox(); - - createDirectoryBox = new CreateDirectoryBox(); - } - - public UdfManagePage createDirectory(String name) { - buttonCreateDirectory().click(); - - createDirectoryBox().inputDirectoryName().sendKeys(name); - createDirectoryBox().buttonSubmit().click(); - - return this; - } - - public UdfManagePage uploadFile(String filePath) { - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.elementToBeClickable(buttonUploadUdf)); - - buttonUploadUdf().click(); - - driver.setFileDetector(new LocalFileDetector()); - - uploadFileBox().buttonUpload().sendKeys(filePath); - uploadFileBox().buttonSubmit().click(); - - return this; - } - - public UdfManagePage downloadFile(String fileName) { - udfList() - .stream() - .filter(it -> it.getText().contains(fileName)) - .flatMap(it -> it.findElements(By.className("btn-download")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No download button in udf manage list")) - .click(); - - return this; - } - - public UdfManagePage rename(String currentName, String AfterName) { - udfList() - .stream() - .filter(it -> it.getText().contains(currentName)) - .flatMap(it -> it.findElements(By.className("btn-rename")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No rename button in udf manage list")) - .click(); - - renameBox().inputName().clear(); - renameBox().inputName().sendKeys(AfterName); - renameBox().buttonSubmit().click(); - - return this; - } - - public UdfManagePage delete(String name) { - udfList() - .stream() - .filter(it -> it.getText().contains(name)) - .flatMap(it -> it.findElements(By.className("btn-delete")).stream()) - 
.filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in udf manage list")) - .click(); - - ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); - - return this; - } - - @Getter - public class RenameBox { - RenameBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputName; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class UploadFileBox { - UploadFileBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "btn-upload"), - @FindBy(tagName = "input"), - }) - private WebElement buttonUpload; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } - - @Getter - public class CreateDirectoryBox { - CreateDirectoryBox() { - PageFactory.initElements(driver, this); - } - - @FindBys({ - @FindBy(className = "input-directory-name"), - @FindBy(tagName = "input"), - }) - private WebElement inputDirectoryName; - - @FindBy(className = "btn-submit") - private WebElement buttonSubmit; - - @FindBy(className = "btn-cancel") - private WebElement buttonCancel; - } -} diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/ClusterPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/ClusterPage.java index f95439768abe..8cf54969565c 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/ClusterPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/ClusterPage.java @@ -23,6 +23,8 @@ 
import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.Keys; @@ -31,13 +33,10 @@ import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; @Getter public final class ClusterPage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-cluster") private WebElement buttonCreateCluster; @@ -45,8 +44,8 @@ public final class ClusterPage extends NavBarPage implements SecurityPage.Tab { private List clusterList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; @@ -79,7 +78,6 @@ public ClusterPage update(String oldName, String name, String config, String des .orElseThrow(() -> new RuntimeException("No edit button in cluster list")) .click(); - editClusterForm().inputClusterName().sendKeys(Keys.CONTROL + "a"); editClusterForm().inputClusterName().sendKeys(Keys.BACK_SPACE); editClusterForm().inputClusterName().sendKeys(name); @@ -114,31 +112,32 @@ public ClusterPage delete(String name) { @Getter public class ClusterForm { + ClusterForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-cluster-name"), - @FindBy(tagName = "input"), + @FindBy(className = "input-cluster-name"), + @FindBy(tagName = "input"), }) private WebElement inputClusterName; @FindBys({ - @FindBy(className = "input-cluster-config"), - @FindBy(tagName = "textarea"), + @FindBy(className = "input-cluster-config"), + @FindBy(tagName = "textarea"), }) private WebElement inputClusterConfig; @FindBys({ - @FindBy(className = "input-cluster-desc"), - 
@FindBy(tagName = "input"), + @FindBy(className = "input-cluster-desc"), + @FindBy(tagName = "input"), }) private WebElement inputClusterDesc; @FindBys({ - @FindBy(className = "n-base-selection-tags"), - @FindBy(className = "n-tag__content"), + @FindBy(className = "n-base-selection-tags"), + @FindBy(className = "n-tag__content"), }) private WebElement selectedWorkerGroup; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/EnvironmentPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/EnvironmentPage.java index 5d9f9bea36d9..d8fc86eb04d7 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/EnvironmentPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/EnvironmentPage.java @@ -19,11 +19,16 @@ package org.apache.dolphinscheduler.e2e.pages.security; +import static org.assertj.core.api.Assertions.assertThat; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; -import java.time.Duration; import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.Keys; @@ -32,13 +37,11 @@ import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; - -import lombok.Getter; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; @Getter public final class EnvironmentPage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-environment") private WebElement buttonCreateEnvironment; @@ -46,8 +49,8 @@ public final class 
EnvironmentPage extends NavBarPage implements SecurityPage.Ta private List environmentList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; @@ -67,8 +70,9 @@ public EnvironmentPage create(String name, String config, String desc, String wo createEnvironmentForm().inputEnvironmentDesc().sendKeys(desc); editEnvironmentForm().btnSelectWorkerGroupDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( + "n-base-select-option__content"))); editEnvironmentForm().selectWorkerGroupList() .stream() .filter(it -> it.getText().contains(workerGroup)) @@ -81,17 +85,25 @@ public EnvironmentPage create(String name, String config, String desc, String wo return this; } + public EnvironmentPage createEnvironmentUntilSuccess(String name, String config, String desc, String workerGroup) { + create(name, config, desc, workerGroup); + await().untilAsserted(() -> assertThat(environmentList()) + .as("environment list should contain newly-created environment") + .anyMatch(it -> it.getText().contains(name))); + return this; + } + public EnvironmentPage update(String oldName, String name, String config, String desc, String workerGroup) { environmentList() .stream() - .filter(it -> it.findElement(By.className("environment-name")).getAttribute("innerHTML").contains(oldName)) + .filter(it -> it.findElement(By.className("environment-name")).getAttribute("innerHTML") + .contains(oldName)) .flatMap(it -> it.findElements(By.className("edit")).stream()) .filter(WebElement::isDisplayed) .findFirst() .orElseThrow(() -> new RuntimeException("No edit button in 
environment list")) .click(); - editEnvironmentForm().inputEnvironmentName().sendKeys(Keys.CONTROL + "a"); editEnvironmentForm().inputEnvironmentName().sendKeys(Keys.BACK_SPACE); editEnvironmentForm().inputEnvironmentName().sendKeys(name); @@ -106,8 +118,9 @@ public EnvironmentPage update(String oldName, String name, String config, String if (editEnvironmentForm().selectedWorkerGroup().getAttribute("innerHTML").equals(workerGroup)) { editEnvironmentForm().btnSelectWorkerGroupDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( + "n-base-select-option__content"))); editEnvironmentForm().selectWorkerGroupList() .stream() .filter(it -> it.getText().contains(workerGroup)) @@ -139,25 +152,26 @@ public EnvironmentPage delete(String name) { @Getter public class EnvironmentForm { + EnvironmentForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-environment-name"), - @FindBy(tagName = "input"), + @FindBy(className = "input-environment-name"), + @FindBy(tagName = "input"), }) private WebElement inputEnvironmentName; @FindBys({ - @FindBy(className = "input-environment-config"), - @FindBy(tagName = "textarea"), + @FindBy(className = "input-environment-config"), + @FindBy(tagName = "textarea"), }) private WebElement inputEnvironmentConfig; @FindBys({ - @FindBy(className = "input-environment-desc"), - @FindBy(tagName = "input"), + @FindBy(className = "input-environment-desc"), + @FindBy(tagName = "input"), }) private WebElement inputEnvironmentDesc; @@ -171,8 +185,8 @@ public class EnvironmentForm { private List selectWorkerGroupList; @FindBys({ - @FindBy(className = "n-base-selection-tags"), - @FindBy(className = "n-tag__content"), + @FindBy(className = "n-base-selection-tags"), + 
@FindBy(className = "n-tag__content"), }) private WebElement selectedWorkerGroup; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/NamespacePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/NamespacePage.java index 0d10e345fe46..31b805c125a1 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/NamespacePage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/NamespacePage.java @@ -23,16 +23,17 @@ import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.PageFactory; -import lombok.Getter; - @Getter public final class NamespacePage extends NavBarPage implements SecurityPage.Tab { + @FindBy(id = "btnCreateNamespace") private WebElement buttonCreateNamespace; @@ -59,7 +60,8 @@ public NamespacePage create(String namespaceName, String namespaceValue) { public NamespacePage update(String namespaceName, String editNamespaceName, String editNamespaceValue) { namespaceList() .stream() - .filter(it -> it.findElement(By.className("namespaceName")).getAttribute("innerHTML").contains(namespaceName)) + .filter(it -> it.findElement(By.className("namespaceName")).getAttribute("innerHTML") + .contains(namespaceName)) .flatMap(it -> it.findElements(By.className("edit")).stream()) .filter(WebElement::isDisplayed) .findFirst() @@ -75,6 +77,7 @@ public NamespacePage update(String namespaceName, String editNamespaceName, Stri @Getter public class NamespaceForm { + NamespaceForm() { PageFactory.initElements(driver, this); } diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/QueuePage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/QueuePage.java index 2eb93d351736..bbe9a1719908 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/QueuePage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/QueuePage.java @@ -21,9 +21,10 @@ import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; -import java.security.Key; import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.Keys; import org.openqa.selenium.WebElement; @@ -32,10 +33,9 @@ import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import lombok.Getter; - @Getter public final class QueuePage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-queue") private WebElement buttonCreateQueue; @@ -84,13 +84,14 @@ public QueuePage update(String queueName, String editQueueName, String editQueue @Getter public class QueueForm { + QueueForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-queue-name"), - @FindBy(tagName = "input"), + @FindBy(className = "input-queue-name"), + @FindBy(tagName = "input"), }) private WebElement inputQueueName; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/SecurityPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/SecurityPage.java index 5a5bb9c2776b..093d2201d311 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/SecurityPage.java +++ 
b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/SecurityPage.java @@ -20,20 +20,17 @@ package org.apache.dolphinscheduler.e2e.pages.security; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage.NavBarItem; +import lombok.Getter; + import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.support.FindBy; -import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; - -import java.time.Duration; @Getter public class SecurityPage extends NavBarPage implements NavBarItem { @@ -67,59 +64,76 @@ public SecurityPage(RemoteWebDriver driver) { } public T goToTab(Class tab) { + if (tab == TenantPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuTenantManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuTenantManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menuTenantManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/tenant-manage")); return tab.cast(new TenantPage(driver)); } if (tab == UserPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menUserManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menUserManage)); 
((JavascriptExecutor) driver).executeScript("arguments[0].click();", menUserManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/user-manage")); return tab.cast(new UserPage(driver)); } if (tab == WorkerGroupPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menWorkerGroupManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menWorkerGroupManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menWorkerGroupManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/worker-group-manage")); return tab.cast(new WorkerGroupPage(driver)); } if (tab == QueuePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuQueueManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuQueueManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menuQueueManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/yarn-queue-manage")); return tab.cast(new QueuePage(driver)); } if (tab == EnvironmentPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuEnvironmentManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuEnvironmentManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", 
menuEnvironmentManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/environment-manage")); return tab.cast(new EnvironmentPage(driver)); } if (tab == ClusterPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuClusterManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuClusterManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menuClusterManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/cluster-manage")); return tab.cast(new ClusterPage(driver)); } if (tab == TokenPage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuTokenManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuTokenManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menuTokenManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.urlContains("/security/token-manage")); return tab.cast(new TokenPage(driver)); } if (tab == NamespacePage.class) { - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.urlContains("/security")); - new WebDriverWait(driver, Duration.ofSeconds(60)).until(ExpectedConditions.elementToBeClickable(menuNamespaceManage)); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(menuNamespaceManage)); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", menuNamespaceManage()); + WebDriverWaitFactory.createWebDriverWait(driver) + 
.until(ExpectedConditions.urlContains("/security/k8s-namespace-manage")); return tab.cast(new NamespacePage(driver)); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TenantPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TenantPage.java index cb24af307f70..d1225bd0bec3 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TenantPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TenantPage.java @@ -19,9 +19,14 @@ package org.apache.dolphinscheduler.e2e.pages.security; +import org.apache.dolphinscheduler.e2e.models.tenant.ITenant; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import java.util.List; +import java.util.stream.Collectors; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; @@ -32,10 +37,9 @@ import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import lombok.Getter; - @Getter public final class TenantPage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-tenant") private WebElement buttonCreateTenant; @@ -43,8 +47,8 @@ public final class TenantPage extends NavBarPage implements SecurityPage.Tab { private List tenantList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; @@ -61,11 +65,28 @@ public TenantPage(RemoteWebDriver driver) { editTenantForm = new TenantForm(); } + public List tenants() { + return tenantList.stream() + .filter(WebElement::isDisplayed) + .map(Row::new) + .collect(Collectors.toList()); + } 
+ + public boolean containsTenant(String tenant) { + return tenantList.stream() + .anyMatch(it -> it.findElement(By.className("tenant-code")).getText().contains(tenant)); + } + + public TenantPage create(ITenant tenant) { + return create(tenant.getTenantCode(), tenant.getDescription()); + } + public TenantPage create(String tenant) { return create(tenant, ""); } public TenantPage create(String tenant, String description) { + buttonCreateTenant().click(); tenantForm().inputTenantCode().sendKeys(tenant); tenantForm().inputDescription().sendKeys(description); @@ -76,12 +97,12 @@ public TenantPage create(String tenant, String description) { public TenantPage update(String tenant, String description) { tenantList().stream() - .filter(it -> it.findElement(By.className("tenant-code")).getAttribute("innerHTML").contains(tenant)) - .flatMap(it -> it.findElements(By.className("edit")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No edit button in tenant list")) - .click(); + .filter(it -> it.findElement(By.className("tenant-code")).getAttribute("innerHTML").contains(tenant)) + .flatMap(it -> it.findElements(By.className("edit")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No edit button in tenant list")) + .click(); editTenantForm().inputDescription().sendKeys(Keys.CONTROL + "a"); editTenantForm().inputDescription().sendKeys(Keys.BACK_SPACE); @@ -93,13 +114,13 @@ public TenantPage update(String tenant, String description) { public TenantPage delete(String tenant) { tenantList() - .stream() - .filter(it -> it.getText().contains(tenant)) - .flatMap(it -> it.findElements(By.className("delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in user list")) - .click(); + .stream() + .filter(it -> it.getText().contains(tenant)) + .flatMap(it -> it.findElements(By.className("delete")).stream()) 
+ .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in user list")) + .click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -108,6 +129,7 @@ public TenantPage delete(String tenant) { @Getter public class TenantForm { + TenantForm() { PageFactory.initElements(driver, this); } @@ -133,4 +155,15 @@ public class TenantForm { @FindBy(className = "btn-cancel") private WebElement buttonCancel; } + + @RequiredArgsConstructor + public static class Row { + + private final WebElement row; + + public String tenantCode() { + return row.findElement(By.cssSelector("td[data-col-key=tenantCode]")).getText(); + } + + } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TokenPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TokenPage.java index 5def2ad64ff3..9ba0ceb69da1 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TokenPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/TokenPage.java @@ -19,12 +19,14 @@ package org.apache.dolphinscheduler.e2e.pages.security; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; import org.apache.dolphinscheduler.e2e.pages.security.SecurityPage.Tab; -import java.time.Duration; import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.WebElement; @@ -33,14 +35,12 @@ import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; - -import lombok.Getter; 
import com.google.common.base.Strings; @Getter public final class TokenPage extends NavBarPage implements Tab { + @FindBy(className = "btn-create-token") private WebElement buttonCreateToken; @@ -48,8 +48,8 @@ public final class TokenPage extends NavBarPage implements Tab { private List tokenList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; @@ -69,10 +69,12 @@ public TokenPage(RemoteWebDriver driver) { public TokenPage create(String userName) { buttonCreateToken().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.elementToBeClickable(createTokenForm().selectUserNameDropdown())); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(createTokenForm().selectUserNameDropdown())); createTokenForm().selectUserNameDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( + "n-base-select-option__content"))); createTokenForm().selectUserNameList() .stream() .filter(it -> it.getText().contains(userName)) @@ -81,7 +83,8 @@ public TokenPage create(String userName) { userName))) .click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.elementToBeClickable(createTokenForm().buttonGenerateToken())); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(createTokenForm().buttonGenerateToken())); createTokenForm().buttonGenerateToken().click(); createTokenForm().buttonSubmit().click(); @@ -91,16 +94,18 @@ public TokenPage create(String userName) { public TokenPage update(String userName) 
{ tokenList().stream() - .filter(it -> it.findElement(By.className("username")).getAttribute("innerHTML").contains(userName)) - .flatMap(it -> it.findElements(By.className("edit")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No edit button in token list")) - .click(); - - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.elementToBeClickable(editTokenForm().buttonGenerateToken())); + .filter(it -> it.findElement(By.className("username")).getAttribute("innerHTML").contains(userName)) + .flatMap(it -> it.findElements(By.className("edit")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No edit button in token list")) + .click(); + + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(editTokenForm().buttonGenerateToken())); editTokenForm().buttonGenerateToken().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.elementToBeClickable(editTokenForm().buttonGenerateToken())); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.elementToBeClickable(editTokenForm().buttonGenerateToken())); editTokenForm().buttonSubmit().click(); @@ -109,23 +114,23 @@ public TokenPage update(String userName) { public String getToken(String userName) { return tokenList().stream() - .filter(it -> it.findElement(By.className("username")).getAttribute("innerHTML").contains(userName)) - .flatMap(it -> it.findElements(By.className("token")).stream()) - .filter(it -> !Strings.isNullOrEmpty(it.getAttribute("innerHTML"))) - .map(it -> it.getAttribute("innerHTML")) - .findFirst() - .orElseThrow(() -> new IllegalArgumentException("No token for such user: " + userName)); + .filter(it -> it.findElement(By.className("username")).getAttribute("innerHTML").contains(userName)) + .flatMap(it -> it.findElements(By.className("token")).stream()) + .filter(it -> 
!Strings.isNullOrEmpty(it.getAttribute("innerHTML"))) + .map(it -> it.getAttribute("innerHTML")) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No token for such user: " + userName)); } public TokenPage delete(String userName) { tokenList() - .stream() - .filter(it -> it.getText().contains(userName)) - .flatMap(it -> it.findElements(By.className("delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in token list")) - .click(); + .stream() + .filter(it -> it.getText().contains(userName)) + .flatMap(it -> it.findElements(By.className("delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in token list")) + .click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -134,13 +139,14 @@ public TokenPage delete(String userName) { @Getter public class TokenForm { + TokenForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-username"), - @FindBy(className = "n-base-selection"), + @FindBy(className = "input-username"), + @FindBy(className = "n-base-selection"), }) private WebElement selectUserNameDropdown; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/UserPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/UserPage.java index 26a236ad52c8..bd6d2d7d4998 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/UserPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/UserPage.java @@ -19,11 +19,14 @@ package org.apache.dolphinscheduler.e2e.pages.security; +import org.apache.dolphinscheduler.e2e.core.WebDriverWaitFactory; +import 
org.apache.dolphinscheduler.e2e.models.users.IUser; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; -import java.time.Duration; import java.util.List; +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.Keys; @@ -32,13 +35,11 @@ import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; - -import lombok.Getter; import org.openqa.selenium.support.ui.ExpectedConditions; -import org.openqa.selenium.support.ui.WebDriverWait; @Getter public final class UserPage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-user") private WebElement buttonCreateUser; @@ -46,15 +47,14 @@ public final class UserPage extends NavBarPage implements SecurityPage.Tab { private List userList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; private final UserForm createUserForm = new UserForm(); private final UserForm editUserForm = new UserForm(); - public UserPage(RemoteWebDriver driver) { super(driver); } @@ -67,15 +67,16 @@ public UserPage create(String user, String password, String email, String phone, createUserForm().btnSelectTenantDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( + "n-base-select-option__content"))); createUserForm().selectTenant() - .stream() - .filter(it -> it.getText().contains(tenant)) - .findFirst() - .orElseThrow(() -> new RuntimeException(String.format("No %s in tenant dropdown list", tenant))) - .click(); + .stream() + 
.filter(it -> it.getText().contains(tenant)) + .findFirst() + .orElseThrow(() -> new RuntimeException(String.format("No %s in tenant dropdown list", tenant))) + .click(); createUserForm().inputEmail().sendKeys(email); createUserForm().inputPhone().sendKeys(phone); @@ -84,24 +85,37 @@ public UserPage create(String user, String password, String email, String phone, return this; } - public UserPage update(String user, String editUser, String editEmail, String editPhone, + public UserPage update(IUser user) { + return update( + user.getUserName(), + user.getUserName(), + user.getEmail(), + user.getPhone(), + user.getTenant()); + } + + public UserPage update(String user, + String editUser, + String editEmail, + String editPhone, String tenant) { userList().stream() - .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(user)) - .flatMap(it -> it.findElements(By.className("edit")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No edit button in user list")) - .click(); - - editUserForm().inputUserName().sendKeys(Keys.CONTROL+"a"); + .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(user)) + .flatMap(it -> it.findElements(By.className("edit")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No edit button in user list")) + .click(); + + editUserForm().inputUserName().sendKeys(Keys.CONTROL + "a"); editUserForm().inputUserName().sendKeys(Keys.BACK_SPACE); editUserForm().inputUserName().sendKeys(editUser); createUserForm().btnSelectTenantDropdown().click(); - new WebDriverWait(driver, Duration.ofSeconds(30)).until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( - "n-base-select-option__content"))); + WebDriverWaitFactory.createWebDriverWait(driver) + .until(ExpectedConditions.visibilityOfElementLocated(new By.ByClassName( + "n-base-select-option__content"))); 
createUserForm().selectTenant() .stream() @@ -110,11 +124,11 @@ public UserPage update(String user, String editUser, String editEmail, String ed .orElseThrow(() -> new RuntimeException(String.format("No %s in tenant dropdown list", tenant))) .click(); - editUserForm().inputEmail().sendKeys(Keys.CONTROL+"a"); + editUserForm().inputEmail().sendKeys(Keys.CONTROL + "a"); editUserForm().inputEmail().sendKeys(Keys.BACK_SPACE); editUserForm().inputEmail().sendKeys(editEmail); - editUserForm().inputPhone().sendKeys(Keys.CONTROL+"a"); + editUserForm().inputPhone().sendKeys(Keys.CONTROL + "a"); editUserForm().inputPhone().sendKeys(Keys.BACK_SPACE); editUserForm().inputPhone().sendKeys(editPhone); @@ -125,13 +139,13 @@ public UserPage update(String user, String editUser, String editEmail, String ed public UserPage delete(String user) { userList() - .stream() - .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(user)) - .flatMap(it -> it.findElements(By.className("delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in user list")) - .click(); + .stream() + .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(user)) + .flatMap(it -> it.findElements(By.className("delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in user list")) + .click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -140,25 +154,26 @@ public UserPage delete(String user) { @Getter public class UserForm { + UserForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-username"), - @FindBy(tagName = "input"), + @FindBy(className = "input-username"), + @FindBy(tagName = "input"), }) private WebElement inputUserName; @FindBys({ - @FindBy(className = "input-password"), - @FindBy(tagName = "input"), + @FindBy(className 
= "input-password"), + @FindBy(tagName = "input"), }) private WebElement inputUserPassword; @FindBys({ - @FindBy(className = "select-tenant"), - @FindBy(className = "n-base-selection"), + @FindBy(className = "select-tenant"), + @FindBy(className = "n-base-selection"), }) private WebElement btnSelectTenantDropdown; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/WorkerGroupPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/WorkerGroupPage.java index ced1e5a70948..736db769d9d3 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/WorkerGroupPage.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/security/WorkerGroupPage.java @@ -19,8 +19,12 @@ package org.apache.dolphinscheduler.e2e.pages.security; -import lombok.Getter; import org.apache.dolphinscheduler.e2e.pages.common.NavBarPage; + +import java.util.List; + +import lombok.Getter; + import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.Keys; @@ -30,11 +34,9 @@ import org.openqa.selenium.support.FindBys; import org.openqa.selenium.support.PageFactory; -import java.util.List; - - @Getter public final class WorkerGroupPage extends NavBarPage implements SecurityPage.Tab { + @FindBy(className = "btn-create-worker-group") private WebElement buttonCreateWorkerGroup; @@ -42,16 +44,14 @@ public final class WorkerGroupPage extends NavBarPage implements SecurityPage.Ta private List workerGroupList; @FindBys({ - @FindBy(className = "n-popconfirm__action"), - @FindBy(className = "n-button--primary-type"), + @FindBy(className = "n-popconfirm__action"), + @FindBy(className = "n-button--primary-type"), }) private WebElement buttonConfirm; private final WorkerGroupForm createWorkerForm = new WorkerGroupForm(); private 
final WorkerGroupForm editWorkerForm = new WorkerGroupForm(); - - public WorkerGroupPage(RemoteWebDriver driver) { super(driver); } @@ -87,16 +87,15 @@ public WorkerGroupPage update(String workerGroupName, String editWorkerGroupName return this; } - public WorkerGroupPage delete(String Worker) { workerGroupList() - .stream() - .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(Worker)) - .flatMap(it -> it.findElements(By.className("delete")).stream()) - .filter(WebElement::isDisplayed) - .findFirst() - .orElseThrow(() -> new RuntimeException("No delete button in workerGroup list")) - .click(); + .stream() + .filter(it -> it.findElement(By.className("name")).getAttribute("innerHTML").contains(Worker)) + .flatMap(it -> it.findElements(By.className("delete")).stream()) + .filter(WebElement::isDisplayed) + .findFirst() + .orElseThrow(() -> new RuntimeException("No delete button in workerGroup list")) + .click(); ((JavascriptExecutor) driver).executeScript("arguments[0].click();", buttonConfirm()); @@ -105,19 +104,20 @@ public WorkerGroupPage delete(String Worker) { @Getter public class WorkerGroupForm { + WorkerGroupForm() { PageFactory.initElements(driver, this); } @FindBys({ - @FindBy(className = "input-worker-group-name"), - @FindBy(tagName = "input"), + @FindBy(className = "input-worker-group-name"), + @FindBy(tagName = "input"), }) private WebElement inputWorkerGroupName; @FindBys({ - @FindBy(className = "select-worker-address"), - @FindBy(className = "n-base-selection"), + @FindBy(className = "select-worker-address"), + @FindBy(className = "n-base-selection"), }) private WebElement btnSelectWorkerAddress; diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/basic/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/basic/docker-compose.yaml index 89d645c2c15c..37bebedc647b 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/basic/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/basic/docker-compose.yaml @@ -28,7 +28,7 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-clickhouse/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-clickhouse/docker-compose.yaml index 3b9ccc0f938d..ec108f067ebf 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-clickhouse/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-clickhouse/docker-compose.yaml @@ -28,7 +28,7 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-hive/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-hive/docker-compose.yaml index b7609ff874b6..d28abc8c2279 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-hive/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-hive/docker-compose.yaml @@ -28,7 +28,7 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-mysql/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-mysql/docker-compose.yaml index 099736805e2b..8287eb83de05 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-mysql/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-mysql/docker-compose.yaml @@ -31,7 +31,7 @@ services: - ./download-mysql.sh:/tmp/download-mysql.sh entrypoint: ['bash', '-c', '/bin/bash /tmp/download-mysql.sh && /opt/dolphinscheduler/bin/start.sh && tail -f /dev/null'] healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-postgresql/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-postgresql/docker-compose.yaml index 7d1e38832cf9..878bf2436497 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-postgresql/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-postgresql/docker-compose.yaml @@ -28,9 +28,9 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s - timeout: 300s + timeout: 5s retries: 120 depends_on: postgres: diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-sqlserver/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-sqlserver/docker-compose.yaml index cf94105313c5..0e85692395b4 100644 --- 
a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-sqlserver/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/datasource-sqlserver/docker-compose.yaml @@ -28,7 +28,7 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/aws.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/aws.yaml new file mode 100644 index 000000000000..6d453bb78a07 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/aws.yaml @@ -0,0 +1,65 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +aws: + s3: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: accessKey123 + access.key.secret: secretKey123 + region: us-east-1 + bucket.name: dolphinscheduler + endpoint: http://s3:9000 + emr: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + sagemaker: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + dms: + # The AWS credentials provider type. support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + datasync: + # The AWS credentials provider type. 
support: AWSStaticCredentialsProvider, InstanceProfileCredentialsProvider + # AWSStaticCredentialsProvider: use the access key and secret key to authenticate + # InstanceProfileCredentialsProvider: use the IAM role to authenticate + credentials.provider.type: AWSStaticCredentialsProvider + access.key.id: minioadmin + access.key.secret: minioadmin + region: cn-north-1 + endpoint: http://localhost:9000 + diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties index 7583b3293a6f..604befdbf8ca 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties @@ -37,17 +37,6 @@ resource.azure.tenant.id=minioadmin # The query interval resource.query.interval=10000 -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id=accessKey123 -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=secretKey123 -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=us-east-1 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s3. 
If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://s3:9000 - # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= # alibaba cloud access key secret, required if you set resource.storage.type=OSS diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/docker-compose.yaml index 9a46ed02ad26..49b9de813fa8 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/docker-compose.yaml @@ -28,12 +28,13 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 volumes: - ./common.properties:/opt/dolphinscheduler/conf/common.properties + - ./aws.yaml:/opt/dolphinscheduler/conf/aws.yaml depends_on: s3: condition: service_healthy diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/Dockerfile b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/Dockerfile new file mode 100644 index 000000000000..92103973f18f --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/Dockerfile @@ -0,0 +1,25 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +FROM apache/dolphinscheduler-standalone-server:ci + +RUN apt update \ + && apt install -y software-properties-common \ + && add-apt-repository ppa:deadsnakes/ppa \ + && apt update \ + && apt-get install -y python3.8 libpython3.8-dev python3.8-dev python3.8-distutils \ + && rm -rf /var/lib/apt/lists/* diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/docker-compose.yaml new file mode 100644 index 000000000000..3a287cce3dd8 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/python-task/docker-compose.yaml @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +version: "3.8" + +services: + dolphinscheduler: + image: apache/dolphinscheduler-standalone-server:ci-python + build: + context: . + dockerfile: ./Dockerfile + environment: + MASTER_MAX_CPU_LOAD_AVG: 100 + WORKER_TENANT_AUTO_CREATE: 'true' + ports: + - "12345:12345" + networks: + - e2e + healthcheck: + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] + interval: 5s + timeout: 5s + retries: 120 + +networks: + e2e: diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/workflow-http/docker-compose.yaml b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/workflow-http/docker-compose.yaml index 2591e3d74adf..0f6a4a620526 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/workflow-http/docker-compose.yaml +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/workflow-http/docker-compose.yaml @@ -29,7 +29,7 @@ services: networks: - e2e healthcheck: - test: [ "CMD", "curl", "http://localhost:12345/actuator/health" ] + test: [ "CMD", "curl", "http://localhost:12345/dolphinscheduler/actuator/health" ] interval: 5s timeout: 5s retries: 120 diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/Constants.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/Constants.java index caa3a2b819c4..1eb0fb4bafd6 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/Constants.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/Constants.java @@ -17,13 +17,14 @@ package org.apache.dolphinscheduler.e2e.core; -import lombok.experimental.UtilityClass; - import java.nio.file.Path; import java.nio.file.Paths; +import 
lombok.experimental.UtilityClass; + @UtilityClass public final class Constants { + /** * tmp directory path */ @@ -38,4 +39,8 @@ public final class Constants { * chrome download path in selenium/standalone-chrome-debug container */ public static final String SELENIUM_CONTAINER_CHROME_DOWNLOAD_PATH = "/home/seluser/Downloads"; + + public static final String LINE_SEPARATOR = "\n"; + + public static final long DEFAULT_SLEEP_MILLISECONDS = 500; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinScheduler.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinScheduler.java index 8d49ca3b9112..2feab903d7ee 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinScheduler.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinScheduler.java @@ -37,5 +37,6 @@ @TestMethodOrder(OrderAnnotation.class) @ExtendWith(DolphinSchedulerExtension.class) public @interface DolphinScheduler { + String[] composeFiles(); } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinSchedulerExtension.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinSchedulerExtension.java index 21c740952cd2..973af59e12ff 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinSchedulerExtension.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/DolphinSchedulerExtension.java @@ -37,6 +37,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.BeforeAllCallback; import 
org.junit.jupiter.api.extension.BeforeEachCallback; @@ -45,6 +47,7 @@ import org.openqa.selenium.chrome.ChromeOptions; import org.openqa.selenium.remote.RemoteWebDriver; import org.testcontainers.Testcontainers; +import org.testcontainers.containers.BindMode; import org.testcontainers.containers.BrowserWebDriverContainer; import org.testcontainers.containers.ComposeContainer; import org.testcontainers.containers.wait.strategy.Wait; @@ -54,10 +57,9 @@ import com.google.common.base.Strings; import com.google.common.net.HostAndPort; -import lombok.extern.slf4j.Slf4j; - @Slf4j final class DolphinSchedulerExtension implements BeforeAllCallback, AfterAllCallback, BeforeEachCallback { + private final boolean LOCAL_MODE = Objects.equals(System.getProperty("local"), "true"); private final boolean M1_CHIP_FLAG = Objects.equals(System.getProperty("m1_chip"), "true"); @@ -79,8 +81,8 @@ final class DolphinSchedulerExtension implements BeforeAllCallback, AfterAllCall @Override @SuppressWarnings("UnstableApiUsage") public void beforeAll(ExtensionContext context) throws IOException { - Awaitility.setDefaultTimeout(Duration.ofSeconds(60)); - Awaitility.setDefaultPollInterval(Duration.ofSeconds(2)); + Awaitility.setDefaultTimeout(Duration.ofSeconds(120)); + Awaitility.setDefaultPollInterval(Duration.ofMillis(500)); setRecordPath(); @@ -97,14 +99,17 @@ public void beforeAll(ExtensionContext context) throws IOException { browser.withAccessToHost(true); } browser.start(); - - driver = new RemoteWebDriver(browser.getSeleniumAddress(), new ChromeOptions()); + ChromeOptions chromeOptions = new ChromeOptions(); + chromeOptions.addArguments("--allow-running-insecure-content"); + chromeOptions.addArguments(String.format("--unsafely-treat-insecure-origin-as-secure=http://%s:%s", + address.getHost(), address.getPort())); + driver = new RemoteWebDriver(browser.getSeleniumAddress(), chromeOptions); driver.manage().timeouts() - .implicitlyWait(Duration.ofSeconds(10)) - 
.pageLoadTimeout(Duration.ofSeconds(10)); + .implicitlyWait(Duration.ofSeconds(1)) + .pageLoadTimeout(Duration.ofSeconds(5)); driver.manage().window() - .maximize(); + .maximize(); driver.get(new URL("http", address.getHost(), address.getPort(), rootPath).toString()); @@ -112,9 +117,10 @@ public void beforeAll(ExtensionContext context) throws IOException { final Class clazz = context.getRequiredTestClass(); Stream.of(clazz.getDeclaredFields()) - .filter(it -> Modifier.isStatic(it.getModifiers())) - .filter(f -> WebDriver.class.isAssignableFrom(f.getType())) - .forEach(it -> setDriver(clazz, it)); + .filter(it -> Modifier.isStatic(it.getModifiers())) + .filter(f -> WebDriver.class.isAssignableFrom(f.getType())) + .forEach(it -> setDriver(clazz, it)); + WebDriverHolder.setWebDriver(driver); } private void runInLocal() { @@ -127,29 +133,40 @@ private void runInDockerContainer(ExtensionContext context) { compose = createDockerCompose(context); compose.start(); - address = HostAndPort.fromParts("host.testcontainers.internal", compose.getServicePort(serviceName, DOCKER_PORT)); + address = + HostAndPort.fromParts("host.testcontainers.internal", compose.getServicePort(serviceName, DOCKER_PORT)); rootPath = "/dolphinscheduler/ui/"; } private void setBrowserContainerByOsName() { DockerImageName imageName; - if (LOCAL_MODE && M1_CHIP_FLAG) { - imageName = DockerImageName.parse("seleniarm/standalone-chromium:4.1.2-20220227") + if (M1_CHIP_FLAG) { + imageName = DockerImageName.parse("seleniarm/standalone-chromium:124.0-chromedriver-124.0") .asCompatibleSubstituteFor("selenium/standalone-chrome"); + if (!Files.exists(Constants.HOST_CHROME_DOWNLOAD_PATH)) { + try { + Files.createDirectories(Constants.HOST_CHROME_DOWNLOAD_PATH); + } catch (IOException e) { + log.error("Failed to create chrome download directory: {}", Constants.HOST_CHROME_DOWNLOAD_PATH); + throw new RuntimeException(e); + } + } + browser = new BrowserWebDriverContainer<>(imageName) .withCapabilities(new 
ChromeOptions()) .withCreateContainerCmdModifier(cmd -> cmd.withUser("root")) .withFileSystemBind(Constants.HOST_CHROME_DOWNLOAD_PATH.toFile().getAbsolutePath(), - Constants.SELENIUM_CONTAINER_CHROME_DOWNLOAD_PATH) + Constants.SELENIUM_CONTAINER_CHROME_DOWNLOAD_PATH, BindMode.READ_WRITE) + .withRecordingMode(RECORD_ALL, record.toFile(), MP4) .withStartupTimeout(Duration.ofSeconds(300)); } else { browser = new BrowserWebDriverContainer<>() .withCapabilities(new ChromeOptions()) .withCreateContainerCmdModifier(cmd -> cmd.withUser("root")) .withFileSystemBind(Constants.HOST_CHROME_DOWNLOAD_PATH.toFile().getAbsolutePath(), - Constants.SELENIUM_CONTAINER_CHROME_DOWNLOAD_PATH) + Constants.SELENIUM_CONTAINER_CHROME_DOWNLOAD_PATH, BindMode.READ_WRITE) .withRecordingMode(RECORD_ALL, record.toFile(), MP4) .withStartupTimeout(Duration.ofSeconds(300)); } @@ -181,8 +198,8 @@ public void afterAll(ExtensionContext context) { public void beforeEach(ExtensionContext context) { final Object instance = context.getRequiredTestInstance(); Stream.of(instance.getClass().getDeclaredFields()) - .filter(f -> WebDriver.class.isAssignableFrom(f.getType())) - .forEach(it -> setDriver(instance, it)); + .filter(f -> WebDriver.class.isAssignableFrom(f.getType())) + .forEach(it -> setDriver(instance, it)); } private void setDriver(Object object, Field field) { @@ -190,7 +207,7 @@ private void setDriver(Object object, Field field) { field.setAccessible(true); field.set(object, driver); } catch (IllegalAccessException e) { - LOGGER.error("Failed to inject web driver to field: {}", field.getName(), e); + log.error("Failed to inject web driver to field: {}", field.getName(), e); } } @@ -198,22 +215,21 @@ private ComposeContainer createDockerCompose(ExtensionContext context) { final Class clazz = context.getRequiredTestClass(); final DolphinScheduler annotation = clazz.getAnnotation(DolphinScheduler.class); final List files = Stream.of(annotation.composeFiles()) - .map(it -> 
DolphinScheduler.class.getClassLoader().getResource(it)) - .filter(Objects::nonNull) - .map(URL::getPath) - .map(File::new) - .collect(Collectors.toList()); - - ComposeContainer compose = new ComposeContainer(files) + .map(it -> DolphinScheduler.class.getClassLoader().getResource(it)) + .filter(Objects::nonNull) + .map(URL::getPath) + .map(File::new) + .collect(Collectors.toList()); + + ComposeContainer compose = new ComposeContainer(files) .withPull(true) .withTailChildContainers(true) .withLocalCompose(true) .withExposedService( serviceName, DOCKER_PORT, Wait.forListeningPort().withStartupTimeout(Duration.ofSeconds(300))) - .withLogConsumer(serviceName, outputFrame -> LOGGER.info(outputFrame.getUtf8String())) + .withLogConsumer(serviceName, outputFrame -> log.info(outputFrame.getUtf8String())) .waitingFor(serviceName, Wait.forHealthcheck().withStartupTimeout(Duration.ofSeconds(300))); - return compose; } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/TestDescription.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/TestDescription.java index 59ef66b7f6ae..fbf277f2a46b 100644 --- a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/TestDescription.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/TestDescription.java @@ -24,12 +24,13 @@ import java.io.UnsupportedEncodingException; import java.net.URLEncoder; -import org.junit.jupiter.api.extension.ExtensionContext; - import lombok.RequiredArgsConstructor; +import org.junit.jupiter.api.extension.ExtensionContext; + @RequiredArgsConstructor final class TestDescription implements org.testcontainers.lifecycle.TestDescription { + private static final String UNKNOWN_NAME = "unknown"; private final ExtensionContext context; diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/BlockingOpportunity.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverHolder.java similarity index 71% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/BlockingOpportunity.java rename to dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverHolder.java index 5fc7c5073fb1..48e8c53e928c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/BlockingOpportunity.java +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverHolder.java @@ -15,21 +15,20 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.enums; +package org.apache.dolphinscheduler.e2e.core; -public enum BlockingOpportunity { +import org.openqa.selenium.remote.RemoteWebDriver; - BLOCKING_ON_SUCCESS("BlockingOnSuccess"), - BLOCKING_ON_FAILED("BlockingOnFailed"); +public class WebDriverHolder { - private final String desc; + public static RemoteWebDriver browser; - BlockingOpportunity(String desc) { - this.desc = desc; + public static void setWebDriver(RemoteWebDriver driver) { + browser = driver; } - public String getDesc() { - return desc; + public static RemoteWebDriver getWebDriver() { + return browser; } } diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverWaitFactory.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverWaitFactory.java new file mode 100644 index 000000000000..2d9f47eb23a8 --- /dev/null +++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-core/src/main/java/org/apache/dolphinscheduler/e2e/core/WebDriverWaitFactory.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.e2e.core; + +import java.time.Duration; + +import org.openqa.selenium.WebDriver; +import org.openqa.selenium.support.ui.WebDriverWait; + +public class WebDriverWaitFactory { + + private static final Duration DEFAULT_INTERVAL = Duration.ofMillis(500); + + private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(10); + + /** + * Create a WebDriverWait instance with default timeout 60s and interval 100ms. 
+ */ + public static WebDriverWait createWebDriverWait(WebDriver driver) { + return createWebDriverWait(driver, DEFAULT_TIMEOUT); + } + + public static WebDriverWait createWebDriverWait(WebDriver driver, Duration timeout) { + return new WebDriverWait(driver, timeout, DEFAULT_INTERVAL); + } + + public static WebDriverWait createWebDriverWait(WebDriver driver, Duration timeout, Duration interval) { + return new WebDriverWait(driver, timeout, interval); + } + +} diff --git a/dolphinscheduler-e2e/lombok.config b/dolphinscheduler-e2e/lombok.config index 0056b8f78b64..cc34fa2311d7 100644 --- a/dolphinscheduler-e2e/lombok.config +++ b/dolphinscheduler-e2e/lombok.config @@ -16,5 +16,5 @@ # lombok.accessors.fluent=true -lombok.log.fieldname=LOGGER +lombok.log.fieldname=log lombok.accessors.fluent=true diff --git a/dolphinscheduler-e2e/pom.xml b/dolphinscheduler-e2e/pom.xml index c7ce90b1a62f..3ee8886acb13 100644 --- a/dolphinscheduler-e2e/pom.xml +++ b/dolphinscheduler-e2e/pom.xml @@ -31,18 +31,20 @@ - 8 - 8 + 11 + 11 UTF-8 5.8.1 - 4.6.0 + 4.21.0 1.18.20 3.20.2 1.5.30 1.7.36 2.17.2 31.0.1-jre + 2.22.2 + 1.19.8 @@ -119,7 +121,7 @@ org.testcontainers testcontainers-bom - 1.19.3 + ${testcontainers.version} import pom @@ -131,7 +133,7 @@ org.apache.maven.plugins maven-surefire-plugin - 2.22.2 + ${maven-surefire-plugin.version} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-alert/src/main/java/org/apache/dolphinscheduler/extract/alert/request/AlertSendResponse.java b/dolphinscheduler-extract/dolphinscheduler-extract-alert/src/main/java/org/apache/dolphinscheduler/extract/alert/request/AlertSendResponse.java index 832f3e1fabda..e1db2a233e20 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-alert/src/main/java/org/apache/dolphinscheduler/extract/alert/request/AlertSendResponse.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-alert/src/main/java/org/apache/dolphinscheduler/extract/alert/request/AlertSendResponse.java @@ -37,6 +37,14 @@ 
public class AlertSendResponse { private List resResults; + public static AlertSendResponse success(List resResults) { + return new AlertSendResponse(true, resResults); + } + + public static AlertSendResponse fail(List resResults) { + return new AlertSendResponse(false, resResults); + } + @Data @NoArgsConstructor @AllArgsConstructor @@ -46,6 +54,14 @@ public static class AlertSendResponseResult implements Serializable { private String message; + public static AlertSendResponseResult success() { + return new AlertSendResponseResult(true, null); + } + + public static AlertSendResponseResult fail(String message) { + return new AlertSendResponseResult(false, message); + } + } } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/pom.xml b/dolphinscheduler-extract/dolphinscheduler-extract-base/pom.xml index 9501d55706d9..0554ea3c3cee 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/pom.xml +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/pom.xml @@ -47,6 +47,13 @@ dolphinscheduler-common ${project.version} + + + org.apache.dolphinscheduler + dolphinscheduler-meter + ${project.version} + + io.netty netty-all diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClient.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClient.java deleted file mode 100644 index e4682f5224be..000000000000 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClient.java +++ /dev/null @@ -1,292 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.extract.base; - -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.extract.base.config.NettyClientConfig; -import org.apache.dolphinscheduler.extract.base.exception.RemotingException; -import org.apache.dolphinscheduler.extract.base.exception.RemotingTimeoutException; -import org.apache.dolphinscheduler.extract.base.exception.RemotingTooMuchRequestException; -import org.apache.dolphinscheduler.extract.base.future.InvokeCallback; -import org.apache.dolphinscheduler.extract.base.future.ReleaseSemaphore; -import org.apache.dolphinscheduler.extract.base.future.ResponseFuture; -import org.apache.dolphinscheduler.extract.base.protocal.Transporter; -import org.apache.dolphinscheduler.extract.base.protocal.TransporterDecoder; -import org.apache.dolphinscheduler.extract.base.protocal.TransporterEncoder; -import org.apache.dolphinscheduler.extract.base.utils.CallerThreadExecutePolicy; -import org.apache.dolphinscheduler.extract.base.utils.Constants; -import org.apache.dolphinscheduler.extract.base.utils.Host; -import org.apache.dolphinscheduler.extract.base.utils.NettyUtils; - -import java.net.InetSocketAddress; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import 
java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.Semaphore; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import lombok.extern.slf4j.Slf4j; -import io.netty.bootstrap.Bootstrap; -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; -import io.netty.channel.ChannelInitializer; -import io.netty.channel.ChannelOption; -import io.netty.channel.EventLoopGroup; -import io.netty.channel.epoll.Epoll; -import io.netty.channel.epoll.EpollEventLoopGroup; -import io.netty.channel.nio.NioEventLoopGroup; -import io.netty.channel.socket.SocketChannel; -import io.netty.handler.timeout.IdleStateHandler; - -@Slf4j -public class NettyRemotingClient implements AutoCloseable { - - private final Bootstrap bootstrap = new Bootstrap(); - - private final ConcurrentHashMap channels = new ConcurrentHashMap<>(128); - - private final AtomicBoolean isStarted = new AtomicBoolean(false); - - private final EventLoopGroup workerGroup; - - private final NettyClientConfig clientConfig; - - private final Semaphore asyncSemaphore = new Semaphore(1024, true); - - private final ExecutorService callbackExecutor; - - private final NettyClientHandler clientHandler; - - private final ScheduledExecutorService responseFutureExecutor; - - public NettyRemotingClient(final NettyClientConfig clientConfig) { - this.clientConfig = clientConfig; - ThreadFactory nettyClientThreadFactory = ThreadUtils.newDaemonThreadFactory("NettyClientThread-"); - if (Epoll.isAvailable()) { - this.workerGroup = new EpollEventLoopGroup(clientConfig.getWorkerThreads(), nettyClientThreadFactory); - } else { - this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), nettyClientThreadFactory); - } - this.callbackExecutor = new ThreadPoolExecutor( - Constants.CPUS, - Constants.CPUS, - 1, - TimeUnit.MINUTES, - new 
LinkedBlockingQueue<>(1000), - ThreadUtils.newDaemonThreadFactory("NettyClientCallbackThread-"), - new CallerThreadExecutePolicy()); - this.clientHandler = new NettyClientHandler(this, callbackExecutor); - - this.responseFutureExecutor = Executors.newSingleThreadScheduledExecutor( - ThreadUtils.newDaemonThreadFactory("NettyClientResponseFutureThread-")); - - this.start(); - } - - private void start() { - - this.bootstrap - .group(this.workerGroup) - .channel(NettyUtils.getSocketChannelClass()) - .option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive()) - .option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay()) - .option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize()) - .option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize()) - .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, clientConfig.getConnectTimeoutMillis()) - .handler(new ChannelInitializer() { - - @Override - public void initChannel(SocketChannel ch) { - ch.pipeline() - .addLast("client-idle-handler", - new IdleStateHandler( - Constants.NETTY_CLIENT_HEART_BEAT_TIME, - 0, - 0, - TimeUnit.MILLISECONDS)) - .addLast(new TransporterDecoder(), clientHandler, new TransporterEncoder()); - } - }); - this.responseFutureExecutor.scheduleWithFixedDelay(ResponseFuture::scanFutureTable, 0, 1, TimeUnit.SECONDS); - isStarted.compareAndSet(false, true); - } - - public void sendAsync(final Host host, - final Transporter transporter, - final long timeoutMillis, - final InvokeCallback invokeCallback) throws InterruptedException, RemotingException { - final Channel channel = getChannel(host); - if (channel == null) { - throw new RemotingException("network error"); - } - /* - * request unique identification - */ - final long opaque = transporter.getHeader().getOpaque(); - /* - * control concurrency number - */ - boolean acquired = this.asyncSemaphore.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS); - if (acquired) { - final ReleaseSemaphore releaseSemaphore = new 
ReleaseSemaphore(this.asyncSemaphore); - - /* - * response future - */ - final ResponseFuture responseFuture = new ResponseFuture(opaque, - timeoutMillis, - invokeCallback, - releaseSemaphore); - try { - channel.writeAndFlush(transporter).addListener(future -> { - if (future.isSuccess()) { - responseFuture.setSendOk(true); - return; - } else { - responseFuture.setSendOk(false); - } - responseFuture.setCause(future.cause()); - responseFuture.putResponse(null); - try { - responseFuture.executeInvokeCallback(); - } catch (Exception ex) { - log.error("execute callback error", ex); - } finally { - responseFuture.release(); - } - }); - } catch (Exception ex) { - responseFuture.release(); - throw new RemotingException(String.format("Send transporter to host: %s failed", host), ex); - } - } else { - String message = String.format( - "try to acquire async semaphore timeout: %d, waiting thread num: %d, total permits: %d", - timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits()); - throw new RemotingTooMuchRequestException(message); - } - } - - public IRpcResponse sendSync(final Host host, final Transporter transporter, - final long timeoutMillis) throws InterruptedException, RemotingException { - final Channel channel = getChannel(host); - if (channel == null) { - throw new RemotingException(String.format("connect to : %s fail", host)); - } - final long opaque = transporter.getHeader().getOpaque(); - final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis, null, null); - channel.writeAndFlush(transporter).addListener(future -> { - if (future.isSuccess()) { - responseFuture.setSendOk(true); - return; - } else { - responseFuture.setSendOk(false); - } - responseFuture.setCause(future.cause()); - responseFuture.putResponse(null); - log.error("Send Sync request {} to host {} failed", transporter, host, responseFuture.getCause()); - }); - /* - * sync wait for result - */ - IRpcResponse iRpcResponse = responseFuture.waitResponse(); 
- if (iRpcResponse == null) { - if (responseFuture.isSendOK()) { - throw new RemotingTimeoutException(host.toString(), timeoutMillis, responseFuture.getCause()); - } else { - throw new RemotingException(host.toString(), responseFuture.getCause()); - } - } - return iRpcResponse; - } - - public Channel getChannel(Host host) { - Channel channel = channels.get(host); - if (channel != null && channel.isActive()) { - return channel; - } - return createChannel(host, true); - } - - /** - * create channel - * - * @param host host - * @param isSync sync flag - * @return channel - */ - public Channel createChannel(Host host, boolean isSync) { - ChannelFuture future; - try { - synchronized (bootstrap) { - future = bootstrap.connect(new InetSocketAddress(host.getIp(), host.getPort())); - } - if (isSync) { - future.sync(); - } - if (future.isSuccess()) { - Channel channel = future.channel(); - channels.put(host, channel); - return channel; - } - } catch (Exception ex) { - log.warn(String.format("connect to %s error", host), ex); - } - return null; - } - - @Override - public void close() { - if (isStarted.compareAndSet(true, false)) { - try { - closeChannels(); - if (workerGroup != null) { - this.workerGroup.shutdownGracefully(); - } - if (callbackExecutor != null) { - this.callbackExecutor.shutdownNow(); - } - if (this.responseFutureExecutor != null) { - this.responseFutureExecutor.shutdownNow(); - } - log.info("netty client closed"); - } catch (Exception ex) { - log.error("netty client close exception", ex); - } - } - } - - private void closeChannels() { - for (Channel channel : this.channels.values()) { - channel.close(); - } - this.channels.clear(); - } - - public void closeChannel(Host host) { - Channel channel = this.channels.remove(host); - if (channel != null) { - channel.close(); - } - } -} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/RpcMethod.java 
b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/RpcMethod.java index cd1b778c9a0d..2dcf689ad870 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/RpcMethod.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/RpcMethod.java @@ -28,6 +28,6 @@ @Documented public @interface RpcMethod { - long timeout() default 3000L; + long timeout() default -1; } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/SyncRequestDto.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/SyncRequestDto.java new file mode 100644 index 000000000000..7df649cd5790 --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/SyncRequestDto.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.extract.base; + +import org.apache.dolphinscheduler.extract.base.protocal.Transporter; +import org.apache.dolphinscheduler.extract.base.utils.Host; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class SyncRequestDto { + + private Host serverHost; + private Transporter transporter; + private long timeoutMillis; + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/BaseRemoteMethodInvoker.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/AbstractClientMethodInvoker.java similarity index 83% rename from dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/BaseRemoteMethodInvoker.java rename to dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/AbstractClientMethodInvoker.java index 519dd87199f1..b753f1efa773 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/BaseRemoteMethodInvoker.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/AbstractClientMethodInvoker.java @@ -17,12 +17,11 @@ package org.apache.dolphinscheduler.extract.base.client; -import org.apache.dolphinscheduler.extract.base.NettyRemotingClient; import org.apache.dolphinscheduler.extract.base.utils.Host; import java.lang.reflect.Method; -public abstract class BaseRemoteMethodInvoker implements ClientMethodInvoker { +abstract class AbstractClientMethodInvoker implements ClientMethodInvoker { protected final String methodIdentifier; @@ -32,7 +31,7 @@ public abstract class BaseRemoteMethodInvoker 
implements ClientMethodInvoker { protected final Host serverHost; - public BaseRemoteMethodInvoker(Host serverHost, Method localMethod, NettyRemotingClient nettyRemotingClient) { + AbstractClientMethodInvoker(Host serverHost, Method localMethod, NettyRemotingClient nettyRemotingClient) { this.serverHost = serverHost; this.localMethod = localMethod; this.nettyRemotingClient = nettyRemotingClient; diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientInvocationHandler.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientInvocationHandler.java index d5c9ab73d3d4..41ec3e056d19 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientInvocationHandler.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientInvocationHandler.java @@ -19,7 +19,6 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.dolphinscheduler.extract.base.NettyRemotingClient; import org.apache.dolphinscheduler.extract.base.RpcMethod; import org.apache.dolphinscheduler.extract.base.utils.Host; @@ -31,7 +30,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class ClientInvocationHandler implements InvocationHandler { +class ClientInvocationHandler implements InvocationHandler { private final NettyRemotingClient nettyRemotingClient; @@ -39,7 +38,7 @@ public class ClientInvocationHandler implements InvocationHandler { private final Host serverHost; - public ClientInvocationHandler(Host serverHost, NettyRemotingClient nettyRemotingClient) { + ClientInvocationHandler(Host serverHost, NettyRemotingClient nettyRemotingClient) { this.serverHost = checkNotNull(serverHost); this.nettyRemotingClient = checkNotNull(nettyRemotingClient); this.methodInvokerMap = new 
ConcurrentHashMap<>(); diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientMethodInvoker.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientMethodInvoker.java index dcf53b0311d8..a287fd95ce97 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientMethodInvoker.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/ClientMethodInvoker.java @@ -19,7 +19,7 @@ import java.lang.reflect.Method; -public interface ClientMethodInvoker { +interface ClientMethodInvoker { Object invoke(Object proxy, Method method, Object[] args) throws Throwable; diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/IRpcClientProxyFactory.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/IRpcClientProxyFactory.java index e60b0f18b0ad..afd3adf34888 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/IRpcClientProxyFactory.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/IRpcClientProxyFactory.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.extract.base.client; -public interface IRpcClientProxyFactory { +interface IRpcClientProxyFactory { /** * Create the client proxy. 
diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/JdkDynamicRpcClientProxyFactory.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/JdkDynamicRpcClientProxyFactory.java index 5635a88f344a..bf329ab3fc96 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/JdkDynamicRpcClientProxyFactory.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/JdkDynamicRpcClientProxyFactory.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.extract.base.client; -import org.apache.dolphinscheduler.extract.base.NettyRemotingClient; import org.apache.dolphinscheduler.extract.base.utils.Host; import java.lang.reflect.Proxy; @@ -34,7 +33,7 @@ /** * This class is used to create a proxy client which will transform local method invocation to remove invocation. 
*/ -public class JdkDynamicRpcClientProxyFactory implements IRpcClientProxyFactory { +class JdkDynamicRpcClientProxyFactory implements IRpcClientProxyFactory { private final NettyRemotingClient nettyRemotingClient; @@ -49,7 +48,7 @@ public Map load(String key) { } }); - public JdkDynamicRpcClientProxyFactory(NettyRemotingClient nettyRemotingClient) { + JdkDynamicRpcClientProxyFactory(NettyRemotingClient nettyRemotingClient) { this.nettyRemotingClient = nettyRemotingClient; } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyClientHandler.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyClientHandler.java similarity index 84% rename from dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyClientHandler.java rename to dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyClientHandler.java index b0d998af83ee..5f50e2441cd6 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyClientHandler.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyClientHandler.java @@ -15,16 +15,15 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.extract.base; +package org.apache.dolphinscheduler.extract.base.client; +import org.apache.dolphinscheduler.extract.base.StandardRpcResponse; import org.apache.dolphinscheduler.extract.base.future.ResponseFuture; import org.apache.dolphinscheduler.extract.base.protocal.HeartBeatTransporter; import org.apache.dolphinscheduler.extract.base.protocal.Transporter; import org.apache.dolphinscheduler.extract.base.serialize.JsonSerializer; import org.apache.dolphinscheduler.extract.base.utils.ChannelUtils; -import java.util.concurrent.ExecutorService; - import lombok.extern.slf4j.Slf4j; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandler; @@ -38,11 +37,8 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { private final NettyRemotingClient nettyRemotingClient; - private final ExecutorService callbackExecutor; - - public NettyClientHandler(NettyRemotingClient nettyRemotingClient, ExecutorService callbackExecutor) { + public NettyClientHandler(NettyRemotingClient nettyRemotingClient) { this.nettyRemotingClient = nettyRemotingClient; - this.callbackExecutor = callbackExecutor; } @Override @@ -64,13 +60,7 @@ private void processReceived(final Transporter transporter) { } StandardRpcResponse deserialize = JsonSerializer.deserialize(transporter.getBody(), StandardRpcResponse.class); future.setIRpcResponse(deserialize); - future.release(); - if (future.getInvokeCallback() != null) { - future.removeFuture(); - this.callbackExecutor.execute(future::executeInvokeCallback); - } else { - future.putResponse(deserialize); - } + future.putResponse(deserialize); } @Override @@ -87,7 +77,7 @@ public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exc .writeAndFlush(HeartBeatTransporter.getHeartBeatTransporter()) .addListener(ChannelFutureListener.CLOSE_ON_FAILURE); if (log.isDebugEnabled()) { - log.debug("Client send heart beat to: {}", 
ChannelUtils.getRemoteAddress(ctx.channel())); + log.info("Client send heartbeat to: {}", ctx.channel().remoteAddress()); } } else { super.userEventTriggered(ctx, evt); diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClient.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClient.java new file mode 100644 index 000000000000..4aea4d6dfe9c --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClient.java @@ -0,0 +1,244 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.extract.base.client; + +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.extract.base.IRpcResponse; +import org.apache.dolphinscheduler.extract.base.SyncRequestDto; +import org.apache.dolphinscheduler.extract.base.config.NettyClientConfig; +import org.apache.dolphinscheduler.extract.base.exception.RemotingException; +import org.apache.dolphinscheduler.extract.base.exception.RemotingTimeoutException; +import org.apache.dolphinscheduler.extract.base.future.ResponseFuture; +import org.apache.dolphinscheduler.extract.base.metrics.ClientSyncDurationMetrics; +import org.apache.dolphinscheduler.extract.base.metrics.ClientSyncExceptionMetrics; +import org.apache.dolphinscheduler.extract.base.metrics.RpcMetrics; +import org.apache.dolphinscheduler.extract.base.protocal.Transporter; +import org.apache.dolphinscheduler.extract.base.protocal.TransporterDecoder; +import org.apache.dolphinscheduler.extract.base.protocal.TransporterEncoder; +import org.apache.dolphinscheduler.extract.base.utils.Host; +import org.apache.dolphinscheduler.extract.base.utils.NettyUtils; + +import java.net.InetSocketAddress; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.ReentrantLock; + +import lombok.extern.slf4j.Slf4j; +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.epoll.Epoll; +import io.netty.channel.epoll.EpollEventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.timeout.IdleStateHandler; + +@Slf4j +public class 
NettyRemotingClient implements AutoCloseable { + + private final Bootstrap bootstrap = new Bootstrap(); + + private final ReentrantLock channelsLock = new ReentrantLock(); + private final Map channels = new ConcurrentHashMap<>(); + + private final AtomicBoolean isStarted = new AtomicBoolean(false); + + private final EventLoopGroup workerGroup; + + private final NettyClientConfig clientConfig; + + private final NettyClientHandler clientHandler; + + public NettyRemotingClient(final NettyClientConfig clientConfig) { + this.clientConfig = clientConfig; + ThreadFactory nettyClientThreadFactory = ThreadUtils.newDaemonThreadFactory("NettyClientThread-"); + if (Epoll.isAvailable()) { + this.workerGroup = new EpollEventLoopGroup(clientConfig.getWorkerThreads(), nettyClientThreadFactory); + } else { + this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), nettyClientThreadFactory); + } + this.clientHandler = new NettyClientHandler(this); + + this.start(); + } + + private void start() { + + this.bootstrap + .group(this.workerGroup) + .channel(NettyUtils.getSocketChannelClass()) + .option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive()) + .option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay()) + .option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize()) + .option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize()) + .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, clientConfig.getConnectTimeoutMillis()) + .handler(new ChannelInitializer() { + + @Override + public void initChannel(SocketChannel ch) { + ch.pipeline() + .addLast("client-idle-handler", + new IdleStateHandler( + 0, + clientConfig.getHeartBeatIntervalMillis(), + 0, + TimeUnit.MILLISECONDS)) + .addLast(new TransporterDecoder(), clientHandler, new TransporterEncoder()); + } + }); + isStarted.compareAndSet(false, true); + } + + public IRpcResponse sendSync(SyncRequestDto syncRequestDto) throws RemotingException { + long start = System.currentTimeMillis(); + + final 
Host host = syncRequestDto.getServerHost(); + final Transporter transporter = syncRequestDto.getTransporter(); + final long timeoutMillis = syncRequestDto.getTimeoutMillis() < 0 ? clientConfig.getConnectTimeoutMillis() + : syncRequestDto.getTimeoutMillis(); + final long opaque = transporter.getHeader().getOpaque(); + + try { + final Channel channel = getOrCreateChannel(host); + if (channel == null) { + throw new RemotingException(String.format("connect to : %s fail", host)); + } + final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis); + channel.writeAndFlush(transporter).addListener(future -> { + if (future.isSuccess()) { + responseFuture.setSendOk(true); + return; + } else { + responseFuture.setSendOk(false); + } + responseFuture.setCause(future.cause()); + responseFuture.putResponse(null); + log.error("Send Sync request {} to host {} failed", transporter, host, responseFuture.getCause()); + }); + /* + * sync wait for result + */ + IRpcResponse iRpcResponse = responseFuture.waitResponse(); + if (iRpcResponse == null) { + if (responseFuture.isSendOK()) { + throw new RemotingTimeoutException(host.toString(), timeoutMillis, responseFuture.getCause()); + } else { + throw new RemotingException(host.toString(), responseFuture.getCause()); + } + } + return iRpcResponse; + } catch (Exception ex) { + ClientSyncExceptionMetrics clientSyncExceptionMetrics = ClientSyncExceptionMetrics + .of(syncRequestDto) + .withThrowable(ex); + RpcMetrics.recordClientSyncRequestException(clientSyncExceptionMetrics); + if (ex instanceof RemotingException) { + throw (RemotingException) ex; + } else { + throw new RemotingException(ex); + } + } finally { + ClientSyncDurationMetrics clientSyncDurationMetrics = ClientSyncDurationMetrics + .of(syncRequestDto) + .withMilliseconds(System.currentTimeMillis() - start); + RpcMetrics.recordClientSyncRequestDuration(clientSyncDurationMetrics); + } + } + + Channel getOrCreateChannel(Host host) { + Channel channel = 
channels.get(host); + if (channel != null && channel.isActive()) { + return channel; + } + try { + channelsLock.lock(); + channel = channels.get(host); + if (channel != null && channel.isActive()) { + return channel; + } + channel = createChannel(host); + channels.put(host, channel); + } finally { + channelsLock.unlock(); + } + return channel; + } + + /** + * create channel + * + * @param host host + * @return channel + */ + Channel createChannel(Host host) { + try { + ChannelFuture future = bootstrap.connect(new InetSocketAddress(host.getIp(), host.getPort())); + future = future.sync(); + if (future.isSuccess()) { + return future.channel(); + } else { + throw new IllegalArgumentException("connect to host: " + host + " failed", future.cause()); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Connect to host: " + host + " failed", e); + } + } + + @Override + public void close() { + if (isStarted.compareAndSet(true, false)) { + try { + closeChannels(); + if (workerGroup != null) { + this.workerGroup.shutdownGracefully(); + } + log.info("netty client closed"); + } catch (Exception ex) { + log.error("netty client close exception", ex); + } + } + } + + private void closeChannels() { + try { + channelsLock.lock(); + channels.values().forEach(Channel::close); + } finally { + channelsLock.unlock(); + } + } + + public void closeChannel(Host host) { + try { + channelsLock.lock(); + Channel channel = this.channels.remove(host); + if (channel != null) { + channel.close(); + } + } finally { + channelsLock.unlock(); + } + } +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClientFactory.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClientFactory.java similarity index 95% rename from 
dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClientFactory.java rename to dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClientFactory.java index 7bbebfbf3d89..d14a8aa54efc 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingClientFactory.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/NettyRemotingClientFactory.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.extract.base; +package org.apache.dolphinscheduler.extract.base.client; import org.apache.dolphinscheduler.extract.base.config.NettyClientConfig; diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactory.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactory.java index 28d82532bebd..44d310e70b1e 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactory.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactory.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.extract.base.client; -import org.apache.dolphinscheduler.extract.base.NettyRemotingClientFactory; import org.apache.dolphinscheduler.extract.base.config.NettyClientConfig; public class SingletonJdkDynamicRpcClientProxyFactory { diff --git 
a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SyncClientMethodInvoker.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SyncClientMethodInvoker.java index b5fdf3fb71cd..ccbdad945b7d 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SyncClientMethodInvoker.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/client/SyncClientMethodInvoker.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.extract.base.client; import org.apache.dolphinscheduler.extract.base.IRpcResponse; -import org.apache.dolphinscheduler.extract.base.NettyRemotingClient; import org.apache.dolphinscheduler.extract.base.RpcMethod; import org.apache.dolphinscheduler.extract.base.StandardRpcRequest; +import org.apache.dolphinscheduler.extract.base.SyncRequestDto; import org.apache.dolphinscheduler.extract.base.exception.MethodInvocationException; import org.apache.dolphinscheduler.extract.base.protocal.Transporter; import org.apache.dolphinscheduler.extract.base.protocal.TransporterHeader; @@ -29,9 +29,9 @@ import java.lang.reflect.Method; -public class SyncClientMethodInvoker extends BaseRemoteMethodInvoker { +class SyncClientMethodInvoker extends AbstractClientMethodInvoker { - public SyncClientMethodInvoker(Host serverHost, Method localMethod, NettyRemotingClient nettyRemotingClient) { + SyncClientMethodInvoker(Host serverHost, Method localMethod, NettyRemotingClient nettyRemotingClient) { super(serverHost, localMethod, nettyRemotingClient); } @@ -42,8 +42,12 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl transporter.setBody(JsonSerializer.serialize(StandardRpcRequest.of(args))); transporter.setHeader(TransporterHeader.of(methodIdentifier)); - IRpcResponse 
iRpcResponse = - nettyRemotingClient.sendSync(serverHost, transporter, sync.timeout()); + SyncRequestDto syncRequestDto = SyncRequestDto.builder() + .timeoutMillis(sync.timeout()) + .transporter(transporter) + .serverHost(serverHost) + .build(); + IRpcResponse iRpcResponse = nettyRemotingClient.sendSync(syncRequestDto); if (!iRpcResponse.isSuccess()) { throw MethodInvocationException.of(iRpcResponse.getMessage()); } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyClientConfig.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyClientConfig.java index a41a439b5c5f..a00ff540f4a5 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyClientConfig.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyClientConfig.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.extract.base.config; +import java.time.Duration; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -64,4 +66,14 @@ public class NettyClientConfig { @Builder.Default private int connectTimeoutMillis = 3000; + /** + * Will send {@link org.apache.dolphinscheduler.extract.base.protocal.HeartBeatTransporter} to netty server every + * heartBeatIntervalMillis, used to keep the {@link io.netty.channel.Channel} active. 
+ */ + @Builder.Default + private long heartBeatIntervalMillis = Duration.ofSeconds(10).toMillis(); + + @Builder.Default + private int defaultRpcTimeoutMillis = 10_000; + } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyServerConfig.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyServerConfig.java index 9d4a2ee3d227..cc0aa04f68c9 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyServerConfig.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/config/NettyServerConfig.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.extract.base.config; +import java.time.Duration; + import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; @@ -37,7 +39,7 @@ public class NettyServerConfig { private int soBacklog = 1024; /** - * whether tpc delay + * whether tcp delay */ @Builder.Default private boolean tcpNoDelay = true; @@ -66,6 +68,12 @@ public class NettyServerConfig { @Builder.Default private int workerThread = Runtime.getRuntime().availableProcessors() * 2; + /** + * If done's receive any data from a {@link io.netty.channel.Channel} during 180s then will close it. 
+ */ + @Builder.Default + private long connectionIdleTime = Duration.ofSeconds(60).toMillis(); + /** * listen port */ diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/future/ResponseFuture.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/future/ResponseFuture.java index 35405c557803..1fbbd9ed6c42 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/future/ResponseFuture.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/future/ResponseFuture.java @@ -19,8 +19,6 @@ import org.apache.dolphinscheduler.extract.base.IRpcResponse; -import java.util.Iterator; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -34,17 +32,13 @@ @Slf4j public class ResponseFuture { - private static final ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); + private static final ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(); private final long opaque; // remove the timeout private final long timeoutMillis; - private final InvokeCallback invokeCallback; - - private final ReleaseSemaphore releaseSemaphore; - private final CountDownLatch latch = new CountDownLatch(1); private final long beginTimestamp = System.currentTimeMillis(); @@ -57,14 +51,9 @@ public class ResponseFuture { private Throwable cause; - public ResponseFuture(long opaque, - long timeoutMillis, - InvokeCallback invokeCallback, - ReleaseSemaphore releaseSemaphore) { + public ResponseFuture(long opaque, long timeoutMillis) { this.opaque = opaque; this.timeoutMillis = timeoutMillis; - this.invokeCallback = invokeCallback; - this.releaseSemaphore = releaseSemaphore; FUTURE_TABLE.put(opaque, this); } @@ -90,10 +79,6 @@ public 
static ResponseFuture getFuture(long opaque) { return FUTURE_TABLE.get(opaque); } - public void removeFuture() { - FUTURE_TABLE.remove(opaque); - } - /** * whether timeout * @@ -104,15 +89,6 @@ public boolean isTimeout() { return diff > this.timeoutMillis; } - /** - * execute invoke callback - */ - public void executeInvokeCallback() { - if (invokeCallback != null) { - invokeCallback.operationComplete(this); - } - } - public boolean isSendOK() { return sendOk; } @@ -129,52 +105,4 @@ public Throwable getCause() { return cause; } - public long getOpaque() { - return opaque; - } - - public long getTimeoutMillis() { - return timeoutMillis; - } - - public long getBeginTimestamp() { - return beginTimestamp; - } - - public InvokeCallback getInvokeCallback() { - return invokeCallback; - } - - /** - * release - */ - public void release() { - if (this.releaseSemaphore != null) { - this.releaseSemaphore.release(); - } - } - - /** - * scan future table - */ - public static void scanFutureTable() { - Iterator> it = FUTURE_TABLE.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry next = it.next(); - ResponseFuture future = next.getValue(); - if ((future.getBeginTimestamp() + future.getTimeoutMillis() + 1000) > System.currentTimeMillis()) { - continue; - } - try { - // todo: use thread pool to execute the async callback, otherwise will block the scan thread - future.release(); - future.executeInvokeCallback(); - } catch (Exception ex) { - log.error("ScanFutureTable, execute callback error, requestId: {}", future.getOpaque(), ex); - } - it.remove(); - log.debug("Remove timeout request: {}", future); - } - } - } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncDurationMetrics.java similarity index 51% rename from 
dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java rename to dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncDurationMetrics.java index 8ad401386827..60124363b769 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncDurationMetrics.java @@ -15,13 +15,11 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.server.master.dispatch.context; +package org.apache.dolphinscheduler.extract.base.metrics; -import static org.apache.dolphinscheduler.common.constants.Constants.DEFAULT_WORKER_GROUP; - -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.extract.base.utils.Host; -import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.extract.base.SyncRequestDto; +import org.apache.dolphinscheduler.extract.base.protocal.Transporter; import lombok.AllArgsConstructor; import lombok.Builder; @@ -32,26 +30,27 @@ @Builder @NoArgsConstructor @AllArgsConstructor -public class ExecutionContext { +public class ClientSyncDurationMetrics { - private Host host; + private Transporter transporter; - private TaskInstance taskInstance; + private long milliseconds; - private ExecutorType executorType; + @Builder.Default + private String clientHost = NetUtils.getHost(); - /** - * worker group - */ - private String workerGroup; + private String serverHost; - public ExecutionContext(ExecutorType executorType, TaskInstance taskInstance) { - this(executorType, DEFAULT_WORKER_GROUP, taskInstance); + public static ClientSyncDurationMetrics of(SyncRequestDto 
syncRequestDto) { + return ClientSyncDurationMetrics.builder() + .transporter(syncRequestDto.getTransporter()) + .serverHost(syncRequestDto.getServerHost().getIp()) + .build(); } - public ExecutionContext(ExecutorType executorType, String workerGroup, TaskInstance taskInstance) { - this.executorType = executorType; - this.workerGroup = workerGroup; - this.taskInstance = taskInstance; + public ClientSyncDurationMetrics withMilliseconds(long milliseconds) { + this.milliseconds = milliseconds; + return this; } + } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncExceptionMetrics.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncExceptionMetrics.java new file mode 100644 index 000000000000..6f91132547ed --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/ClientSyncExceptionMetrics.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.extract.base.metrics; + +import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.extract.base.SyncRequestDto; +import org.apache.dolphinscheduler.extract.base.protocal.Transporter; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ClientSyncExceptionMetrics { + + private Transporter transporter; + + private String clientHost; + + @Builder.Default + private String serverHost = NetUtils.getHost(); + + private Throwable throwable; + + public static ClientSyncExceptionMetrics of(SyncRequestDto syncRequestDto) { + return ClientSyncExceptionMetrics.builder() + .transporter(syncRequestDto.getTransporter()) + .build(); + + } + + public ClientSyncExceptionMetrics withThrowable(Throwable throwable) { + this.throwable = throwable; + return this; + } + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetrics.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetrics.java new file mode 100644 index 000000000000..c2dbbfefa655 --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetrics.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.extract.base.metrics; + +import org.apache.dolphinscheduler.extract.base.protocal.Transporter; +import org.apache.dolphinscheduler.extract.base.protocal.TransporterHeader; + +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; + +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.Metrics; +import io.micrometer.core.instrument.Timer; + +public class RpcMetrics { + + private static final Map rpcRequestDurationTimer = new ConcurrentHashMap<>(); + + private static final Map rpcRequestExceptionCounter = new ConcurrentHashMap<>(); + + public static void recordClientSyncRequestException(ClientSyncExceptionMetrics clientSyncExceptionMetrics) { + recordClientSyncRequestException( + clientSyncExceptionMetrics.getThrowable(), + Optional.of(clientSyncExceptionMetrics) + .map(ClientSyncExceptionMetrics::getTransporter) + .map(Transporter::getHeader) + .map(TransporterHeader::getMethodIdentifier) + .orElseGet(() -> "unknown"), + clientSyncExceptionMetrics.getClientHost(), + clientSyncExceptionMetrics.getServerHost()); + } + + public static void recordClientSyncRequestException(final Throwable throwable, + final String methodName, + final String clientHost, + final String serverHost) { + final String exceptionType = throwable == null ? 
"unknown" : throwable.getClass().getSimpleName(); + final Counter counter = rpcRequestExceptionCounter.computeIfAbsent(exceptionType, + (et) -> Counter.builder("ds.rpc.client.sync.request.exception.count") + .tag("method_name", methodName) + .tag("client_host", clientHost) + .tag("server_host", serverHost) + .tag("exception_name", et) + .description("rpc sync request exception counter for exception type: " + et) + .register(Metrics.globalRegistry)); + counter.increment(); + } + + public static void recordClientSyncRequestDuration(ClientSyncDurationMetrics clientSyncDurationMetrics) { + recordClientSyncRequestDuration( + Optional.of(clientSyncDurationMetrics) + .map(ClientSyncDurationMetrics::getTransporter) + .map(Transporter::getHeader) + .map(TransporterHeader::getMethodIdentifier) + .orElseGet(() -> "unknown"), + clientSyncDurationMetrics.getMilliseconds(), + clientSyncDurationMetrics.getClientHost(), + clientSyncDurationMetrics.getServerHost()); + } + + public static void recordClientSyncRequestDuration(final String methodName, + final long milliseconds, + final String clientHost, + final String serverHost) { + rpcRequestDurationTimer.computeIfAbsent(methodName, + (method) -> Timer.builder("ds.rpc.client.sync.request.duration.time") + .tag("method_name", method) + .tag("client_host", clientHost) + .tag("server_host", serverHost) + .publishPercentiles(0.5, 0.75, 0.95, 0.99) + .publishPercentileHistogram() + .description("time cost of sync rpc request, unit ms") + .register(Metrics.globalRegistry)) + .record(milliseconds, TimeUnit.MILLISECONDS); + } + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/JdkDynamicServerHandler.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/JdkDynamicServerHandler.java index b4978172f124..4f9a7034c8c0 100644 --- 
a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/JdkDynamicServerHandler.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/JdkDynamicServerHandler.java @@ -19,7 +19,6 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServer; import org.apache.dolphinscheduler.extract.base.StandardRpcRequest; import org.apache.dolphinscheduler.extract.base.StandardRpcResponse; import org.apache.dolphinscheduler.extract.base.protocal.HeartBeatTransporter; @@ -30,6 +29,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import lombok.extern.slf4j.Slf4j; @@ -38,18 +38,19 @@ import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.timeout.IdleState; import io.netty.handler.timeout.IdleStateEvent; @Slf4j @ChannelHandler.Sharable -public class JdkDynamicServerHandler extends ChannelInboundHandlerAdapter { +class JdkDynamicServerHandler extends ChannelInboundHandlerAdapter { - private final NettyRemotingServer nettyRemotingServer; + private final ExecutorService methodInvokeExecutor; private final Map methodInvokerMap; - public JdkDynamicServerHandler(NettyRemotingServer nettyRemotingServer) { - this.nettyRemotingServer = nettyRemotingServer; + JdkDynamicServerHandler(ExecutorService methodInvokeExecutor) { + this.methodInvokeExecutor = methodInvokeExecutor; this.methodInvokerMap = new ConcurrentHashMap<>(); } @@ -90,7 +91,7 @@ private void processReceived(final Channel channel, final Transporter transporte channel.writeAndFlush(response); return; } - nettyRemotingServer.getDefaultExecutor().execute(() -> { + 
methodInvokeExecutor.execute(() -> { StandardRpcResponse iRpcResponse; try { StandardRpcRequest standardRpcRequest = @@ -160,7 +161,11 @@ public void channelWritabilityChanged(ChannelHandlerContext ctx) { @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { if (evt instanceof IdleStateEvent) { - ctx.channel().close(); + IdleStateEvent event = (IdleStateEvent) evt; + if (event.state() == IdleState.READER_IDLE) { + log.warn("Not receive heart beat from: {}, will close the channel", ctx.channel().remoteAddress()); + ctx.close(); + } } else { super.userEventTriggered(ctx, evt); } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServer.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServer.java similarity index 73% rename from dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServer.java rename to dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServer.java index 365a17dd030f..9ebf802b1ed2 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServer.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServer.java @@ -15,16 +15,13 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.extract.base; +package org.apache.dolphinscheduler.extract.base.server; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import org.apache.dolphinscheduler.extract.base.exception.RemoteException; import org.apache.dolphinscheduler.extract.base.protocal.TransporterDecoder; import org.apache.dolphinscheduler.extract.base.protocal.TransporterEncoder; -import org.apache.dolphinscheduler.extract.base.server.JdkDynamicServerHandler; -import org.apache.dolphinscheduler.extract.base.server.ServerMethodInvoker; -import org.apache.dolphinscheduler.extract.base.utils.Constants; import org.apache.dolphinscheduler.extract.base.utils.NettyUtils; import java.util.concurrent.ExecutorService; @@ -32,6 +29,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.ChannelFuture; @@ -48,12 +46,15 @@ * remoting netty server */ @Slf4j -public class NettyRemotingServer { +class NettyRemotingServer { private final ServerBootstrap serverBootstrap = new ServerBootstrap(); - private final ExecutorService defaultExecutor = ThreadUtils - .newDaemonFixedThreadExecutor("NettyRemotingServerThread", Runtime.getRuntime().availableProcessors() * 2); + @Getter + private final String serverName; + + @Getter + private final ExecutorService methodInvokerExecutor; private final EventLoopGroup bossGroup; @@ -61,16 +62,20 @@ public class NettyRemotingServer { private final NettyServerConfig serverConfig; - private final JdkDynamicServerHandler serverHandler = new JdkDynamicServerHandler(this); + private final JdkDynamicServerHandler channelHandler; private final AtomicBoolean isStarted = new AtomicBoolean(false); - public NettyRemotingServer(final NettyServerConfig serverConfig) { + NettyRemotingServer(final NettyServerConfig 
serverConfig) { this.serverConfig = serverConfig; + this.serverName = serverConfig.getServerName(); + this.methodInvokerExecutor = ThreadUtils.newDaemonFixedThreadExecutor( + serverName + "MethodInvoker-%d", Runtime.getRuntime().availableProcessors() * 2 + 1); + this.channelHandler = new JdkDynamicServerHandler(methodInvokerExecutor); ThreadFactory bossThreadFactory = - ThreadUtils.newDaemonThreadFactory(serverConfig.getServerName() + "BossThread_%s"); + ThreadUtils.newDaemonThreadFactory(serverName + "BossThread-%d"); ThreadFactory workerThreadFactory = - ThreadUtils.newDaemonThreadFactory(serverConfig.getServerName() + "WorkerThread_%s"); + ThreadUtils.newDaemonThreadFactory(serverName + "WorkerThread-%d"); if (Epoll.isAvailable()) { this.bossGroup = new EpollEventLoopGroup(1, bossThreadFactory); this.workGroup = new EpollEventLoopGroup(serverConfig.getWorkerThread(), workerThreadFactory); @@ -80,7 +85,7 @@ public NettyRemotingServer(final NettyServerConfig serverConfig) { } } - public void start() { + void start() { if (isStarted.compareAndSet(false, true)) { this.serverBootstrap .group(this.bossGroup, this.workGroup) @@ -103,9 +108,9 @@ protected void initChannel(SocketChannel ch) { try { future = serverBootstrap.bind(serverConfig.getListenPort()).sync(); } catch (Exception e) { - log.error("{} bind fail {}, exit", serverConfig.getServerName(), e.getMessage(), e); throw new RemoteException( - String.format("%s bind %s fail", serverConfig.getServerName(), serverConfig.getListenPort())); + String.format("%s bind %s fail", serverConfig.getServerName(), serverConfig.getListenPort()), + e); } if (future.isSuccess()) { @@ -113,14 +118,9 @@ protected void initChannel(SocketChannel ch) { return; } - if (future.cause() != null) { - throw new RemoteException( - String.format("%s bind %s fail", serverConfig.getServerName(), serverConfig.getListenPort()), - future.cause()); - } else { - throw new RemoteException( - String.format("%s bind %s fail", 
serverConfig.getServerName(), serverConfig.getListenPort())); - } + throw new RemoteException( + String.format("%s bind %s fail", serverConfig.getServerName(), serverConfig.getListenPort()), + future.cause()); } } @@ -134,19 +134,15 @@ private void initNettyChannel(SocketChannel ch) { .addLast("encoder", new TransporterEncoder()) .addLast("decoder", new TransporterDecoder()) .addLast("server-idle-handle", - new IdleStateHandler(0, 0, Constants.NETTY_SERVER_HEART_BEAT_TIME, TimeUnit.MILLISECONDS)) - .addLast("handler", serverHandler); - } - - public ExecutorService getDefaultExecutor() { - return defaultExecutor; + new IdleStateHandler(serverConfig.getConnectionIdleTime(), 0, 0, TimeUnit.MILLISECONDS)) + .addLast("handler", channelHandler); } - public void registerMethodInvoker(ServerMethodInvoker methodInvoker) { - serverHandler.registerMethodInvoker(methodInvoker); + void registerMethodInvoker(ServerMethodInvoker methodInvoker) { + channelHandler.registerMethodInvoker(methodInvoker); } - public void close() { + void close() { if (isStarted.compareAndSet(true, false)) { try { if (bossGroup != null) { @@ -155,7 +151,7 @@ public void close() { if (workGroup != null) { this.workGroup.shutdownGracefully(); } - defaultExecutor.shutdown(); + methodInvokerExecutor.shutdown(); } catch (Exception ex) { log.error("netty server close exception", ex); } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServerFactory.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServerFactory.java similarity index 84% rename from dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServerFactory.java rename to 
dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServerFactory.java index 6bf1b8d31ce2..70ed0529e803 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/NettyRemotingServerFactory.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/NettyRemotingServerFactory.java @@ -15,16 +15,16 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.extract.base; +package org.apache.dolphinscheduler.extract.base.server; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import lombok.experimental.UtilityClass; @UtilityClass -public class NettyRemotingServerFactory { +class NettyRemotingServerFactory { - public NettyRemotingServer buildNettyRemotingServer(NettyServerConfig nettyServerConfig) { + NettyRemotingServer buildNettyRemotingServer(NettyServerConfig nettyServerConfig) { return new NettyRemotingServer(nettyServerConfig); } } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/RpcServer.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/RpcServer.java new file mode 100644 index 000000000000..213868ba46e4 --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/RpcServer.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.extract.base.server; + +import org.apache.dolphinscheduler.extract.base.RpcMethod; +import org.apache.dolphinscheduler.extract.base.RpcService; +import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; + +import java.lang.reflect.Method; + +import lombok.extern.slf4j.Slf4j; + +/** + * The RpcServer based on Netty. The server will register the method invoker and provide the service to the client. + * Once the server is started, it will listen on the port and wait for the client to connect. + *
+ *          RpcServer rpcServer = new RpcServer(new NettyServerConfig());
+ *          rpcServer.registerServerMethodInvokerProvider(new ServerMethodInvokerProviderImpl());
+ *          rpcServer.start();
+ * 
+ */ +@Slf4j +public class RpcServer implements ServerMethodInvokerRegistry, AutoCloseable { + + private final NettyRemotingServer nettyRemotingServer; + + public RpcServer(NettyServerConfig nettyServerConfig) { + this.nettyRemotingServer = NettyRemotingServerFactory.buildNettyRemotingServer(nettyServerConfig); + } + + public void start() { + nettyRemotingServer.start(); + } + + @Override + public void registerServerMethodInvokerProvider(Object serverMethodInvokerProviderBean) { + for (Class anInterface : serverMethodInvokerProviderBean.getClass().getInterfaces()) { + if (anInterface.getAnnotation(RpcService.class) == null) { + continue; + } + for (Method method : anInterface.getDeclaredMethods()) { + RpcMethod rpcMethod = method.getAnnotation(RpcMethod.class); + if (rpcMethod == null) { + continue; + } + ServerMethodInvoker serverMethodInvoker = + new ServerMethodInvokerImpl(serverMethodInvokerProviderBean, method); + nettyRemotingServer.registerMethodInvoker(serverMethodInvoker); + log.debug("Register ServerMethodInvoker: {} to bean: {}", + serverMethodInvoker.getMethodIdentify(), serverMethodInvoker.getMethodProviderIdentify()); + } + } + } + + @Override + public void close() { + nettyRemotingServer.close(); + } +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvoker.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvoker.java index ee633217b298..151b54bb9750 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvoker.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvoker.java @@ -17,10 +17,12 @@ package org.apache.dolphinscheduler.extract.base.server; -public interface ServerMethodInvoker { 
+interface ServerMethodInvoker { String getMethodIdentify(); + String getMethodProviderIdentify(); + Object invoke(final Object... arg) throws Throwable; } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerImpl.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerImpl.java index eea9da5e14a2..4c29650aa030 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerImpl.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerImpl.java @@ -20,7 +20,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; -public class ServerMethodInvokerImpl implements ServerMethodInvoker { +class ServerMethodInvokerImpl implements ServerMethodInvoker { private final Object serviceBean; @@ -28,7 +28,7 @@ public class ServerMethodInvokerImpl implements ServerMethodInvoker { private final String methodIdentify; - public ServerMethodInvokerImpl(Object serviceBean, Method method) { + ServerMethodInvokerImpl(Object serviceBean, Method method) { this.serviceBean = serviceBean; this.method = method; this.methodIdentify = method.toGenericString(); @@ -48,4 +48,9 @@ public Object invoke(Object... 
args) throws Throwable { public String getMethodIdentify() { return methodIdentify; } + + @Override + public String getMethodProviderIdentify() { + return serviceBean.getClass().getName(); + } } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerRegistry.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerRegistry.java new file mode 100644 index 000000000000..4e56be26174b --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/ServerMethodInvokerRegistry.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.extract.base.server; + +interface ServerMethodInvokerRegistry { + + /** + * Register service object, which will be used to invoke the {@link ServerMethodInvoker}. + * The serverMethodInvokerProviderObject must implement an interface annotated with {@link org.apache.dolphinscheduler.extract.base.RpcService}. 
+ */ + void registerServerMethodInvokerProvider(Object serverMethodInvokerProviderObject); + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/SpringServerMethodInvokerDiscovery.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/SpringServerMethodInvokerDiscovery.java index 2b87a70080fc..de4943990cef 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/SpringServerMethodInvokerDiscovery.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/server/SpringServerMethodInvokerDiscovery.java @@ -17,11 +17,7 @@ package org.apache.dolphinscheduler.extract.base.server; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServer; -import org.apache.dolphinscheduler.extract.base.RpcMethod; -import org.apache.dolphinscheduler.extract.base.RpcService; - -import java.lang.reflect.Method; +import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import lombok.extern.slf4j.Slf4j; @@ -29,38 +25,21 @@ import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.lang.Nullable; +/** + * The RpcServer which automatically discovers the {@link ServerMethodInvoker} beans from the Spring container. 
+ */ @Slf4j -public class SpringServerMethodInvokerDiscovery implements BeanPostProcessor { +public class SpringServerMethodInvokerDiscovery extends RpcServer implements BeanPostProcessor { - protected final NettyRemotingServer nettyRemotingServer; - - public SpringServerMethodInvokerDiscovery(NettyRemotingServer nettyRemotingServer) { - this.nettyRemotingServer = nettyRemotingServer; + public SpringServerMethodInvokerDiscovery(NettyServerConfig nettyServerConfig) { + super(nettyServerConfig); } @Nullable @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { - Class[] interfaces = bean.getClass().getInterfaces(); - for (Class anInterface : interfaces) { - if (anInterface.getAnnotation(RpcService.class) == null) { - continue; - } - registerRpcMethodInvoker(anInterface, bean, beanName); - } + registerServerMethodInvokerProvider(bean); return bean; } - private void registerRpcMethodInvoker(Class anInterface, Object bean, String beanName) { - Method[] declaredMethods = anInterface.getDeclaredMethods(); - for (Method method : declaredMethods) { - RpcMethod rpcMethod = method.getAnnotation(RpcMethod.class); - if (rpcMethod == null) { - continue; - } - ServerMethodInvoker methodInvoker = new ServerMethodInvokerImpl(bean, method); - nettyRemotingServer.registerMethodInvoker(methodInvoker); - log.debug("Register ServerMethodInvoker: {} to bean: {}", methodInvoker.getMethodIdentify(), beanName); - } - } } diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/utils/Constants.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/utils/Constants.java index 76e3872d315a..94b92b253972 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/utils/Constants.java +++ 
b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/main/java/org/apache/dolphinscheduler/extract/base/utils/Constants.java @@ -35,10 +35,6 @@ private Constants() { public static final String SLASH = "/"; - public static final int NETTY_SERVER_HEART_BEAT_TIME = 1000 * 60 * 3 + 1000; - - public static final int NETTY_CLIENT_HEART_BEAT_TIME = 1000 * 6; - /** * charset */ diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactoryTest.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactoryTest.java index 521cf7c75a14..92ed49934cc2 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactoryTest.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/client/SingletonJdkDynamicRpcClientProxyFactoryTest.java @@ -20,7 +20,6 @@ import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServer; import org.apache.dolphinscheduler.extract.base.RpcMethod; import org.apache.dolphinscheduler.extract.base.RpcService; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; @@ -37,7 +36,7 @@ public class SingletonJdkDynamicRpcClientProxyFactoryTest { - private NettyRemotingServer nettyRemotingServer; + private SpringServerMethodInvokerDiscovery springServerMethodInvokerDiscovery; private String serverAddress; @@ -48,11 +47,10 @@ public void setUp() { .serverName("ApiServer") .listenPort(listenPort) .build(); - nettyRemotingServer = new NettyRemotingServer(nettyServerConfig); - nettyRemotingServer.start(); serverAddress = "localhost:" + 
listenPort; - new SpringServerMethodInvokerDiscovery(nettyRemotingServer) - .postProcessAfterInitialization(new IServiceImpl(), "iServiceImpl"); + springServerMethodInvokerDiscovery = new SpringServerMethodInvokerDiscovery(nettyServerConfig); + springServerMethodInvokerDiscovery.registerServerMethodInvokerProvider(new IServiceImpl()); + springServerMethodInvokerDiscovery.start(); } @Test @@ -82,7 +80,7 @@ public void testVoid() { @AfterEach public void tearDown() { - nettyRemotingServer.close(); + springServerMethodInvokerDiscovery.close(); } @RpcService diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetricsTest.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetricsTest.java new file mode 100644 index 000000000000..352e21820609 --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/metrics/RpcMetricsTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.extract.base.metrics; + +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.common.utils.NetUtils; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import io.micrometer.core.instrument.Metrics; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; + +class RpcMetricsTest { + + @BeforeEach + public void setup() { + Metrics.globalRegistry.clear(); + Metrics.addRegistry(new SimpleMeterRegistry()); + } + + @Test + void testRecordClientSyncRequestException() { + assertThat(Metrics.globalRegistry.find("ds.rpc.client.sync.request.exception.count").counter()).isNull(); + + String clientHost = NetUtils.getHost(); + String serverHost = NetUtils.getHost(); + + RpcMetrics.recordClientSyncRequestException( + new IllegalArgumentException("id is null"), "getById", clientHost, serverHost); + RpcMetrics.recordClientSyncRequestException( + new IllegalArgumentException("name is null"), "getByName", clientHost, serverHost); + RpcMetrics.recordClientSyncRequestException( + new IllegalArgumentException("age is null"), "getByAge", clientHost, serverHost); + RpcMetrics.recordClientSyncRequestException(new UnsupportedOperationException("update id is not supported"), + "updateById", clientHost, serverHost); + assertThat(Metrics.globalRegistry.find("ds.rpc.client.sync.request.exception.count").counter()).isNotNull(); + } + + @Test + void testRecordRpcRequestDuration() { + assertThat(Metrics.globalRegistry.find("ds.rpc.client.sync.request.duration.time").timer()).isNull(); + + String clientHost = NetUtils.getHost(); + String serverHost = NetUtils.getHost(); + + RpcMetrics.recordClientSyncRequestDuration("getById", 100, clientHost, serverHost); + RpcMetrics.recordClientSyncRequestDuration("getByName", 200, clientHost, serverHost); + RpcMetrics.recordClientSyncRequestDuration("getByAge", 300, clientHost, serverHost); + 
RpcMetrics.recordClientSyncRequestDuration("updateById", 400, clientHost, serverHost); + assertThat(Metrics.globalRegistry.find("ds.rpc.client.sync.request.duration.time").timer()).isNotNull(); + } + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/utils/HostTest.java b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/utils/HostTest.java new file mode 100644 index 000000000000..5bd1a0a0ba8d --- /dev/null +++ b/dolphinscheduler-extract/dolphinscheduler-extract-base/src/test/java/org/apache/dolphinscheduler/extract/base/utils/HostTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.extract.base.utils; + +import org.junit.jupiter.api.Test; + +import com.google.common.truth.Truth; + +class HostTest { + + @Test + void testEquals() { + Truth.assertThat(Host.of("localhost:8080")).isEqualTo(Host.of("localhost:8080")); + } + +} diff --git a/dolphinscheduler-extract/dolphinscheduler-extract-master/src/main/java/org/apache/dolphinscheduler/extract/master/transportor/StreamingTaskTriggerResponse.java b/dolphinscheduler-extract/dolphinscheduler-extract-master/src/main/java/org/apache/dolphinscheduler/extract/master/transportor/StreamingTaskTriggerResponse.java index 0f9f2652802b..d25611aee004 100644 --- a/dolphinscheduler-extract/dolphinscheduler-extract-master/src/main/java/org/apache/dolphinscheduler/extract/master/transportor/StreamingTaskTriggerResponse.java +++ b/dolphinscheduler-extract/dolphinscheduler-extract-master/src/main/java/org/apache/dolphinscheduler/extract/master/transportor/StreamingTaskTriggerResponse.java @@ -19,9 +19,11 @@ import lombok.AllArgsConstructor; import lombok.Data; +import lombok.NoArgsConstructor; @Data @AllArgsConstructor +@NoArgsConstructor public class StreamingTaskTriggerResponse { private boolean success; diff --git a/dolphinscheduler-master/pom.xml b/dolphinscheduler-master/pom.xml index 31627d4b7fce..1b99dd97f3e9 100644 --- a/dolphinscheduler-master/pom.xml +++ b/dolphinscheduler-master/pom.xml @@ -102,10 +102,6 @@ org.codehaus.janino janino
- - com.github.ben-manes.caffeine - caffeine - org.apache.hbase.thirdparty diff --git a/dolphinscheduler-master/src/main/assembly/dolphinscheduler-master-server.xml b/dolphinscheduler-master/src/main/assembly/dolphinscheduler-master-server.xml index 9fc3a3b67967..f069b070796e 100644 --- a/dolphinscheduler-master/src/main/assembly/dolphinscheduler-master-server.xml +++ b/dolphinscheduler-master/src/main/assembly/dolphinscheduler-master-server.xml @@ -52,6 +52,14 @@ ${basedir}/../dolphinscheduler-common/src/main/resources **/*.properties + **/*.yaml + + conf + + + ${basedir}/../dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources + + **/*.yaml conf diff --git a/dolphinscheduler-master/src/main/bin/jvm_args_env.sh b/dolphinscheduler-master/src/main/bin/jvm_args_env.sh index c183f31bb12f..1171cd78044f 100644 --- a/dolphinscheduler-master/src/main/bin/jvm_args_env.sh +++ b/dolphinscheduler-master/src/main/bin/jvm_args_env.sh @@ -24,6 +24,7 @@ -XX:+PrintGCDetails -Xloggc:gc.log +-XX:-OmitStackTraceInFastThrow -XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=dump.hprof diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java index d37ca9d0167f..92d7dcfaecfd 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -17,14 +17,19 @@ package org.apache.dolphinscheduler.server.master; +import org.apache.dolphinscheduler.common.CommonConfiguration; import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.lifecycle.ServerLifeCycleManager; import 
org.apache.dolphinscheduler.common.thread.DefaultUncaughtExceptionHandler; import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.dao.DaoConfiguration; import org.apache.dolphinscheduler.meter.metrics.MetricsProvider; import org.apache.dolphinscheduler.meter.metrics.SystemMetrics; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceProcessorProvider; +import org.apache.dolphinscheduler.plugin.storage.api.StorageConfiguration; import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; +import org.apache.dolphinscheduler.registry.api.RegistryConfiguration; import org.apache.dolphinscheduler.scheduler.api.SchedulerApi; import org.apache.dolphinscheduler.server.master.metrics.MasterServerMetrics; import org.apache.dolphinscheduler.server.master.registry.MasterRegistryClient; @@ -34,6 +39,7 @@ import org.apache.dolphinscheduler.server.master.runner.FailoverExecuteThread; import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerBootstrap; import org.apache.dolphinscheduler.server.master.runner.taskgroup.TaskGroupCoordinator; +import org.apache.dolphinscheduler.service.ServiceConfiguration; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import javax.annotation.PostConstruct; @@ -44,15 +50,15 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.cache.annotation.EnableCaching; -import org.springframework.context.annotation.ComponentScan; -import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.context.annotation.Import; -@SpringBootApplication -@ComponentScan("org.apache.dolphinscheduler") -@EnableTransactionManagement -@EnableCaching @Slf4j +@Import({DaoConfiguration.class, + ServiceConfiguration.class, + CommonConfiguration.class, + 
StorageConfiguration.class, + RegistryConfiguration.class}) +@SpringBootApplication public class MasterServer implements IStoppable { @Autowired @@ -61,9 +67,6 @@ public class MasterServer implements IStoppable { @Autowired private MasterRegistryClient masterRegistryClient; - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private MasterSchedulerBootstrap masterSchedulerBootstrap; @@ -105,7 +108,8 @@ public void run() throws SchedulerException { this.masterRPCServer.start(); // install task plugin - this.taskPluginManager.loadPlugin(); + TaskPluginManager.loadTaskPlugin(); + DataSourceProcessorProvider.initialize(); this.masterSlotManager.start(); @@ -123,7 +127,7 @@ public void run() throws SchedulerException { MasterServerMetrics.registerMasterCpuUsageGauge(() -> { SystemMetrics systemMetrics = metricsProvider.getSystemMetrics(); - return systemMetrics.getTotalCpuUsedPercentage(); + return systemMetrics.getSystemCpuUsagePercentage(); }); MasterServerMetrics.registerMasterMemoryAvailableGauge(() -> { SystemMetrics systemMetrics = metricsProvider.getSystemMetrics(); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java index 832c1b336b95..5990a53e0fe7 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java @@ -66,7 +66,6 @@ public TaskExecutionContextBuilder buildTaskInstanceRelatedInfo(TaskInstance tas taskExecutionContext.setWorkerGroup(taskInstance.getWorkerGroup()); taskExecutionContext.setEnvironmentConfig(taskInstance.getEnvironmentConfig()); taskExecutionContext.setHost(taskInstance.getHost()); - 
taskExecutionContext.setDelayTime(taskInstance.getDelayTime()); taskExecutionContext.setVarPool(taskInstance.getVarPool()); taskExecutionContext.setDryRun(taskInstance.getDryRun()); taskExecutionContext.setTestFlag(taskInstance.getTestFlag()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/CommandFetcherConfiguration.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/CommandFetcherConfiguration.java new file mode 100644 index 000000000000..4a4d3c1efc65 --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/CommandFetcherConfiguration.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.command; + +import static com.google.common.base.Preconditions.checkNotNull; + +import org.apache.dolphinscheduler.dao.repository.CommandDao; +import org.apache.dolphinscheduler.server.master.config.CommandFetchStrategy; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.registry.MasterSlotManager; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class CommandFetcherConfiguration { + + @Bean + public ICommandFetcher commandFetcher(MasterConfig masterConfig, + MasterSlotManager masterSlotManager, + CommandDao commandDao) { + CommandFetchStrategy commandFetchStrategy = + checkNotNull(masterConfig.getCommandFetchStrategy(), "command fetch strategy is null"); + switch (commandFetchStrategy.getType()) { + case ID_SLOT_BASED: + CommandFetchStrategy.IdSlotBasedFetchConfig idSlotBasedFetchConfig = + (CommandFetchStrategy.IdSlotBasedFetchConfig) commandFetchStrategy.getConfig(); + return new IdSlotBasedCommandFetcher(idSlotBasedFetchConfig, masterSlotManager, commandDao); + default: + throw new IllegalArgumentException( + "unsupported command fetch strategy type: " + commandFetchStrategy.getType()); + } + } +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/ICommandFetcher.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/ICommandFetcher.java new file mode 100644 index 000000000000..c315a9b29497 --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/ICommandFetcher.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.command; + +import org.apache.dolphinscheduler.dao.entity.Command; + +import java.util.List; + +/** + * The command fetcher used to fetch commands + */ +public interface ICommandFetcher { + + /** + * Fetch commands + * + * @return command list which need to be handled + */ + List fetchCommands(); + +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/IdSlotBasedCommandFetcher.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/IdSlotBasedCommandFetcher.java new file mode 100644 index 000000000000..a4178200938e --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/command/IdSlotBasedCommandFetcher.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.command; + +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.repository.CommandDao; +import org.apache.dolphinscheduler.server.master.config.CommandFetchStrategy; +import org.apache.dolphinscheduler.server.master.metrics.ProcessInstanceMetrics; +import org.apache.dolphinscheduler.server.master.registry.MasterSlotManager; + +import java.util.Collections; +import java.util.List; + +import lombok.extern.slf4j.Slf4j; + +/** + * The command fetcher which fetches commands by command id and slot.
+ */ +@Slf4j +public class IdSlotBasedCommandFetcher implements ICommandFetcher { + + private final CommandFetchStrategy.IdSlotBasedFetchConfig idSlotBasedFetchConfig; + + private final CommandDao commandDao; + + private final MasterSlotManager masterSlotManager; + + public IdSlotBasedCommandFetcher(CommandFetchStrategy.IdSlotBasedFetchConfig idSlotBasedFetchConfig, + MasterSlotManager masterSlotManager, + CommandDao commandDao) { + this.idSlotBasedFetchConfig = idSlotBasedFetchConfig; + this.masterSlotManager = masterSlotManager; + this.commandDao = commandDao; + } + + @Override + public List fetchCommands() { + long scheduleStartTime = System.currentTimeMillis(); + int currentSlotIndex = masterSlotManager.getSlot(); + int totalSlot = masterSlotManager.getMasterSize(); + if (totalSlot <= 0 || currentSlotIndex < 0) { + log.warn("Slot is invalid, current master slots: {}, the current slot index is {}", totalSlot, + currentSlotIndex); + return Collections.emptyList(); + } + List commands = commandDao.queryCommandByIdSlot( + currentSlotIndex, + totalSlot, + idSlotBasedFetchConfig.getIdStep(), + idSlotBasedFetchConfig.getFetchSize()); + long cost = System.currentTimeMillis() - scheduleStartTime; + log.info("Fetch commands: {} success, cost: {}ms, totalSlot: {}, currentSlotIndex: {}", commands.size(), cost, + totalSlot, currentSlotIndex); + ProcessInstanceMetrics.recordCommandQueryTime(cost); + return commands; + } + +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/CommandFetchStrategy.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/CommandFetchStrategy.java new file mode 100644 index 000000000000..e61941677c65 --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/CommandFetchStrategy.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.config; + +import lombok.Data; + +import org.springframework.validation.Errors; + +@Data +public class CommandFetchStrategy { + + private CommandFetchStrategyType type = CommandFetchStrategyType.ID_SLOT_BASED; + + private CommandFetchConfig config = new IdSlotBasedFetchConfig(); + + public void validate(Errors errors) { + config.validate(errors); + } + + public enum CommandFetchStrategyType { + ID_SLOT_BASED, + ; + } + + public interface CommandFetchConfig { + + void validate(Errors errors); + + } + + @Data + public static class IdSlotBasedFetchConfig implements CommandFetchConfig { + + private int idStep = 1; + private int fetchSize = 10; + + @Override + public void validate(Errors errors) { + if (idStep <= 0) { + errors.rejectValue("idStep", null, "idStep must be greater than 0"); + } + if (fetchSize <= 0) { + errors.rejectValue("fetchSize", null, "fetchSize must be greater than 0"); + } + } + } + +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java index 02c0dcb819d6..d9e6ab3b7d9b 100644 ---
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java @@ -48,10 +48,6 @@ public class MasterConfig implements Validator { * The master RPC server listen port. */ private int listenPort = 5678; - /** - * The max batch size used to fetch command from database. - */ - private int fetchCommandNum = 10; /** * The thread number used to prepare processInstance. This number shouldn't bigger than fetchCommandNum. */ @@ -98,10 +94,16 @@ public class MasterConfig implements Validator { private Duration workerGroupRefreshInterval = Duration.ofSeconds(10L); - // ip:listenPort + private CommandFetchStrategy commandFetchStrategy = new CommandFetchStrategy(); + + /** + * The IP address and listening port of the master server in the format 'ip:listenPort'. + */ private String masterAddress; - // /nodes/master/ip:listenPort + /** + * The registry path for the master server in the format '/nodes/master/ip:listenPort'. 
+ */ private String masterRegistryPath; @Override @@ -115,9 +117,6 @@ public void validate(Object target, Errors errors) { if (masterConfig.getListenPort() <= 0) { errors.rejectValue("listen-port", null, "is invalidated"); } - if (masterConfig.getFetchCommandNum() <= 0) { - errors.rejectValue("fetch-command-num", null, "should be a positive value"); - } if (masterConfig.getPreExecThreads() <= 0) { errors.rejectValue("per-exec-threads", null, "should be a positive value"); } @@ -149,6 +148,7 @@ public void validate(Object target, Errors errors) { if (StringUtils.isEmpty(masterConfig.getMasterAddress())) { masterConfig.setMasterAddress(NetUtils.getAddr(masterConfig.getListenPort())); } + commandFetchStrategy.validate(errors); masterConfig.setMasterRegistryPath( RegistryNodeType.MASTER.getRegistryPath() + "/" + masterConfig.getMasterAddress()); @@ -159,7 +159,6 @@ private void printConfig() { String config = "\n****************************Master Configuration**************************************" + "\n listen-port -> " + listenPort + - "\n fetch-command-num -> " + fetchCommandNum + "\n pre-exec-threads -> " + preExecThreads + "\n exec-threads -> " + execThreads + "\n dispatch-task-number -> " + dispatchTaskNumber + @@ -175,6 +174,7 @@ private void printConfig() { "\n master-address -> " + masterAddress + "\n master-registry-path: " + masterRegistryPath + "\n worker-group-refresh-interval: " + workerGroupRefreshInterval + + "\n command-fetch-strategy: " + commandFetchStrategy + "\n****************************Master Configuration**************************************"; log.info(config); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtection.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtection.java index 03570d691d2b..6b259738fed2 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtection.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtection.java @@ -17,57 +17,11 @@ package org.apache.dolphinscheduler.server.master.config; -import org.apache.dolphinscheduler.meter.metrics.SystemMetrics; +import org.apache.dolphinscheduler.meter.metrics.BaseServerLoadProtection; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; @Slf4j -@Data -@NoArgsConstructor -@AllArgsConstructor -public class MasterServerLoadProtection { - - private boolean enabled = true; - - private double maxCpuUsagePercentageThresholds = 0.7; - - private double maxJVMMemoryUsagePercentageThresholds = 0.7; - - private double maxSystemMemoryUsagePercentageThresholds = 0.7; - - private double maxDiskUsagePercentageThresholds = 0.7; - - public boolean isOverload(SystemMetrics systemMetrics) { - if (!enabled) { - return false; - } - if (systemMetrics.getTotalCpuUsedPercentage() > maxCpuUsagePercentageThresholds) { - log.info( - "Master OverLoad: the TotalCpuUsedPercentage: {} is over then the MaxCpuUsagePercentageThresholds {}", - systemMetrics.getTotalCpuUsedPercentage(), maxCpuUsagePercentageThresholds); - return true; - } - if (systemMetrics.getJvmMemoryUsedPercentage() > maxJVMMemoryUsagePercentageThresholds) { - log.info( - "Master OverLoad: the JvmMemoryUsedPercentage: {} is over then the MaxJVMMemoryUsagePercentageThresholds {}", - systemMetrics.getJvmMemoryUsedPercentage(), maxCpuUsagePercentageThresholds); - return true; - } - if (systemMetrics.getDiskUsedPercentage() > maxDiskUsagePercentageThresholds) { - log.info("Master OverLoad: the DiskUsedPercentage: {} is over then the MaxDiskUsagePercentageThresholds {}", - systemMetrics.getDiskUsedPercentage(), maxCpuUsagePercentageThresholds); - return true; - } - if 
(systemMetrics.getSystemMemoryUsedPercentage() > maxSystemMemoryUsagePercentageThresholds) { - log.info( - "Master OverLoad: the SystemMemoryUsedPercentage: {} is over then the MaxSystemMemoryUsagePercentageThresholds {}", - systemMetrics.getSystemMemoryUsedPercentage(), maxSystemMemoryUsagePercentageThresholds); - return true; - } - return false; - } +public class MasterServerLoadProtection extends BaseServerLoadProtection { } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java index d03fd59adaef..9c3c4fd6960f 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java @@ -25,10 +25,12 @@ public class LowerWeightRoundRobin extends AbstractSelector { /** - * select + * Selects a HostWeight from a collection of HostWeight objects. + * The selection is based on the current weight of each HostWeight. + * The HostWeight with the smallest current weight is selected. * - * @param sources sources - * @return HostWeight + * @param sources A collection of HostWeight objects to select from. + * @return The selected HostWeight with the smallest current weight. 
*/ @Override public HostWeight doSelect(Collection sources) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java index 2b7488a37040..a0f83232a252 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java @@ -27,6 +27,14 @@ */ public class RandomSelector extends AbstractSelector { + /** + * This method selects a HostWorker from a collection of HostWorker objects using a weighted random algorithm. + * The selection is based on the weight of each HostWorker. + * A random number is generated and the HostWorker whose weight spans this random number is selected. + * + * @param source A collection of HostWorker objects to select from. + * @return The selected HostWorker based on the weighted random algorithm. + */ @Override public HostWorker doSelect(final Collection source) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java index 8f21acef6d1e..b47eff87b9e4 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java @@ -73,6 +73,14 @@ void setLastUpdate(long lastUpdate) { } + /** + * This method selects a HostWorker from a collection of HostWorker objects using a weighted round-robin algorithm. 
+ * The selection is based on the current weight of each HostWorker. + * The HostWorker with the highest current weight is selected. + * + * @param source A collection of HostWorker objects to select from. + * @return The selected HostWorker with the highest current weight. + */ @Override public HostWorker doSelect(Collection source) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/StateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/StateEventHandler.java index 08ee6b66c5e3..3e6f3d4adabc 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/StateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/StateEventHandler.java @@ -28,10 +28,9 @@ public interface StateEventHandler { * @param stateEvent given state event. * @throws StateEventHandleException this exception means it can be recovered. * @throws StateEventHandleError this exception means it cannot be recovered, so the event need to drop. - * @throws StateEventHandleException this means it can be recovered. 
*/ boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, - StateEvent stateEvent) throws StateEventHandleException, StateEventHandleError, StateEventHandleFailure; + StateEvent stateEvent) throws StateEventHandleException, StateEventHandleError; StateEventType getEventType(); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java deleted file mode 100644 index 07f89fc5445f..000000000000 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.event; - -import org.apache.dolphinscheduler.common.enums.StateEventType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.task.api.parameters.BlockingParameters; -import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; - -import java.util.Optional; - -import lombok.extern.slf4j.Slf4j; - -import com.google.auto.service.AutoService; - -@AutoService(StateEventHandler.class) -@Slf4j -public class WorkflowBlockStateEventHandler implements StateEventHandler { - - @Override - public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, - StateEvent stateEvent) throws StateEventHandleError { - log.info("Handle workflow instance state block event"); - Optional taskInstanceOptional = - workflowExecuteRunnable.getTaskInstance(stateEvent.getTaskInstanceId()); - if (!taskInstanceOptional.isPresent()) { - throw new StateEventHandleError("Cannot find taskInstance from taskMap by taskInstanceId: " - + stateEvent.getTaskInstanceId()); - } - TaskInstance task = taskInstanceOptional.get(); - - BlockingParameters parameters = JSONUtils.parseObject(task.getTaskParams(), BlockingParameters.class); - if (parameters != null && parameters.isAlertWhenBlocking()) { - workflowExecuteRunnable.processBlock(); - } - return true; - } - - @Override - public StateEventType getEventType() { - return StateEventType.PROCESS_BLOCKED; - } -} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/metrics/MasterServerMetrics.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/metrics/MasterServerMetrics.java index 09ba1cb4ba3e..3b99eebfc93f 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/metrics/MasterServerMetrics.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/metrics/MasterServerMetrics.java @@ -43,6 +43,11 @@ public class MasterServerMetrics { .description("Master server consume command count") .register(Metrics.globalRegistry); + private final Counter masterHeartBeatCounter = + Counter.builder("ds.master.heartbeat.count") + .description("master heartbeat count") + .register(Metrics.globalRegistry); + public void registerMasterMemoryAvailableGauge(Supplier supplier) { Gauge.builder("ds.master.memory.available", supplier) .description("Master memory available") @@ -51,7 +56,7 @@ public void registerMasterMemoryAvailableGauge(Supplier supplier) { public void registerMasterCpuUsageGauge(Supplier supplier) { Gauge.builder("ds.master.cpu.usage", supplier) - .description("worker cpu usage") + .description("master cpu usage") .register(Metrics.globalRegistry); } @@ -75,4 +80,7 @@ public void incMasterConsumeCommand(int commandCount) { masterConsumeCommandCounter.increment(commandCount); } + public void incMasterHeartbeatCount() { + masterHeartBeatCounter.increment(); + } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterSlotManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterSlotManager.java index 834f56c2a456..155b97311796 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterSlotManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterSlotManager.java @@ -70,7 +70,8 @@ public class SlotChangeListener implements MasterInfoChangeListener { public void notify(Map masterNodeInfo) { List serverList = masterNodeInfo.values().stream() .filter(heartBeat -> !heartBeat.getServerStatus().equals(ServerStatus.BUSY)) - .map(this::convertHeartBeatToServer).collect(Collectors.toList()); + .map(this::convertHeartBeatToServer) + 
.collect(Collectors.toList()); syncMasterNodes(serverList); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java index 11a994aacdb1..d532168b2ad6 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.registry.api.Event; import org.apache.dolphinscheduler.registry.api.Event.Type; import org.apache.dolphinscheduler.registry.api.RegistryClient; @@ -36,7 +37,6 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; import java.util.Arrays; @@ -245,14 +245,16 @@ private void updateMasterNodes() { } private void updateWorkerNodes() { - workerGroupWriteLock.lock(); + workerNodeInfoWriteLock.lock(); try { Map workerNodeMaps = registryClient.getServerMaps(RegistryNodeType.WORKER); for (Map.Entry entry : workerNodeMaps.entrySet()) { - workerNodeInfo.put(entry.getKey(), JSONUtils.parseObject(entry.getValue(), WorkerHeartBeat.class)); + String nodeAddress = entry.getKey(); + WorkerHeartBeat workerHeartBeat = JSONUtils.parseObject(entry.getValue(), WorkerHeartBeat.class); + workerNodeInfo.put(nodeAddress, workerHeartBeat); } } finally { - workerGroupWriteLock.unlock(); + workerNodeInfoWriteLock.unlock(); } } @@ -271,8 +273,8 @@ private void updateWorkerGroupMappings() { 
.filter(workerNodeInfo::containsKey).collect(Collectors.toSet()); tmpWorkerGroupMappings.put(workerGroupName, activeWorkerNodes); } - if (!tmpWorkerGroupMappings.containsKey(Constants.DEFAULT_WORKER_GROUP)) { - tmpWorkerGroupMappings.put(Constants.DEFAULT_WORKER_GROUP, workerNodeInfo.keySet()); + if (!tmpWorkerGroupMappings.containsKey(WorkerGroupUtils.getDefaultWorkerGroup())) { + tmpWorkerGroupMappings.put(WorkerGroupUtils.getDefaultWorkerGroup(), workerNodeInfo.keySet()); } } finally { workerNodeInfoReadLock.unlock(); @@ -305,9 +307,7 @@ public Map> getWorkerGroupNodes() { public Set getWorkerGroupNodes(String workerGroup) throws WorkerGroupNotFoundException { workerGroupReadLock.lock(); try { - if (StringUtils.isEmpty(workerGroup)) { - workerGroup = Constants.DEFAULT_WORKER_GROUP; - } + workerGroup = WorkerGroupUtils.getWorkerGroupOrDefault(workerGroup); Set nodes = workerGroupNodes.get(workerGroup); if (nodes == null) { throw new WorkerGroupNotFoundException(String.format("WorkerGroup: %s is invalidated", workerGroup)); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServer.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServer.java index 0eaf885d11bf..ab89b021d618 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServer.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServer.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.server.master.rpc; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServerFactory; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import org.apache.dolphinscheduler.extract.base.server.SpringServerMethodInvokerDiscovery; import org.apache.dolphinscheduler.server.master.config.MasterConfig; @@ -31,21 +30,8 @@ public class MasterRpcServer extends 
SpringServerMethodInvokerDiscovery implements AutoCloseable { public MasterRpcServer(MasterConfig masterConfig) { - super(NettyRemotingServerFactory.buildNettyRemotingServer(NettyServerConfig.builder() - .serverName("MasterRpcServer").listenPort(masterConfig.getListenPort()).build())); - } - - public void start() { - log.info("Starting MasterRPCServer..."); - nettyRemotingServer.start(); - log.info("Started MasterRPCServer..."); - } - - @Override - public void close() { - log.info("Closing MasterRPCServer..."); - nettyRemotingServer.close(); - log.info("Closed MasterRPCServer..."); + super(NettyServerConfig.builder().serverName("MasterRpcServer").listenPort(masterConfig.getListenPort()) + .build()); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskDispatcher.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskDispatcher.java index 30ab8fadec19..793ae378d0aa 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskDispatcher.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskDispatcher.java @@ -55,10 +55,11 @@ public void dispatchTask(TaskExecuteRunnable taskExecuteRunnable) throws TaskDis taskInstanceDispatchHost = getTaskInstanceDispatchHost(taskExecuteRunnable) .orElseThrow(() -> new TaskDispatchException("Cannot find the host to execute task.")); } catch (WorkerGroupNotFoundException workerGroupNotFoundException) { - log.error("Dispatch task: {} failed, worker group not found.", - taskExecuteRunnable.getTaskExecutionContext().getTaskName(), workerGroupNotFoundException); - addDispatchFailedEvent(taskExecuteRunnable); - return; + // todo: this is a temporary solution, we should refactor the ServerNodeManager to make sure there won't + // throw WorkerGroupNotFoundException unless the worker group is not exist in database + throw new 
TaskDispatchException( + "Dispatch task: " + taskExecuteRunnable.getTaskExecutionContext().getTaskName() + " failed", + workerGroupNotFoundException); } taskExecuteRunnable.getTaskExecutionContext().setHost(taskInstanceDispatchHost.getAddress()); doDispatch(taskExecuteRunnable); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskExecuteRunnable.java index fefdbf349384..2e41173c4bc3 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/BaseTaskExecuteRunnable.java @@ -52,4 +52,32 @@ public TaskExecutionContext getTaskExecutionContext() { return taskExecutionContext; } + @Override + public int compareTo(TaskExecuteRunnable other) { + if (other == null) { + return 1; + } + int workflowInstancePriorityCompareResult = workflowInstance.getProcessInstancePriority().getCode() - + other.getWorkflowInstance().getProcessInstancePriority().getCode(); + if (workflowInstancePriorityCompareResult != 0) { + return workflowInstancePriorityCompareResult; + } + + // smaller number, higher priority + int taskInstancePriorityCompareResult = taskInstance.getTaskInstancePriority().getCode() + - other.getTaskInstance().getTaskInstancePriority().getCode(); + if (taskInstancePriorityCompareResult != 0) { + return taskInstancePriorityCompareResult; + } + + // larger number, higher priority + int taskGroupPriorityCompareResult = + taskInstance.getTaskGroupPriority() - other.getTaskInstance().getTaskGroupPriority(); + if (taskGroupPriorityCompareResult != 0) { + return -taskGroupPriorityCompareResult; + } + // earlier submit time, higher priority + return taskInstance.getFirstSubmitTime().compareTo(other.getTaskInstance().getFirstSubmitTime()); + } + } diff 
--git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/DefaultTaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/DefaultTaskExecuteRunnable.java index c1b13717bdd3..ba4f44721627 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/DefaultTaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/DefaultTaskExecuteRunnable.java @@ -24,7 +24,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.runner.operator.TaskExecuteRunnableOperatorManager; -public class DefaultTaskExecuteRunnable extends PriorityDelayTaskExecuteRunnable { +public class DefaultTaskExecuteRunnable extends BaseTaskExecuteRunnable { private final TaskExecuteRunnableOperatorManager taskExecuteRunnableOperatorManager; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueue.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueue.java index f03bd6b90322..21d6c890f5cc 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueue.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueue.java @@ -17,7 +17,8 @@ package org.apache.dolphinscheduler.server.master.runner; -import java.util.concurrent.DelayQueue; +import org.apache.dolphinscheduler.server.master.runner.queue.DelayEntry; +import org.apache.dolphinscheduler.server.master.runner.queue.PriorityDelayQueue; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -25,26 +26,42 @@ import org.springframework.stereotype.Component; /** - * The class is used to store {@link 
TaskExecuteRunnable} which needs to be dispatched. The {@link TaskExecuteRunnable} will be stored in a {@link DelayQueue}, - * if the {@link TaskExecuteRunnable}'s delay time is 0, then it will be consumed by {@link GlobalTaskDispatchWaitingQueueLooper}. + * The class is used to store {@link TaskExecuteRunnable} which needs to be dispatched. The {@link TaskExecuteRunnable} + * will be stored in {@link PriorityDelayQueue}, if the {@link TaskExecuteRunnable}'s delay time is 0, then it will be + * consumed by {@link GlobalTaskDispatchWaitingQueueLooper}. + *

+ * The order of {@link TaskExecuteRunnable} in the {@link PriorityDelayQueue} is determined by {@link TaskExecuteRunnable#compareTo}. */ @Slf4j @Component public class GlobalTaskDispatchWaitingQueue { - private final DelayQueue queue = new DelayQueue<>(); + private final PriorityDelayQueue> priorityDelayQueue = new PriorityDelayQueue<>(); - public void submitTaskExecuteRunnable(DefaultTaskExecuteRunnable priorityTaskExecuteRunnable) { - queue.put(priorityTaskExecuteRunnable); + /** + * Submit a {@link TaskExecuteRunnable} with delay time 0, it will be consumed immediately. + */ + public void dispatchTaskExecuteRunnable(TaskExecuteRunnable taskExecuteRunnable) { + dispatchTaskExecuteRunnableWithDelay(taskExecuteRunnable, 0); } + /** + * Submit a {@link TaskExecuteRunnable} with delay time, if the delay time <= 0 then it can be consumed. + */ + public void dispatchTaskExecuteRunnableWithDelay(TaskExecuteRunnable taskExecuteRunnable, long delayTimeMills) { + priorityDelayQueue.add(new DelayEntry<>(delayTimeMills, taskExecuteRunnable)); + } + + /** + * Consume {@link TaskExecuteRunnable} from the {@link PriorityDelayQueue}, only the delay time <= 0 can be consumed. 
+ */ @SneakyThrows - public DefaultTaskExecuteRunnable takeTaskExecuteRunnable() { - return queue.take(); + public TaskExecuteRunnable takeTaskExecuteRunnable() { + return priorityDelayQueue.take().getData(); } public int getWaitingDispatchTaskNumber() { - return queue.size(); + return priorityDelayQueue.size(); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueLooper.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueLooper.java index eabbdd8e10e3..5cfb285c2820 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueLooper.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueLooper.java @@ -18,13 +18,11 @@ package org.apache.dolphinscheduler.server.master.runner; import org.apache.dolphinscheduler.common.thread.BaseDaemonThread; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.server.master.runner.dispatcher.TaskDispatchFactory; -import org.apache.dolphinscheduler.server.master.runner.dispatcher.TaskDispatcher; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import lombok.extern.slf4j.Slf4j; @@ -43,10 +41,6 @@ public class GlobalTaskDispatchWaitingQueueLooper extends BaseDaemonThread imple private final AtomicBoolean RUNNING_FLAG = new AtomicBoolean(false); - private final AtomicInteger DISPATCHED_CONSECUTIVE_FAILURE_TIMES = new AtomicInteger(); - - private static final Integer MAX_DISPATCHED_FAILED_TIMES = 100; - public GlobalTaskDispatchWaitingQueueLooper() { super("GlobalTaskDispatchWaitingQueueLooper"); } @@ -64,29 +58,34 
@@ public synchronized void start() { @Override public void run() { - DefaultTaskExecuteRunnable defaultTaskExecuteRunnable; while (RUNNING_FLAG.get()) { - defaultTaskExecuteRunnable = globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable(); - try { - TaskExecutionStatus status = defaultTaskExecuteRunnable.getTaskInstance().getState(); - if (status != TaskExecutionStatus.SUBMITTED_SUCCESS && status != TaskExecutionStatus.DELAY_EXECUTION) { - log.warn("The TaskInstance {} state is : {}, will not dispatch", - defaultTaskExecuteRunnable.getTaskInstance().getName(), status); - continue; - } + doDispatch(); + } + } - TaskDispatcher taskDispatcher = - taskDispatchFactory.getTaskDispatcher(defaultTaskExecuteRunnable.getTaskInstance()); - taskDispatcher.dispatchTask(defaultTaskExecuteRunnable); - DISPATCHED_CONSECUTIVE_FAILURE_TIMES.set(0); - } catch (Exception e) { - defaultTaskExecuteRunnable.getTaskExecutionContext().increaseDispatchFailTimes(); - globalTaskDispatchWaitingQueue.submitTaskExecuteRunnable(defaultTaskExecuteRunnable); - if (DISPATCHED_CONSECUTIVE_FAILURE_TIMES.incrementAndGet() > MAX_DISPATCHED_FAILED_TIMES) { - ThreadUtils.sleep(10 * 1000L); - } - log.error("Dispatch Task: {} failed", defaultTaskExecuteRunnable.getTaskInstance().getName(), e); + void doDispatch() { + final TaskExecuteRunnable taskExecuteRunnable = globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable(); + TaskInstance taskInstance = taskExecuteRunnable.getTaskInstance(); + if (taskInstance == null) { + // This case shouldn't happen, but if it does, log an error and continue + log.error("The TaskInstance is null, drop it(This case shouldn't happen)"); + return; + } + try { + TaskExecutionStatus status = taskInstance.getState(); + if (status != TaskExecutionStatus.SUBMITTED_SUCCESS && status != TaskExecutionStatus.DELAY_EXECUTION) { + log.warn("The TaskInstance {} state is : {}, will not dispatch", taskInstance.getName(), status); + return; } + 
taskDispatchFactory.getTaskDispatcher(taskInstance).dispatchTask(taskExecuteRunnable); + } catch (Exception e) { + // If dispatch failed, will put the task back to the queue + // The task will be dispatched after waiting time. + // the waiting time will increase multiple of times, but will not exceed 60 seconds + long waitingTimeMills = Math.max( + taskExecuteRunnable.getTaskExecutionContext().increaseDispatchFailTimes() * 1_000L, 60_000L); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnableWithDelay(taskExecuteRunnable, waitingTimeMills); + log.error("Dispatch Task: {} failed will retry after: {}/ms", taskInstance.getName(), waitingTimeMills, e); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java index 2fddd9438474..c1b5d0ffabfd 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java @@ -26,21 +26,18 @@ import org.apache.dolphinscheduler.meter.metrics.MetricsProvider; import org.apache.dolphinscheduler.meter.metrics.SystemMetrics; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; +import org.apache.dolphinscheduler.server.master.command.ICommandFetcher; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.config.MasterServerLoadProtection; import org.apache.dolphinscheduler.server.master.event.WorkflowEvent; import org.apache.dolphinscheduler.server.master.event.WorkflowEventQueue; import org.apache.dolphinscheduler.server.master.event.WorkflowEventType; -import org.apache.dolphinscheduler.server.master.exception.MasterException; import 
org.apache.dolphinscheduler.server.master.exception.WorkflowCreateException; import org.apache.dolphinscheduler.server.master.metrics.MasterServerMetrics; -import org.apache.dolphinscheduler.server.master.metrics.ProcessInstanceMetrics; -import org.apache.dolphinscheduler.server.master.registry.MasterSlotManager; import org.apache.dolphinscheduler.service.command.CommandService; import org.apache.commons.collections4.CollectionUtils; -import java.util.Collections; import java.util.List; import java.util.Optional; @@ -56,6 +53,9 @@ @Slf4j public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCloseable { + @Autowired + private ICommandFetcher commandFetcher; + @Autowired private CommandService commandService; @@ -74,9 +74,6 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl @Autowired private WorkflowEventLooper workflowEventLooper; - @Autowired - private MasterSlotManager masterSlotManager; - @Autowired private MasterTaskExecutorBootstrap masterTaskExecutorBootstrap; @@ -125,7 +122,7 @@ public void run() { Thread.sleep(Constants.SLEEP_TIME_MILLIS); continue; } - List commands = findCommands(); + List commands = commandFetcher.fetchCommands(); if (CollectionUtils.isEmpty(commands)) { // indicate that no command ,sleep for 1s Thread.sleep(Constants.SLEEP_TIME_MILLIS); @@ -170,29 +167,4 @@ public void run() { } } - private List findCommands() throws MasterException { - try { - long scheduleStartTime = System.currentTimeMillis(); - int thisMasterSlot = masterSlotManager.getSlot(); - int masterCount = masterSlotManager.getMasterSize(); - if (masterCount <= 0) { - log.warn("Master count: {} is invalid, the current slot: {}", masterCount, thisMasterSlot); - return Collections.emptyList(); - } - int pageSize = masterConfig.getFetchCommandNum(); - final List result = - commandService.findCommandPageBySlot(pageSize, masterCount, thisMasterSlot); - if (CollectionUtils.isNotEmpty(result)) { - long cost = 
System.currentTimeMillis() - scheduleStartTime; - log.info( - "Master schedule bootstrap loop command success, fetch command size: {}, cost: {}ms, current slot: {}, total slot size: {}", - result.size(), cost, thisMasterSlot, masterCount); - ProcessInstanceMetrics.recordCommandQueryTime(cost); - } - return result; - } catch (Exception ex) { - throw new MasterException("Master loop command from database error", ex); - } - } - } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/PriorityDelayTaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/PriorityDelayTaskExecuteRunnable.java deleted file mode 100644 index 255ec6c8ac2c..000000000000 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/PriorityDelayTaskExecuteRunnable.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.runner; - -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; - -import java.util.concurrent.Delayed; -import java.util.concurrent.TimeUnit; - -public abstract class PriorityDelayTaskExecuteRunnable extends BaseTaskExecuteRunnable implements Delayed { - - public PriorityDelayTaskExecuteRunnable(ProcessInstance workflowInstance, - TaskInstance taskInstance, - TaskExecutionContext taskExecutionContext) { - super(workflowInstance, taskInstance, taskExecutionContext); - } - - @Override - public long getDelay(TimeUnit unit) { - return unit.convert( - DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(), - taskExecutionContext.getDelayTime() * 60L), - TimeUnit.SECONDS); - } - - @Override - public int compareTo(Delayed o) { - if (o == null) { - return 1; - } - int delayTimeCompareResult = - Long.compare(this.getDelay(TimeUnit.MILLISECONDS), o.getDelay(TimeUnit.MILLISECONDS)); - if (delayTimeCompareResult != 0) { - return delayTimeCompareResult; - } - PriorityDelayTaskExecuteRunnable other = (PriorityDelayTaskExecuteRunnable) o; - // the smaller dispatch fail times, the higher priority - int dispatchFailTimesCompareResult = taskExecutionContext.getDispatchFailTimes() - - other.getTaskExecutionContext().getDispatchFailTimes(); - if (dispatchFailTimesCompareResult != 0) { - return dispatchFailTimesCompareResult; - } - int workflowInstancePriorityCompareResult = workflowInstance.getProcessInstancePriority().getCode() - - other.getWorkflowInstance().getProcessInstancePriority().getCode(); - if (workflowInstancePriorityCompareResult != 0) { - return workflowInstancePriorityCompareResult; - } - long workflowInstanceIdCompareResult = workflowInstance.getId().compareTo(other.getWorkflowInstance().getId()); - 
if (workflowInstanceIdCompareResult != 0) { - return workflowInstancePriorityCompareResult; - } - int taskInstancePriorityCompareResult = taskInstance.getTaskInstancePriority().getCode() - - other.getTaskInstance().getTaskInstancePriority().getCode(); - if (taskInstancePriorityCompareResult != 0) { - return taskInstancePriorityCompareResult; - } - // larger number, higher priority - int taskGroupPriorityCompareResult = - taskInstance.getTaskGroupPriority() - other.getTaskInstance().getTaskGroupPriority(); - if (taskGroupPriorityCompareResult != 0) { - return -taskGroupPriorityCompareResult; - } - // The task instance shouldn't be equals - return taskInstance.getId().compareTo(other.getTaskInstance().getId()); - } - -} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java index 2f61507e727b..90fb25fcf0ae 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java @@ -17,8 +17,6 @@ package org.apache.dolphinscheduler.server.master.runner; -import static org.apache.dolphinscheduler.common.constants.Constants.DEFAULT_WORKER_GROUP; - import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.Priority; @@ -31,6 +29,8 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; +import org.apache.dolphinscheduler.dao.utils.EnvironmentUtils; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import 
org.apache.dolphinscheduler.extract.base.client.SingletonJdkDynamicRpcClientProxyFactory; import org.apache.dolphinscheduler.extract.master.transportor.StreamingTaskTriggerRequest; import org.apache.dolphinscheduler.extract.worker.ITaskInstanceExecutionEventAckListener; @@ -43,7 +43,6 @@ import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; import org.apache.dolphinscheduler.plugin.task.api.utils.LogUtils; import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; @@ -89,8 +88,6 @@ public class StreamTaskExecuteRunnable implements Runnable { protected ProcessTaskRelationMapper processTaskRelationMapper; - protected TaskPluginManager taskPluginManager; - private StreamTaskInstanceExecCacheManager streamTaskInstanceExecCacheManager; protected TaskDefinition taskDefinition; @@ -115,7 +112,6 @@ public StreamTaskExecuteRunnable(TaskDefinition taskDefinition, this.processService = SpringApplicationContext.getBean(ProcessService.class); this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); this.workerTaskDispatcher = SpringApplicationContext.getBean(WorkerTaskDispatcher.class); - this.taskPluginManager = SpringApplicationContext.getBean(TaskPluginManager.class); this.processTaskRelationMapper = SpringApplicationContext.getBean(ProcessTaskRelationMapper.class); this.taskInstanceDao = SpringApplicationContext.getBean(TaskInstanceDao.class); this.streamTaskInstanceExecCacheManager = @@ -270,12 +266,11 @@ public TaskInstance newTaskInstance(TaskDefinition taskDefinition) { // task dry run flag taskInstance.setDryRun(taskExecuteStartMessage.getDryRun()); - 
taskInstance.setWorkerGroup(StringUtils.isBlank(taskDefinition.getWorkerGroup()) ? DEFAULT_WORKER_GROUP - : taskDefinition.getWorkerGroup()); - taskInstance.setEnvironmentCode( - taskDefinition.getEnvironmentCode() == 0 ? -1 : taskDefinition.getEnvironmentCode()); + taskInstance.setWorkerGroup(WorkerGroupUtils.getWorkerGroupOrDefault(taskDefinition.getWorkerGroup())); + taskInstance + .setEnvironmentCode(EnvironmentUtils.getEnvironmentCodeOrDefault(taskDefinition.getEnvironmentCode())); - if (!taskInstance.getEnvironmentCode().equals(-1L)) { + if (!EnvironmentUtils.isEnvironmentCodeEmpty(taskInstance.getEnvironmentCode())) { Environment environment = processService.findEnvironmentByCode(taskInstance.getEnvironmentCode()); if (Objects.nonNull(environment) && StringUtils.isNotEmpty(environment.getConfig())) { taskInstance.setEnvironmentConfig(environment.getConfig()); @@ -312,14 +307,11 @@ protected TaskExecutionContext getTaskExecutionContext(TaskInstance taskInstance return null; } - TaskChannel taskChannel = taskPluginManager.getTaskChannel(taskInstance.getTaskType()); - ResourceParametersHelper resources = taskChannel.getResources(taskInstance.getTaskParams()); + TaskChannel taskChannel = TaskPluginManager.getTaskChannel(taskInstance.getTaskType()); + ResourceParametersHelper resources = taskChannel.parseParameters(taskInstance.getTaskParams()).getResources(); - AbstractParameters baseParam = taskPluginManager.getParameters( - ParametersNode.builder() - .taskType(taskInstance.getTaskType()) - .taskParams(taskInstance.getTaskParams()) - .build()); + AbstractParameters baseParam = + TaskPluginManager.parseTaskParameters(taskInstance.getTaskType(), taskInstance.getTaskParams()); Map propertyMap = paramParsingPreparation(taskInstance, baseParam); TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() .buildWorkflowInstanceHost(masterConfig.getMasterAddress()) diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecuteRunnable.java index 8f661896171e..62617f4aacdf 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecuteRunnable.java @@ -25,7 +25,7 @@ * This interface is used to define a task which is executing. * todo: split to MasterTaskExecuteRunnable and WorkerTaskExecuteRunnable */ -public interface TaskExecuteRunnable { +public interface TaskExecuteRunnable extends Comparable { void dispatch(); @@ -40,4 +40,5 @@ public interface TaskExecuteRunnable { TaskInstance getTaskInstance(); TaskExecutionContext getTaskExecutionContext(); + } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java index ab1806ff6720..aec31f5abc24 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java @@ -38,7 +38,6 @@ import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.K8sTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; @@ -49,12 +48,10 @@ import 
org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.K8sTaskParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.AbstractResourceParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters; import org.apache.dolphinscheduler.plugin.task.api.utils.JdbcUrlParser; import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils; import org.apache.dolphinscheduler.plugin.task.spark.SparkParameters; @@ -74,7 +71,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import lombok.extern.slf4j.Slf4j; @@ -90,9 +86,6 @@ public class TaskExecutionContextFactory { @Autowired private ProcessService processService; - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private CuringParamsService curingParamsService; @@ -105,16 +98,15 @@ public class TaskExecutionContextFactory { public TaskExecutionContext createTaskExecutionContext(TaskInstance taskInstance) throws TaskExecutionContextCreateException { ProcessInstance workflowInstance = taskInstance.getProcessInstance(); - ResourceParametersHelper resources = - Optional.ofNullable(taskPluginManager.getTaskChannel(taskInstance.getTaskType())) - .map(taskChannel -> taskChannel.getResources(taskInstance.getTaskParams())) - .orElse(null); + ResourceParametersHelper resources = TaskPluginManager.getTaskChannel(taskInstance.getTaskType()) + .parseParameters(taskInstance.getTaskParams()) 
+ .getResources(); setTaskResourceInfo(resources); Map businessParamsMap = curingParamsService.preBuildBusinessParams(workflowInstance); - AbstractParameters baseParam = taskPluginManager.getParameters(ParametersNode.builder() - .taskType(taskInstance.getTaskType()).taskParams(taskInstance.getTaskParams()).build()); + AbstractParameters baseParam = + TaskPluginManager.parseTaskParameters(taskInstance.getTaskType(), taskInstance.getTaskParams()); Map propertyMap = curingParamsService.paramParsingPreparation(taskInstance, baseParam, workflowInstance); TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() @@ -158,9 +150,6 @@ private void setTaskResourceInfo(ResourceParametersHelper resourceParametersHelp case DATASOURCE: setTaskDataSourceResourceInfo(map); break; - case UDF: - setTaskUdfFuncResourceInfo(map); - break; default: break; } @@ -184,19 +173,6 @@ private void setTaskDataSourceResourceInfo(Map map) { - if (MapUtils.isEmpty(map)) { - return; - } - List udfFuncList = processService.queryUdfFunListByIds(map.keySet().toArray(new Integer[map.size()])); - - udfFuncList.forEach(udfFunc -> { - UdfFuncParameters udfFuncParameters = - JSONUtils.parseObject(JSONUtils.toJsonString(udfFunc), UdfFuncParameters.class); - map.put(udfFunc.getId(), udfFuncParameters); - }); - } - private void setDataQualityTaskRelation(DataQualityTaskExecutionContext dataQualityTaskExecutionContext, TaskInstance taskInstance, String tenantCode) { DataQualityParameters dataQualityParameters = @@ -412,6 +388,7 @@ public DataSource getDefaultDataSource() { /** * The StatisticsValueWriterConfig will be used in DataQualityApplication that * writes the statistics value into dolphin scheduler datasource + * * @param dataQualityTaskExecutionContext */ private void setStatisticsValueWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java index eafba17f6965..c58d741d41f8 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java @@ -25,9 +25,9 @@ import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_START_NODES; import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_START_PARAMS; import static org.apache.dolphinscheduler.common.constants.Constants.COMMA; -import static org.apache.dolphinscheduler.common.constants.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.constants.DateConstants.YYYY_MM_DD_HH_MM_SS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_BLOCKING; +import static org.apache.dolphinscheduler.dao.utils.EnvironmentUtils.getEnvironmentCodeOrDefault; +import static org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils.getWorkerGroupOrDefault; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -51,22 +51,22 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; +import org.apache.dolphinscheduler.dao.utils.EnvironmentUtils; import org.apache.dolphinscheduler.dao.utils.TaskCacheUtils; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.extract.base.client.SingletonJdkDynamicRpcClientProxyFactory; import org.apache.dolphinscheduler.extract.worker.ITaskInstanceOperator; import 
org.apache.dolphinscheduler.extract.worker.transportor.UpdateWorkflowHostRequest; import org.apache.dolphinscheduler.extract.worker.transportor.UpdateWorkflowHostResponse; import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; -import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; -import org.apache.dolphinscheduler.plugin.task.api.model.Property; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; import org.apache.dolphinscheduler.plugin.task.api.utils.LogUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.event.StateEvent; import org.apache.dolphinscheduler.server.master.event.StateEventHandleError; import org.apache.dolphinscheduler.server.master.event.StateEventHandleException; -import org.apache.dolphinscheduler.server.master.event.StateEventHandleFailure; import org.apache.dolphinscheduler.server.master.event.StateEventHandler; import org.apache.dolphinscheduler.server.master.event.StateEventHandlerManager; import org.apache.dolphinscheduler.server.master.event.TaskStateEvent; @@ -75,7 +75,6 @@ import org.apache.dolphinscheduler.server.master.metrics.TaskMetrics; import org.apache.dolphinscheduler.server.master.runner.execute.DefaultTaskExecuteRunnableFactory; import org.apache.dolphinscheduler.server.master.runner.taskgroup.TaskGroupCoordinator; -import org.apache.dolphinscheduler.server.master.utils.TaskUtils; import org.apache.dolphinscheduler.server.master.utils.WorkflowInstanceUtils; import org.apache.dolphinscheduler.service.alert.ListenerEventAlertManager; import org.apache.dolphinscheduler.service.alert.ProcessAlertManager; @@ -97,6 +96,7 @@ import java.util.Arrays; import java.util.Collection; import 
java.util.Collections; +import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -298,14 +298,7 @@ public void handleEvents() { stateEvent, stateEventHandleException); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (StateEventHandleFailure stateEventHandleFailure) { - log.error("State event handle failed, will move event to the tail: {}", - stateEvent, - stateEventHandleFailure); - this.stateEvents.remove(stateEvent); - this.stateEvents.offer(stateEvent); - ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (Exception e) { + } catch (Throwable e) { // we catch the exception here, since if the state event handle failed, the state event will still // keep // in the stateEvents queue. @@ -376,7 +369,8 @@ public void taskFinished(TaskInstance taskInstance) throws StateEventHandleExcep if (taskInstance.getState().isSuccess()) { completeTaskSet.add(taskInstance.getTaskCode()); - mergeTaskInstanceVarPool(taskInstance); + workflowInstance.setVarPool(VarPoolUtils.mergeVarPoolJsonString( + Lists.newArrayList(workflowInstance.getVarPool(), taskInstance.getVarPool()))); processInstanceDao.upsertProcessInstance(workflowInstance); ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(workflowInstance.getId()); @@ -385,25 +379,24 @@ public void taskFinished(TaskInstance taskInstance) throws StateEventHandleExcep if (taskInstance.getIsCache().equals(Flag.YES)) { saveCacheTaskInstance(taskInstance); } - if (!workflowInstance.isBlocked()) { - submitPostNode(taskInstance.getTaskCode()); - } + submitPostNode(taskInstance.getTaskCode()); } else if (taskInstance.taskCanRetry() && !workflowInstance.getState().isReadyStop()) { // retry task log.info("Retry taskInstance taskInstance state: {}", taskInstance.getState()); retryTaskInstance(taskInstance); - } else if (taskInstance.getState().isFailure()) { + } else if (taskInstance.getState().isFailure() || taskInstance.getState().isKill() + || 
taskInstance.getState().isStop()) { completeTaskSet.add(taskInstance.getTaskCode()); - ProjectUser projectUser = - processService.queryProjectWithUserByProcessInstanceId(workflowInstance.getId()); - listenerEventAlertManager.publishTaskFailListenerEvent(workflowInstance, taskInstance, projectUser); + listenerEventAlertManager.publishTaskFailListenerEvent(workflowInstance, taskInstance); + if (isTaskNeedPutIntoErrorMap(taskInstance)) { + errorTaskMap.put(taskInstance.getTaskCode(), taskInstance.getId()); + } // There are child nodes and the failure policy is: CONTINUE if (workflowInstance.getFailureStrategy() == FailureStrategy.CONTINUE && DagHelper.haveAllNodeAfterNode( taskInstance.getTaskCode(), workflowExecuteContext.getWorkflowGraph().getDag())) { submitPostNode(taskInstance.getTaskCode()); } else { - errorTaskMap.put(taskInstance.getTaskCode(), taskInstance.getId()); if (workflowInstance.getFailureStrategy() == FailureStrategy.END) { killAllTasks(); } @@ -441,7 +434,6 @@ private void releaseTaskGroupIfNeeded(TaskInstance taskInstance) { /** * crate new task instance to retry, different objects from the original - * */ private void retryTaskInstance(TaskInstance taskInstance) throws StateEventHandleException { ProcessInstance workflowInstance = workflowExecuteContext.getWorkflowInstance(); @@ -532,16 +524,6 @@ public void checkTaskInstanceByStateEvent(TaskStateEvent stateEvent) throws Stat } } - /** - * check if task instance exist by id - */ - public boolean checkTaskInstanceById(int taskInstanceId) { - if (taskInstanceMap.isEmpty()) { - return false; - } - return taskInstanceMap.containsKey(taskInstanceId); - } - /** * get task instance from memory */ @@ -657,6 +639,7 @@ private int createComplementDataCommand(Date scheduleDate) { command.setProcessInstanceId(0); command.setProcessDefinitionVersion(workflowInstance.getProcessDefinitionVersion()); command.setTestFlag(workflowInstance.getTestFlag()); + command.setTenantCode(workflowInstance.getTenantCode()); 
int create = commandService.createCommand(command); processService.saveCommandTrigger(command.getId(), workflowInstance.getId()); return create; @@ -822,10 +805,7 @@ private void initTaskQueue() throws StateEventHandleException, CronParseExceptio completeTaskSet.add(task.getTaskCode()); continue; } - if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getTaskCode(), - workflowExecuteContext.getWorkflowGraph().getDag())) { - continue; - } + if (task.taskCanRetry()) { if (task.getState().isNeedFaultTolerance()) { log.info("TaskInstance needs fault tolerance, will be added to standby list."); @@ -841,7 +821,7 @@ private void initTaskQueue() throws StateEventHandleException, CronParseExceptio } continue; } - if (task.getState().isFailure()) { + if (isTaskNeedPutIntoErrorMap(task)) { errorTaskMap.put(task.getTaskCode(), task.getId()); } } finally { @@ -966,16 +946,6 @@ void tryToDispatchTaskInstance(TaskInstance taskInstance, TaskExecuteRunnable ta if (!taskInstance.getState().isFinished()) { taskExecuteRunnable.dispatch(); } else { - if (workflowExecuteContext.getWorkflowInstance().isBlocked()) { - TaskStateEvent processBlockEvent = TaskStateEvent.builder() - .processInstanceId(workflowExecuteContext.getWorkflowInstance().getId()) - .taskInstanceId(taskInstance.getId()) - .status(taskInstance.getState()) - .type(StateEventType.PROCESS_BLOCKED) - .build(); - this.stateEvents.add(processBlockEvent); - } - TaskStateEvent taskStateChangeEvent = TaskStateEvent.builder() .processInstanceId(workflowExecuteContext.getWorkflowInstance().getId()) .taskInstanceId(taskInstance.getId()) @@ -1070,7 +1040,7 @@ public TaskInstance cloneTolerantTaskInstance(TaskInstance taskInstance) { * new a taskInstance * * @param processInstance process instance - * @param taskNode task node + * @param taskNode task node * @return task instance */ public TaskInstance newTaskInstance(ProcessInstance processInstance, TaskNode taskNode) { @@ -1112,7 +1082,7 @@ public TaskInstance 
newTaskInstance(ProcessInstance processInstance, TaskNode ta taskInstance.setRetryInterval(taskNode.getRetryInterval()); // set task param - taskInstance.setTaskParams(taskNode.getTaskParams()); + taskInstance.setTaskParams(taskNode.getParams()); // set task group and priority taskInstance.setTaskGroupId(taskNode.getTaskGroupId()); @@ -1131,25 +1101,22 @@ public TaskInstance newTaskInstance(ProcessInstance processInstance, TaskNode ta taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); } - String processWorkerGroup = processInstance.getWorkerGroup(); - processWorkerGroup = StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup; - String taskWorkerGroup = - StringUtils.isBlank(taskNode.getWorkerGroup()) ? processWorkerGroup : taskNode.getWorkerGroup(); + String processWorkerGroup = getWorkerGroupOrDefault(processInstance.getWorkerGroup()); + String taskWorkerGroup = getWorkerGroupOrDefault(taskNode.getWorkerGroup()); - Long processEnvironmentCode = - Objects.isNull(processInstance.getEnvironmentCode()) ? -1 : processInstance.getEnvironmentCode(); - Long taskEnvironmentCode = - Objects.isNull(taskNode.getEnvironmentCode()) ? 
processEnvironmentCode : taskNode.getEnvironmentCode(); + Long processEnvironmentCode = getEnvironmentCodeOrDefault(processInstance.getEnvironmentCode()); + Long taskEnvironmentCode = getEnvironmentCodeOrDefault(taskNode.getEnvironmentCode()); - if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) { + if (WorkerGroupUtils.isWorkerGroupEmpty(taskWorkerGroup)) { + // If the task workerGroup is empty, then use the workflow workerGroup/environment taskInstance.setWorkerGroup(processWorkerGroup); - taskInstance.setEnvironmentCode(processEnvironmentCode); + taskInstance.setEnvironmentCode(getEnvironmentCodeOrDefault(taskEnvironmentCode, processEnvironmentCode)); } else { taskInstance.setWorkerGroup(taskWorkerGroup); - taskInstance.setEnvironmentCode(taskEnvironmentCode); + taskInstance.setEnvironmentCode(getEnvironmentCodeOrDefault(taskEnvironmentCode, processEnvironmentCode)); } - if (!taskInstance.getEnvironmentCode().equals(-1L)) { + if (!EnvironmentUtils.isEnvironmentCodeEmpty(taskInstance.getEnvironmentCode())) { Environment environment = processService.findEnvironmentByCode(taskInstance.getEnvironmentCode()); if (Objects.nonNull(environment) && StringUtils.isNotEmpty(environment.getConfig())) { taskInstance.setEnvironmentConfig(environment.getConfig()); @@ -1161,80 +1128,32 @@ public TaskInstance newTaskInstance(ProcessInstance processInstance, TaskNode ta return taskInstance; } - public void getPreVarPool(TaskInstance taskInstance, Set preTask) { + void initializeTaskInstanceVarPool(TaskInstance taskInstance) { + // get pre task ,get all the task varPool to this task + // Do not use dag.getPreviousNodes because of the dag may be miss the upstream node + String preTasks = + workflowExecuteContext.getWorkflowGraph().getTaskNodeByCode(taskInstance.getTaskCode()).getPreTasks(); + Set preTaskList = new HashSet<>(JSONUtils.toList(preTasks, Long.class)); ProcessInstance workflowInstance = 
workflowExecuteContext.getWorkflowInstance(); - Map allProperty = new HashMap<>(); - Map allTaskInstance = new HashMap<>(); - if (CollectionUtils.isNotEmpty(preTask)) { - for (Long preTaskCode : preTask) { - Optional existTaskInstanceOptional = getTaskInstance(preTaskCode); - if (!existTaskInstanceOptional.isPresent()) { - continue; - } - Integer taskId = existTaskInstanceOptional.get().getId(); - if (taskId == null) { - continue; - } - TaskInstance preTaskInstance = taskInstanceMap.get(taskId); - if (preTaskInstance == null) { - continue; - } - String preVarPool = preTaskInstance.getVarPool(); - if (StringUtils.isNotEmpty(preVarPool)) { - List properties = JSONUtils.toList(preVarPool, Property.class); - for (Property info : properties) { - setVarPoolValue(allProperty, allTaskInstance, preTaskInstance, info); - } - } - } - if (allProperty.size() > 0) { - taskInstance.setVarPool(JSONUtils.toJsonString(allProperty.values())); - } - } else { - if (StringUtils.isNotEmpty(workflowInstance.getVarPool())) { - taskInstance.setVarPool(workflowInstance.getVarPool()); - } + if (CollectionUtils.isEmpty(preTaskList)) { + taskInstance.setVarPool(workflowInstance.getVarPool()); + return; } + List preTaskInstanceVarPools = preTaskList + .stream() + .map(taskCode -> getTaskInstance(taskCode).orElse(null)) + .filter(Objects::nonNull) + .sorted(Comparator.comparing(TaskInstance::getEndTime)) + .map(TaskInstance::getVarPool) + .collect(Collectors.toList()); + taskInstance.setVarPool(VarPoolUtils.mergeVarPoolJsonString(preTaskInstanceVarPools)); } public Collection getAllTaskInstances() { return taskInstanceMap.values(); } - private void setVarPoolValue(Map allProperty, - Map allTaskInstance, - TaskInstance preTaskInstance, Property thisProperty) { - // for this taskInstance all the param in this part is IN. 
- thisProperty.setDirect(Direct.IN); - // get the pre taskInstance Property's name - String proName = thisProperty.getProp(); - // if the Previous nodes have the Property of same name - if (allProperty.containsKey(proName)) { - // comparison the value of two Property - Property otherPro = allProperty.get(proName); - // if this property'value of loop is empty,use the other,whether the other's value is empty or not - if (StringUtils.isEmpty(thisProperty.getValue())) { - allProperty.put(proName, otherPro); - // if property'value of loop is not empty,and the other's value is not empty too, use the earlier value - } else if (StringUtils.isNotEmpty(otherPro.getValue())) { - TaskInstance otherTask = allTaskInstance.get(proName); - if (otherTask.getEndTime().getTime() > preTaskInstance.getEndTime().getTime()) { - allProperty.put(proName, thisProperty); - allTaskInstance.put(proName, preTaskInstance); - } else { - allProperty.put(proName, otherPro); - } - } else { - allProperty.put(proName, thisProperty); - allTaskInstance.put(proName, preTaskInstance); - } - } else { - allProperty.put(proName, thisProperty); - allTaskInstance.put(proName, preTaskInstance); - } - } - /** * get complete task instance map, taskCode as key */ @@ -1284,12 +1203,12 @@ private void submitPostNode(Long parentNodeCode) throws StateEventHandleExceptio || state == TaskExecutionStatus.DISPATCH || state == TaskExecutionStatus.SUBMITTED_SUCCESS || state == TaskExecutionStatus.DELAY_EXECUTION) { - // try to take over task instance - if (state == TaskExecutionStatus.SUBMITTED_SUCCESS || state == TaskExecutionStatus.DELAY_EXECUTION - || state == TaskExecutionStatus.DISPATCH) { + if (state == TaskExecutionStatus.SUBMITTED_SUCCESS + || state == TaskExecutionStatus.DELAY_EXECUTION) { // The taskInstance is not in running, directly takeover it } else if (tryToTakeOverTaskInstance(existTaskInstance)) { - log.info("Success take over task {}", existTaskInstance.getName()); + // If the taskInstance has already 
dispatched to worker then will try to take-over it + log.info("Success take over task {} -> status: {}", existTaskInstance.getName(), state); continue; } else { // set the task instance state to fault tolerance @@ -1311,45 +1230,10 @@ private void submitPostNode(Long parentNodeCode) throws StateEventHandleExceptio } // the end node of the branch of the dag if (parentNodeCode != null && dag.getEndNode().contains(parentNodeCode)) { - Optional existTaskInstanceOptional = getTaskInstance(parentNodeCode); - if (existTaskInstanceOptional.isPresent()) { - TaskInstance endTaskInstance = taskInstanceMap.get(existTaskInstanceOptional.get().getId()); - String taskInstanceVarPool = endTaskInstance.getVarPool(); - if (StringUtils.isNotEmpty(taskInstanceVarPool)) { - Set taskProperties = new HashSet<>(JSONUtils.toList(taskInstanceVarPool, Property.class)); - String processInstanceVarPool = workflowInstance.getVarPool(); - List processGlobalParams = - new ArrayList<>(JSONUtils.toList(workflowInstance.getGlobalParams(), Property.class)); - Map oldProcessGlobalParamsMap = processGlobalParams.stream() - .collect(Collectors.toMap(Property::getProp, Property::getDirect)); - Set processVarPoolOut = taskProperties.stream() - .filter(property -> property.getDirect().equals(Direct.OUT) - && oldProcessGlobalParamsMap.containsKey(property.getProp()) - && oldProcessGlobalParamsMap.get(property.getProp()).equals(Direct.OUT)) - .collect(Collectors.toSet()); - Set taskVarPoolIn = - taskProperties.stream().filter(property -> property.getDirect().equals(Direct.IN)) - .collect(Collectors.toSet()); - if (StringUtils.isNotEmpty(processInstanceVarPool)) { - Set properties = - new HashSet<>(JSONUtils.toList(processInstanceVarPool, Property.class)); - Set newProcessVarPoolKeys = - taskProperties.stream().map(Property::getProp).collect(Collectors.toSet()); - properties = properties.stream() - .filter(property -> !newProcessVarPoolKeys.contains(property.getProp())) - .collect(Collectors.toSet()); - 
properties.addAll(processVarPoolOut); - properties.addAll(taskVarPoolIn); - - workflowInstance.setVarPool(JSONUtils.toJsonString(properties)); - } else { - Set varPool = new HashSet<>(); - varPool.addAll(taskVarPoolIn); - varPool.addAll(processVarPoolOut); - workflowInstance.setVarPool(JSONUtils.toJsonString(varPool)); - } - } - } + getTaskInstance(parentNodeCode) + .ifPresent(endTaskInstance -> workflowInstance.setVarPool(VarPoolUtils.mergeVarPoolJsonString( + Lists.newArrayList(workflowInstance.getVarPool(), endTaskInstance.getVarPool())))); + } // if previous node success , post node submit @@ -1377,7 +1261,7 @@ private void submitPostNode(Long parentNodeCode) throws StateEventHandleExceptio private boolean tryToTakeOverTaskInstance(TaskInstance taskInstance) { ProcessInstance workflowInstance = workflowExecuteContext.getWorkflowInstance(); - if (TaskUtils.isMasterTask(taskInstance.getTaskType())) { + if (TaskTypeUtils.isLogicTask(taskInstance.getTaskType())) { return false; } try { @@ -1443,13 +1327,14 @@ private DependResult isTaskDepsComplete(Long taskCode) { if (depTaskState.isKill()) { return DependResult.NON_EXEC; } - // ignore task state if current task is block - if (taskNode.isBlockingTask()) { + + // always return success if current task is condition + if (TaskTypeUtils.isConditionTask(taskNode.getType())) { continue; } - // always return success if current task is condition - if (taskNode.isConditionsTask()) { + // always return success if current task is switch + if (TaskTypeUtils.isSwitchTask(taskNode.getType())) { continue; } @@ -1459,7 +1344,7 @@ private DependResult isTaskDepsComplete(Long taskCode) { } } log.info("The dependTasks of task all success, currentTaskCode: {}, dependTaskCodes: {}", - taskCode, Arrays.toString(completeTaskSet.toArray())); + taskCode, Arrays.toString(indirectDepCodeList.toArray())); return DependResult.SUCCESS; } @@ -1494,7 +1379,7 @@ private void setIndirectDepList(Long taskCode, List indirectDepCodeList) { private 
boolean dependTaskSuccess(Long dependNodeCode, Long nextNodeCode) { DAG dag = workflowExecuteContext.getWorkflowGraph().getDag(); TaskNode dependentNode = dag.getNode(dependNodeCode); - if (dependentNode.isConditionsTask()) { + if (TaskTypeUtils.isConditionTask(dependentNode.getType())) { // condition task need check the branch to run List nextTaskList = DagHelper.parseConditionTask(dependNodeCode, skipTaskNodeMap, dag, getCompleteTaskInstanceMap()); @@ -1507,12 +1392,6 @@ private boolean dependTaskSuccess(Long dependNodeCode, Long nextNodeCode) { } return true; } - if (dependentNode.isSwitchTask()) { - TaskInstance dependentTaskInstance = taskInstanceMap.get(validTaskMap.get(dependentNode.getCode())); - SwitchParameters switchParameters = dependentTaskInstance.getSwitchDependency(); - return switchParameters.getDependTaskList().get(switchParameters.getResultConditionLocation()).getNextNode() - .contains(nextNodeCode); - } Optional existTaskInstanceOptional = getTaskInstance(dependNodeCode); if (!existTaskInstanceOptional.isPresent()) { return false; @@ -1552,8 +1431,7 @@ private List getCompleteTaskByState(TaskExecutionStatus state) { */ private WorkflowExecutionStatus runningState(WorkflowExecutionStatus state) { if (state == WorkflowExecutionStatus.READY_STOP || state == WorkflowExecutionStatus.READY_PAUSE - || state == WorkflowExecutionStatus.READY_BLOCK || - state == WorkflowExecutionStatus.DELAY_EXECUTION) { + || state == WorkflowExecutionStatus.DELAY_EXECUTION) { // if the running task is not completed, the state remains unchanged return state; } else { @@ -1612,7 +1490,7 @@ private WorkflowExecutionStatus processReadyPause() { } List pauseList = getCompleteTaskByState(TaskExecutionStatus.PAUSE); - if (CollectionUtils.isNotEmpty(pauseList) || workflowInstance.isBlocked() || !isComplementEnd() + if (CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd() || standByTaskInstancePriorityQueue.size() > 0) { return WorkflowExecutionStatus.PAUSE; } else { @@ 
-1620,30 +1498,6 @@ private WorkflowExecutionStatus processReadyPause() { } } - /** - * prepare for block - * if process has tasks still running, pause them - * if readyToSubmitTaskQueue is not empty, kill them - * else return block status directly - * - * @return ExecutionStatus - */ - private WorkflowExecutionStatus processReadyBlock() { - if (taskExecuteRunnableMap.size() > 0) { - for (DefaultTaskExecuteRunnable taskExecuteRunnable : taskExecuteRunnableMap.values()) { - if (!TASK_TYPE_BLOCKING.equals(taskExecuteRunnable.getTaskInstance().getTaskType())) { - taskExecuteRunnable.pause(); - } - } - } - if (standByTaskInstancePriorityQueue.size() > 0) { - for (Iterator iter = standByTaskInstancePriorityQueue.iterator(); iter.hasNext();) { - iter.next().setState(TaskExecutionStatus.PAUSE); - } - } - return WorkflowExecutionStatus.BLOCK; - } - /** * generate the latest process instance status by the tasks state * @@ -1659,13 +1513,6 @@ private WorkflowExecutionStatus getProcessInstanceState(ProcessInstance instance return executionStatus; } - // block - if (state == WorkflowExecutionStatus.READY_BLOCK) { - WorkflowExecutionStatus executionStatus = processReadyBlock(); - log.info("The workflowInstance is ready to block, the workflowInstance status is {}", executionStatus); - return executionStatus; - } - // pause if (state == WorkflowExecutionStatus.READY_PAUSE) { WorkflowExecutionStatus executionStatus = processReadyPause(); @@ -1907,14 +1754,8 @@ public void submitStandByTask() throws StateEventHandleException { continue; } } - // init varPool only this task is the first time running if (task.isFirstRun()) { - // get pre task ,get all the task varPool to this task - // Do not use dag.getPreviousNodes because of the dag may be miss the upstream node - String preTasks = workflowExecuteContext.getWorkflowGraph() - .getTaskNodeByCode(task.getTaskCode()).getPreTasks(); - Set preTaskList = new HashSet<>(JSONUtils.toList(preTasks, Long.class)); - getPreVarPool(task, 
preTaskList); + initializeTaskInstanceVarPool(task); } DependResult dependResult = getDependResultForTask(task); if (DependResult.SUCCESS == dependResult) { @@ -2095,38 +1936,14 @@ protected void clearDataIfExecuteTask() { taskInstanceDao.updateById(taskInstance); } - Set removeSet = new HashSet<>(); - for (TaskInstance taskInstance : removeTaskInstances) { - String taskVarPool = taskInstance.getVarPool(); - if (StringUtils.isNotEmpty(taskVarPool)) { - List properties = JSONUtils.toList(taskVarPool, Property.class); - List keys = properties.stream() - .filter(property -> property.getDirect().equals(Direct.OUT)) - .map(property -> String.format("%s_%s", property.getProp(), property.getType())) - .collect(Collectors.toList()); - removeSet.addAll(keys); - } - } - - // remove varPool data and update process instance - // TODO: we can remove this snippet if : we get varPool from pre taskInstance instead of process instance when - // task can not get pre task from incomplete dag - List processProperties = JSONUtils.toList(workflowInstance.getVarPool(), Property.class); - processProperties = processProperties.stream() - .filter(property -> !(property.getDirect().equals(Direct.IN) - && removeSet.contains(String.format("%s_%s", property.getProp(), property.getType())))) - .collect(Collectors.toList()); - - workflowInstance.setVarPool(JSONUtils.toJsonString(processProperties)); + workflowInstance.setVarPool( + VarPoolUtils.subtractVarPoolJson(workflowInstance.getVarPool(), + removeTaskInstances.stream().map(TaskInstance::getVarPool).collect(Collectors.toList()))); processInstanceDao.updateById(workflowInstance); - // remove task instance from taskInstanceMap, completeTaskSet, validTaskMap, errorTaskMap - // completeTaskSet remove dependency taskInstanceMap, so the sort can't change - completeTaskSet.removeIf(taskCode -> { - Optional existTaskInstanceOptional = getTaskInstance(taskCode); - return existTaskInstanceOptional - .filter(taskInstance -> 
dag.containsNode(taskInstance.getTaskCode())).isPresent(); - }); + // remove task instance from taskInstanceMap,taskCodeInstanceMap , completeTaskSet, validTaskMap, errorTaskMap + completeTaskSet.removeIf(dag::containsNode); + taskCodeInstanceMap.entrySet().removeIf(entity -> dag.containsNode(entity.getValue().getTaskCode())); taskInstanceMap.entrySet().removeIf(entry -> dag.containsNode(entry.getValue().getTaskCode())); validTaskMap.entrySet().removeIf(entry -> dag.containsNode(entry.getKey())); errorTaskMap.entrySet().removeIf(entry -> dag.containsNode(entry.getKey())); @@ -2145,6 +1962,24 @@ private void saveCacheTaskInstance(TaskInstance taskInstance) { } } + /** + * Whether the task instance need to put into {@link #errorTaskMap}. + * Only the task instance is failed or killed, and it is parent of condition task. + * Then it should be put into {@link #errorTaskMap}. + *

Once a task instance is put into {@link #errorTaskMap}, it will be thought as failed and make the workflow be failed. + */ + private boolean isTaskNeedPutIntoErrorMap(TaskInstance taskInstance) { + if (!taskInstance.getState().isFailure() && !taskInstance.getState().isStop() + && !taskInstance.getState().isKill()) { + return false; + } + TaskNode taskNode = workflowExecuteContext.getWorkflowGraph().getTaskNodeByCode(taskInstance.getTaskCode()); + if (DagHelper.haveConditionsAfterNode(taskNode.getCode(), workflowExecuteContext.getWorkflowGraph().getDag())) { + return false; + } + return true; + } + private enum WorkflowRunnableStatus { CREATED, INITIALIZE_QUEUE, STARTED, ; @@ -2152,31 +1987,10 @@ private enum WorkflowRunnableStatus { } private void sendTaskLogOnMasterToRemoteIfNeeded(TaskInstance taskInstance) { - if (RemoteLogUtils.isRemoteLoggingEnable() && TaskUtils.isMasterTask(taskInstance.getTaskType())) { + if (RemoteLogUtils.isRemoteLoggingEnable() && TaskTypeUtils.isLogicTask(taskInstance.getTaskType())) { RemoteLogUtils.sendRemoteLog(taskInstance.getLogPath()); log.info("Master sends task log {} to remote storage asynchronously.", taskInstance.getLogPath()); } } - private void mergeTaskInstanceVarPool(TaskInstance taskInstance) { - String taskVarPoolJson = taskInstance.getVarPool(); - if (StringUtils.isEmpty(taskVarPoolJson)) { - return; - } - ProcessInstance workflowInstance = workflowExecuteContext.getWorkflowInstance(); - String processVarPoolJson = workflowInstance.getVarPool(); - if (StringUtils.isEmpty(processVarPoolJson)) { - workflowInstance.setVarPool(taskVarPoolJson); - return; - } - List processVarPool = new ArrayList<>(JSONUtils.toList(processVarPoolJson, Property.class)); - List taskVarPool = JSONUtils.toList(taskVarPoolJson, Property.class); - Set newProcessVarPoolKeys = taskVarPool.stream().map(Property::getProp).collect(Collectors.toSet()); - processVarPool = processVarPool.stream().filter(property -> 
!newProcessVarPoolKeys.contains(property.getProp())) - .collect(Collectors.toList()); - - processVarPool.addAll(taskVarPool); - - workflowInstance.setVarPool(JSONUtils.toJsonString(processVarPool)); - } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactory.java index 52469fb54c5a..9943ee1b5bfe 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactory.java @@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.server.master.runner.dispatcher; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.master.utils.TaskUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import lombok.extern.slf4j.Slf4j; @@ -36,7 +36,10 @@ public class TaskDispatchFactory { private WorkerTaskDispatcher workerTaskDispatcher; public TaskDispatcher getTaskDispatcher(String taskType) { - return TaskUtils.isMasterTask(taskType) ? 
masterTaskDispatcher : workerTaskDispatcher; + if (TaskTypeUtils.isLogicTask(taskType)) { + return masterTaskDispatcher; + } + return workerTaskDispatcher; } public TaskDispatcher getTaskDispatcher(TaskInstance taskInstance) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/BaseTaskExecuteRunnableDispatchOperator.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/BaseTaskExecuteRunnableDispatchOperator.java index 8fa2e2926dc2..72073359d3d7 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/BaseTaskExecuteRunnableDispatchOperator.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/BaseTaskExecuteRunnableDispatchOperator.java @@ -17,14 +17,13 @@ package org.apache.dolphinscheduler.server.master.runner.operator; +import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.server.master.runner.DefaultTaskExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.GlobalTaskDispatchWaitingQueue; -import java.util.concurrent.TimeUnit; - import lombok.extern.slf4j.Slf4j; @Slf4j @@ -43,16 +42,17 @@ public BaseTaskExecuteRunnableDispatchOperator( @Override public void operate(DefaultTaskExecuteRunnable taskExecuteRunnable) { - long remainTime = taskExecuteRunnable.getDelay(TimeUnit.SECONDS); TaskInstance taskInstance = taskExecuteRunnable.getTaskInstance(); - if (remainTime > 0) { + long remainTimeMills = + DateUtils.getRemainTime(taskInstance.getFirstSubmitTime(), taskInstance.getDelayTime() * 60L) * 1_000; + if (remainTimeMills > 0) { taskInstance.setState(TaskExecutionStatus.DELAY_EXECUTION); 
taskInstanceDao.updateById(taskInstance); - log.info("Current taskInstance: {} is choose delay execution, delay time: {}/min, remainTime: {}/s", + log.info("Current taskInstance: {} is choose delay execution, delay time: {}/min, remainTime: {}/ms", taskInstance.getName(), taskInstance.getDelayTime(), - remainTime); + remainTimeMills); } - globalTaskDispatchWaitingQueue.submitTaskExecuteRunnable(taskExecuteRunnable); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnableWithDelay(taskExecuteRunnable, remainTimeMills); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/TaskExecuteRunnableOperatorManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/TaskExecuteRunnableOperatorManager.java index 1b92f5e75ca9..0a67e801dcaa 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/TaskExecuteRunnableOperatorManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/operator/TaskExecuteRunnableOperatorManager.java @@ -17,8 +17,8 @@ package org.apache.dolphinscheduler.server.master.runner.operator; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.server.master.runner.DefaultTaskExecuteRunnable; -import org.apache.dolphinscheduler.server.master.utils.TaskUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -51,28 +51,29 @@ public class TaskExecuteRunnableOperatorManager { private LogicTaskExecuteRunnableTimeoutOperator logicTaskTimeoutOperator; public TaskExecuteRunnableOperator getTaskKillOperator(DefaultTaskExecuteRunnable defaultTaskExecuteRunnable) { - if (TaskUtils.isMasterTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { + if 
(TaskTypeUtils.isLogicTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { return logicTaskKillOperator; } return taskKillOperator; } public TaskExecuteRunnableOperator getTaskPauseOperator(DefaultTaskExecuteRunnable defaultTaskExecuteRunnable) { - if (TaskUtils.isMasterTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { + if (TaskTypeUtils.isLogicTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { + return logicTaskPauseOperator; } return taskPauseOperator; } public TaskExecuteRunnableOperator getTaskDispatchOperator(DefaultTaskExecuteRunnable defaultTaskExecuteRunnable) { - if (TaskUtils.isMasterTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { + if (TaskTypeUtils.isLogicTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { return logicTaskDispatchOperator; } return taskDispatchOperator; } public TaskExecuteRunnableOperator getTaskTimeoutOperator(DefaultTaskExecuteRunnable defaultTaskExecuteRunnable) { - if (TaskUtils.isMasterTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { + if (TaskTypeUtils.isLogicTask(defaultTaskExecuteRunnable.getTaskInstance().getTaskType())) { return logicTaskTimeoutOperator; } return taskTimeoutOperator; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntry.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntry.java new file mode 100644 index 000000000000..da6a75026199 --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntry.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.runner.queue; + +import static com.google.common.base.Preconditions.checkNotNull; + +import java.util.Objects; +import java.util.concurrent.Delayed; +import java.util.concurrent.TimeUnit; + +import lombok.Getter; + +import org.jetbrains.annotations.NotNull; + +public class DelayEntry> implements Delayed { + + private final long delayTimeMills; + + private final long triggerTimeMills; + + @Getter + private final V data; + + public DelayEntry(long delayTimeMills, V data) { + this.delayTimeMills = delayTimeMills; + this.triggerTimeMills = System.currentTimeMillis() + delayTimeMills; + this.data = checkNotNull(data, "data is null"); + } + + @Override + public long getDelay(@NotNull TimeUnit unit) { + long remainTimeMills = triggerTimeMills - System.currentTimeMillis(); + if (TimeUnit.MILLISECONDS.equals(unit)) { + return remainTimeMills; + } + return unit.convert(remainTimeMills, TimeUnit.MILLISECONDS); + } + + @Override + public int compareTo(@NotNull Delayed o) { + DelayEntry other = (DelayEntry) o; + int delayTimeMillsCompareResult = Long.compare(delayTimeMills, other.delayTimeMills); + if (delayTimeMillsCompareResult != 0) { + return delayTimeMillsCompareResult; + } + + if (data == null || other.data == null) { + return 0; + } + return data.compareTo(other.data); + } + + @Override + public boolean equals(Object o) { + if (this == o) 
+ return true; + if (o == null || getClass() != o.getClass()) + return false; + DelayEntry that = (DelayEntry) o; + return delayTimeMills == that.delayTimeMills && Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(delayTimeMills, data); + } +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/PriorityDelayQueue.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/PriorityDelayQueue.java new file mode 100644 index 000000000000..8ed48696253f --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/queue/PriorityDelayQueue.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.runner.queue; + +import java.util.concurrent.DelayQueue; + +import lombok.SneakyThrows; + +public class PriorityDelayQueue { + + private final DelayQueue queue = new DelayQueue<>(); + + public void add(V v) { + queue.put(v); + } + + @SneakyThrows + public V take() { + return queue.take(); + } + + public int size() { + return queue.size(); + } + +} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseSyncLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseSyncLogicTask.java index 10a4ec1e7cd9..064e054ed321 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseSyncLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseSyncLogicTask.java @@ -18,9 +18,12 @@ package org.apache.dolphinscheduler.server.master.runner.task; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; import org.apache.dolphinscheduler.server.master.exception.MasterTaskExecuteException; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import lombok.extern.slf4j.Slf4j; @@ -28,10 +31,20 @@ public abstract class BaseSyncLogicTask implements ISyncLogicTask { protected final TaskExecutionContext taskExecutionContext; + + protected final WorkflowExecuteRunnable workflowExecuteRunnable; + protected final TaskInstance taskInstance; protected final T taskParameters; - protected BaseSyncLogicTask(TaskExecutionContext taskExecutionContext, T taskParameters) { + protected 
BaseSyncLogicTask(WorkflowExecuteRunnable workflowExecuteRunnable, + TaskExecutionContext taskExecutionContext, + T taskParameters) throws LogicTaskInitializeException { this.taskExecutionContext = taskExecutionContext; + this.workflowExecuteRunnable = workflowExecuteRunnable; + this.taskInstance = + workflowExecuteRunnable.getTaskInstance(taskExecutionContext.getTaskInstanceId()).orElseThrow( + () -> new LogicTaskInitializeException( + "Cannot find the task instance in workflow execute runnable")); this.taskParameters = taskParameters; log.info("Success initialize task parameters: \n{}", JSONUtils.toPrettyJsonString(taskParameters)); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTask.java deleted file mode 100644 index acc05aaf2d6c..000000000000 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTask.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.runner.task.blocking; - -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.BlockingOpportunity; -import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; -import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; -import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; -import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; -import org.apache.dolphinscheduler.plugin.task.api.model.DependentItem; -import org.apache.dolphinscheduler.plugin.task.api.model.DependentTaskModel; -import org.apache.dolphinscheduler.plugin.task.api.parameters.BlockingParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; -import org.apache.dolphinscheduler.plugin.task.api.utils.DependentUtils; -import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; -import org.apache.dolphinscheduler.server.master.exception.MasterTaskExecuteException; -import org.apache.dolphinscheduler.server.master.runner.task.BaseSyncLogicTask; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -import lombok.extern.slf4j.Slf4j; - -import com.fasterxml.jackson.core.type.TypeReference; - -@Slf4j -public class BlockingLogicTask extends BaseSyncLogicTask { - - public static final String TASK_TYPE = "BLOCKING"; - - private final ProcessInstanceExecCacheManager processInstanceExecCacheManager; - - private final ProcessInstanceDao processInstanceDao; 
- - private final TaskInstanceDao taskInstanceDao; - - public BlockingLogicTask(TaskExecutionContext taskExecutionContext, - ProcessInstanceExecCacheManager processInstanceExecCacheManager, - ProcessInstanceDao processInstanceDao, - TaskInstanceDao taskInstanceDao) { - super(taskExecutionContext, - JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { - })); - this.processInstanceExecCacheManager = processInstanceExecCacheManager; - this.processInstanceDao = processInstanceDao; - this.taskInstanceDao = taskInstanceDao; - } - - @Override - public void handle() throws MasterTaskExecuteException { - DependResult conditionResult = calculateConditionResult(); - DependResult expected = taskParameters.getBlockingOpportunity() - .equals(BlockingOpportunity.BLOCKING_ON_SUCCESS.getDesc()) - ? DependResult.SUCCESS - : DependResult.FAILED; - boolean isBlocked = (expected == conditionResult); - log.info("blocking opportunity: expected-->{}, actual-->{}", expected, conditionResult); - ProcessInstance workflowInstance = processInstanceExecCacheManager - .getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()).getWorkflowExecuteContext() - .getWorkflowInstance(); - workflowInstance.setBlocked(isBlocked); - if (isBlocked) { - workflowInstance.setStateWithDesc(WorkflowExecutionStatus.READY_BLOCK, "ready block"); - } - taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.SUCCESS); - } - - private DependResult calculateConditionResult() throws MasterTaskExecuteException { - // todo: Directly get the task instance from the cache - Map completeTaskList = taskInstanceDao - .queryValidTaskListByWorkflowInstanceId(taskExecutionContext.getProcessInstanceId(), - taskExecutionContext.getTestFlag()) - .stream() - .collect(Collectors.toMap(TaskInstance::getTaskCode, Function.identity())); - - // todo: we need to parse the task parameter from TaskExecutionContext - TaskInstance taskInstance = - 
processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()) - .getTaskInstance(taskExecutionContext.getTaskInstanceId()) - .orElseThrow(() -> new MasterTaskExecuteException("Task instance not found")); - DependentParameters dependentParameters = taskInstance.getDependency(); - - List tempResultList = new ArrayList<>(); - for (DependentTaskModel dependentTaskModel : dependentParameters.getDependTaskList()) { - List itemDependResult = new ArrayList<>(); - for (DependentItem item : dependentTaskModel.getDependItemList()) { - itemDependResult.add(getDependResultForItem(item, completeTaskList)); - } - DependResult tempResult = - DependentUtils.getDependResultForRelation(dependentTaskModel.getRelation(), itemDependResult); - tempResultList.add(tempResult); - } - return DependentUtils.getDependResultForRelation(dependentParameters.getRelation(), tempResultList); - } - - private DependResult getDependResultForItem(DependentItem item, Map completeTaskList) { - - DependResult dependResult = DependResult.SUCCESS; - if (!completeTaskList.containsKey(item.getDepTaskCode())) { - log.info("depend item: {} have not completed yet.", item.getDepTaskCode()); - dependResult = DependResult.FAILED; - return dependResult; - } - TaskInstance taskInstance = completeTaskList.get(item.getDepTaskCode()); - if (taskInstance.getState() != item.getStatus()) { - log.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), item.getStatus(), - taskInstance.getState().name()); - dependResult = DependResult.FAILED; - } - log.info("Dependent item complete {} {},{}", - Constants.DEPENDENT_SPLIT, item.getDepTaskCode(), dependResult); - return dependResult; - } - -} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTaskPluginFactory.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTaskPluginFactory.java deleted file mode 100644 index b4fdd56c100a..000000000000 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/blocking/BlockingLogicTaskPluginFactory.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.runner.task.blocking; - -import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; -import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; -import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; -import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; -import org.apache.dolphinscheduler.server.master.runner.task.ILogicTaskPluginFactory; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -@Component -public class BlockingLogicTaskPluginFactory implements ILogicTaskPluginFactory { - - @Autowired - private ProcessInstanceDao processInstanceDao; - - @Autowired - private TaskInstanceDao taskInstanceDao; - - @Autowired - private ProcessInstanceExecCacheManager processInstanceExecCacheManager; - - @Override - public BlockingLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) { - return new BlockingLogicTask(taskExecutionContext, processInstanceExecCacheManager, processInstanceDao, - taskInstanceDao); - } - - @Override - public String getTaskType() { - return BlockingLogicTask.TASK_TYPE; - } -} diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTask.java index 803a8043ff36..10f7c52cf5d3 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/condition/ConditionLogicTask.java @@ -17,18 +17,17 @@ package org.apache.dolphinscheduler.server.master.runner.task.condition; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import 
org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.DependResult; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.model.DependentItem; -import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.ConditionsParameters; import org.apache.dolphinscheduler.plugin.task.api.utils.DependentUtils; -import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.task.BaseSyncLogicTask; import java.util.List; @@ -39,50 +38,42 @@ import lombok.extern.slf4j.Slf4j; +import com.fasterxml.jackson.core.type.TypeReference; + @Slf4j -public class ConditionLogicTask extends BaseSyncLogicTask { +public class ConditionLogicTask extends BaseSyncLogicTask { public static final String TASK_TYPE = "CONDITIONS"; private final TaskInstanceDao taskInstanceDao; - private final ProcessInstanceDao workflowInstanceDao; - public ConditionLogicTask(TaskExecutionContext taskExecutionContext, - ProcessInstanceExecCacheManager processInstanceExecCacheManager, - TaskInstanceDao taskInstanceDao, - ProcessInstanceDao workflowInstanceDao) throws LogicTaskInitializeException { - // todo: we need to change the parameter in front-end, so that we can directly use json to parse - super(taskExecutionContext, - 
processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()) - .getTaskInstance(taskExecutionContext.getTaskInstanceId()) - .orElseThrow(() -> new LogicTaskInitializeException( - "Cannot find the task instance in workflow execute runnable")) - .getDependency()); - // todo:check the parameters, why we don't use conditionTask? taskInstance.getDependency(); + public ConditionLogicTask(WorkflowExecuteRunnable workflowExecuteRunnable, + TaskExecutionContext taskExecutionContext, + TaskInstanceDao taskInstanceDao) throws LogicTaskInitializeException { + super(workflowExecuteRunnable, taskExecutionContext, + JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { + })); this.taskInstanceDao = taskInstanceDao; - this.workflowInstanceDao = workflowInstanceDao; } @Override public void handle() { - // calculate the conditionResult DependResult conditionResult = calculateConditionResult(); - TaskExecutionStatus taskExecutionStatus = - (conditionResult == DependResult.SUCCESS) ? 
TaskExecutionStatus.SUCCESS : TaskExecutionStatus.FAILURE; - log.info("The condition result is {}, task instance statue will be: {}", conditionResult, taskExecutionStatus); - taskExecutionContext.setCurrentExecutionStatus(taskExecutionStatus); + log.info("The condition result is {}", conditionResult); + taskParameters.getConditionResult().setConditionSuccess(conditionResult == DependResult.SUCCESS); + taskInstance.setTaskParams(JSONUtils.toJsonString(taskParameters)); + taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.SUCCESS); } private DependResult calculateConditionResult() { - final ProcessInstance processInstance = - workflowInstanceDao.queryById(taskExecutionContext.getProcessInstanceId()); - final List taskInstances = - taskInstanceDao.queryValidTaskListByWorkflowInstanceId(processInstance.getId(), - processInstance.getTestFlag()); - final Map taskInstanceMap = - taskInstances.stream().collect(Collectors.toMap(TaskInstance::getTaskCode, Function.identity())); + final List taskInstances = taskInstanceDao.queryValidTaskListByWorkflowInstanceId( + taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTestFlag()); + final Map taskInstanceMap = taskInstances.stream() + .collect(Collectors.toMap(TaskInstance::getTaskCode, Function.identity())); - List dependResults = taskParameters.getDependTaskList().stream() + ConditionsParameters.ConditionDependency dependence = taskParameters.getDependence(); + List dependResults = dependence.getDependTaskList() + .stream() .map(dependentTaskModel -> DependentUtils.getDependResultForRelation( dependentTaskModel.getRelation(), dependentTaskModel.getDependItemList() @@ -90,7 +81,7 @@ private DependResult calculateConditionResult() { .map(dependentItem -> getDependResultForItem(dependentItem, taskInstanceMap)) .collect(Collectors.toList()))) .collect(Collectors.toList()); - return DependentUtils.getDependResultForRelation(taskParameters.getRelation(), dependResults); + return 
DependentUtils.getDependResultForRelation(dependence.getRelation(), dependResults); } private DependResult getDependResultForItem(DependentItem item, Map taskInstanceMap) { @@ -101,8 +92,9 @@ private DependResult getDependResultForItem(DependentItem item, Map initializeDependentTaskList() { log.info("Begin to initialize dependent task list"); + List dependTaskList = dependentParameters.getDependence().getDependTaskList(); + final Set projectCodes = new HashSet<>(); final Set processDefinitionCodes = new HashSet<>(); final Set taskDefinitionCodes = new HashSet<>(); - for (DependentTaskModel taskModel : dependentParameters.getDependTaskList()) { + for (DependentTaskModel taskModel : dependTaskList) { for (DependentItem dependentItem : taskModel.getDependItemList()) { projectCodes.add(dependentItem.getProjectCode()); processDefinitionCodes.add(dependentItem.getDefinitionCode()); @@ -142,7 +144,7 @@ private List initializeDependentTaskList() { .collect(Collectors.toMap(TaskDefinition::getCode, Function.identity())); final TaskInstance taskInstance = taskInstanceDao.queryById(taskExecutionContext.getTaskInstanceId()); - List dependentExecutes = dependentParameters.getDependTaskList() + List dependentExecutes = dependTaskList .stream() .map(dependentTaskModel -> { for (DependentItem dependentItem : dependentTaskModel.getDependItemList()) { @@ -210,7 +212,7 @@ private DependResult calculateDependResult() { } dependResultList.add(dependResult); } - return DependentUtils.getDependResultForRelation(this.dependentParameters.getRelation(), + return DependentUtils.getDependResultForRelation(dependentParameters.getDependence().getRelation(), dependResultList); } @@ -218,7 +220,8 @@ private boolean isAllDependentTaskFinished() { boolean isAllDependentTaskFinished = true; for (DependentExecute dependentExecute : dependentTaskList) { if (!dependentExecute.finish(dependentDate, processInstance.getTestFlag(), - dependentParameters.getFailurePolicy(), 
dependentParameters.getFailureWaitingTime())) { + dependentParameters.getDependence().getFailurePolicy(), + dependentParameters.getDependence().getFailureWaitingTime())) { isAllDependentTaskFinished = false; } dependentExecute.getDependResultMap().forEach((dependentKey, dependResult) -> { @@ -238,7 +241,7 @@ private boolean isAllDependentTaskFinished() { @Override public @NonNull Duration getAsyncTaskStateCheckInterval() { - return dependentParameters.getCheckInterval() == null ? DEFAULT_STATE_CHECK_INTERVAL - : Duration.ofSeconds(dependentParameters.getCheckInterval()); + return dependentParameters.getDependence().getCheckInterval() == null ? DEFAULT_STATE_CHECK_INTERVAL + : Duration.ofSeconds(dependentParameters.getDependence().getCheckInterval()); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTask.java index e6ee3a78ca7f..774c6cb1f15f 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTask.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.master.runner.task.dependent; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.repository.ProcessDefinitionDao; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; @@ -26,8 +27,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; -import 
org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; -import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; import org.apache.dolphinscheduler.server.master.exception.MasterTaskExecuteException; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.execute.AsyncTaskExecuteFunction; @@ -37,6 +36,8 @@ import lombok.extern.slf4j.Slf4j; +import com.fasterxml.jackson.core.type.TypeReference; + @Slf4j public class DependentLogicTask extends BaseAsyncLogicTask { @@ -48,7 +49,7 @@ public class DependentLogicTask extends BaseAsyncLogicTask private final TaskInstanceDao taskInstanceDao; private final ProcessInstanceDao processInstanceDao; - private final ProcessInstanceExecCacheManager processInstanceExecCacheManager; + private final WorkflowExecuteRunnable workflowExecuteRunnable; public DependentLogicTask(TaskExecutionContext taskExecutionContext, ProjectDao projectDao, @@ -56,19 +57,16 @@ public DependentLogicTask(TaskExecutionContext taskExecutionContext, TaskDefinitionDao taskDefinitionDao, TaskInstanceDao taskInstanceDao, ProcessInstanceDao processInstanceDao, - ProcessInstanceExecCacheManager processInstanceExecCacheManager) throws LogicTaskInitializeException { + WorkflowExecuteRunnable workflowExecuteRunnable) { super(taskExecutionContext, - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()) - .getTaskInstance(taskExecutionContext.getTaskInstanceId()) - .orElseThrow(() -> new LogicTaskInitializeException( - "Cannot find the task instance in workflow execute runnable")) - .getDependency()); + JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { + })); this.projectDao = projectDao; this.processDefinitionDao = processDefinitionDao; this.taskDefinitionDao = taskDefinitionDao; this.taskInstanceDao = taskInstanceDao; this.processInstanceDao = 
processInstanceDao; - this.processInstanceExecCacheManager = processInstanceExecCacheManager; + this.workflowExecuteRunnable = workflowExecuteRunnable; } @@ -85,8 +83,6 @@ public AsyncTaskExecuteFunction getAsyncTaskExecuteFunction() { @Override public void pause() throws MasterTaskExecuteException { - WorkflowExecuteRunnable workflowExecuteRunnable = - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()); if (workflowExecuteRunnable == null) { log.error("Cannot find the WorkflowExecuteRunnable"); return; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java index e8012ac3f9b1..970dd964d114 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dependent/DependentLogicTaskPluginFactory.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.task.ILogicTaskPluginFactory; import lombok.extern.slf4j.Slf4j; @@ -52,6 +53,12 @@ public class DependentLogicTaskPluginFactory implements ILogicTaskPluginFactory< @Override public DependentLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) throws LogicTaskInitializeException { + int workflowInstanceId = taskExecutionContext.getProcessInstanceId(); + WorkflowExecuteRunnable 
workflowExecuteRunnable = + processInstanceExecCacheManager.getByProcessInstanceId(workflowInstanceId); + if (workflowExecuteRunnable == null) { + throw new LogicTaskInitializeException("Cannot find the WorkflowExecuteRunnable: " + workflowInstanceId); + } return new DependentLogicTask( taskExecutionContext, projectDao, @@ -59,7 +66,7 @@ public DependentLogicTask createLogicTask(TaskExecutionContext taskExecutionCont taskDefinitionDao, taskInstanceDao, processInstanceDao, - processInstanceExecCacheManager); + workflowExecuteRunnable); } @Override diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java index e360a8857e26..2401562f15ab 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtils.java @@ -65,6 +65,7 @@ static public Command createCommand(ProcessInstance processInstance, command.setProcessInstancePriority(processInstance.getProcessInstancePriority()); command.setWorkerGroup(processInstance.getWorkerGroup()); command.setDryRun(processInstance.getDryRun()); + command.setTenantCode(processInstance.getTenantCode()); return command; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java index 3baa10b343ef..12cae5c53ea0 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTask.java @@ -252,12 +252,61 @@ private List getDynamicInputParameters() { @Override public void kill() { try { - changeRunningSubprocessInstancesToStop(WorkflowExecutionStatus.READY_STOP); + doKillSubWorkflowInstances(); } catch (MasterTaskExecuteException e) { log.error("kill {} error", taskInstance.getName(), e); } } + private void doKillSubWorkflowInstances() throws MasterTaskExecuteException { + List existsSubProcessInstanceList = + subWorkflowService.getAllDynamicSubWorkflow(processInstance.getId(), taskInstance.getTaskCode()); + if (CollectionUtils.isEmpty(existsSubProcessInstanceList)) { + return; + } + + commandMapper.deleteByWorkflowInstanceIds( + existsSubProcessInstanceList.stream().map(ProcessInstance::getId).collect(Collectors.toList())); + + List runningSubProcessInstanceList = + subWorkflowService.filterRunningProcessInstances(existsSubProcessInstanceList); + doKillRunningSubWorkflowInstances(runningSubProcessInstanceList); + + List waitToRunProcessInstances = + subWorkflowService.filterWaitToRunProcessInstances(existsSubProcessInstanceList); + doKillWaitToRunSubWorkflowInstances(waitToRunProcessInstances); + + this.haveBeenCanceled = true; + } + + private void doKillRunningSubWorkflowInstances(List runningSubProcessInstanceList) throws MasterTaskExecuteException { + for (ProcessInstance subProcessInstance : runningSubProcessInstanceList) { + subProcessInstance.setState(WorkflowExecutionStatus.READY_STOP); + processInstanceDao.updateById(subProcessInstance); + if (subProcessInstance.getState().isFinished()) { + log.info("The process instance [{}] is finished, no need to stop", subProcessInstance.getId()); + continue; + } + try { + sendToSubProcess(taskExecutionContext, subProcessInstance); + log.info("Success send [{}] request to SubWorkflow's master: {}", WorkflowExecutionStatus.READY_STOP, + subProcessInstance.getHost()); + } catch 
(Exception e) { + throw new MasterTaskExecuteException( + String.format("Send stop request to SubWorkflow's master: %s failed", + subProcessInstance.getHost()), + e); + } + } + } + + private void doKillWaitToRunSubWorkflowInstances(List waitToRunWorkflowInstances) { + for (ProcessInstance subProcessInstance : waitToRunWorkflowInstances) { + subProcessInstance.setState(WorkflowExecutionStatus.STOP); + processInstanceDao.updateById(subProcessInstance); + } + } + private void changeRunningSubprocessInstancesToStop(WorkflowExecutionStatus stopStatus) throws MasterTaskExecuteException { this.haveBeenCanceled = true; List existsSubProcessInstanceList = diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTask.java index 1883a27d8b85..5a44966bca4c 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTask.java @@ -27,7 +27,6 @@ import org.apache.dolphinscheduler.extract.master.transportor.WorkflowInstanceStateChangeEvent; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; -import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.exception.MasterTaskExecuteException; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.execute.AsyncTaskExecuteFunction; @@ -41,29 +40,27 @@ public class SubWorkflowLogicTask extends BaseAsyncLogicTask { public static final String TASK_TYPE = "SUB_PROCESS"; - 
private final ProcessInstanceExecCacheManager processInstanceExecCacheManager; + private final WorkflowExecuteRunnable workflowExecuteRunnable; private final ProcessInstanceDao processInstanceDao; public SubWorkflowLogicTask(TaskExecutionContext taskExecutionContext, - ProcessInstanceExecCacheManager processInstanceExecCacheManager, + WorkflowExecuteRunnable workflowExecuteRunnable, ProcessInstanceDao processInstanceDao) { super(taskExecutionContext, JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { })); - this.processInstanceExecCacheManager = processInstanceExecCacheManager; + this.workflowExecuteRunnable = workflowExecuteRunnable; this.processInstanceDao = processInstanceDao; } @Override - public AsyncTaskExecuteFunction getAsyncTaskExecuteFunction() throws MasterTaskExecuteException { + public AsyncTaskExecuteFunction getAsyncTaskExecuteFunction() { // todo: create sub workflow instance here? return new SubWorkflowAsyncTaskExecuteFunction(taskExecutionContext, processInstanceDao); } @Override public void pause() throws MasterTaskExecuteException { - WorkflowExecuteRunnable workflowExecuteRunnable = - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()); if (workflowExecuteRunnable == null) { log.warn("Cannot find WorkflowExecuteRunnable"); return; @@ -99,8 +96,6 @@ public void pause() throws MasterTaskExecuteException { @Override public void kill() { - WorkflowExecuteRunnable workflowExecuteRunnable = - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()); if (workflowExecuteRunnable == null) { log.warn("Cannot find WorkflowExecuteRunnable"); return; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTaskPluginFactory.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTaskPluginFactory.java index ef32515b2d2c..cc6542c135d6 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTaskPluginFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/subworkflow/SubWorkflowLogicTaskPluginFactory.java @@ -20,6 +20,8 @@ import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; +import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.task.ILogicTaskPluginFactory; import lombok.extern.slf4j.Slf4j; @@ -37,8 +39,15 @@ public class SubWorkflowLogicTaskPluginFactory implements ILogicTaskPluginFactor private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @Override - public SubWorkflowLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) { - return new SubWorkflowLogicTask(taskExecutionContext, processInstanceExecCacheManager, processInstanceDao); + public SubWorkflowLogicTask createLogicTask(TaskExecutionContext taskExecutionContext) throws LogicTaskInitializeException { + int workflowInstanceId = taskExecutionContext.getProcessInstanceId(); + WorkflowExecuteRunnable workflowExecuteRunnable = + processInstanceExecCacheManager.getByProcessInstanceId(workflowInstanceId); + if (workflowExecuteRunnable == null) { + throw new LogicTaskInitializeException( + "Cannot find the WorkflowExecuteRunnable by : " + workflowInstanceId); + } + return new SubWorkflowLogicTask(taskExecutionContext, workflowExecuteRunnable, processInstanceDao); 
} @Override diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTask.java index 1f52f9287d37..faab60bd0cfe 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTask.java @@ -24,21 +24,26 @@ import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.SwitchResultVo; import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; -import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; import org.apache.dolphinscheduler.server.master.exception.MasterTaskExecuteException; +import org.apache.dolphinscheduler.server.master.graph.IWorkflowGraph; +import org.apache.dolphinscheduler.server.master.runner.IWorkflowExecuteContext; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.task.BaseSyncLogicTask; import org.apache.dolphinscheduler.server.master.utils.SwitchTaskUtils; +import org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.commons.collections4.CollectionUtils; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; +import com.fasterxml.jackson.core.type.TypeReference; + @Slf4j public class SwitchLogicTask extends BaseSyncLogicTask { @@ -47,104 +52,96 @@ public class SwitchLogicTask extends BaseSyncLogicTask { private final WorkflowExecuteRunnable workflowExecuteRunnable; private 
final TaskInstance taskInstance; - public SwitchLogicTask(TaskExecutionContext taskExecutionContext, - ProcessInstanceExecCacheManager processInstanceExecCacheManager) throws LogicTaskInitializeException { - super(taskExecutionContext, - // todo: we need to refactor the logic task parameter........ - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()) - .getTaskInstance(taskExecutionContext.getTaskInstanceId()) - .orElseThrow(() -> new LogicTaskInitializeException( - "Cannot find the task instance in workflow execute runnable")) - .getSwitchDependency()); - this.workflowExecuteRunnable = - processInstanceExecCacheManager.getByProcessInstanceId(taskExecutionContext.getProcessInstanceId()); - this.taskInstance = workflowExecuteRunnable.getTaskInstance(taskExecutionContext.getTaskInstanceId()) - .orElseThrow(() -> new LogicTaskInitializeException( - "Cannot find the task instance in workflow execute runnable")); + public SwitchLogicTask(WorkflowExecuteRunnable workflowExecuteRunnable, + TaskExecutionContext taskExecutionContext) throws LogicTaskInitializeException { + super(workflowExecuteRunnable, + taskExecutionContext, + JSONUtils.parseObject(taskExecutionContext.getTaskParams(), new TypeReference() { + })); + this.workflowExecuteRunnable = workflowExecuteRunnable; + this.taskInstance = + workflowExecuteRunnable.getTaskInstance(taskExecutionContext.getTaskInstanceId()).orElseThrow( + () -> new LogicTaskInitializeException( + "Cannot find the task instance in workflow execute runnable")); } @Override public void handle() throws MasterTaskExecuteException { - // Calculate the condition result and get the next node - if (CollectionUtils.isEmpty(taskParameters.getDependTaskList())) { + if (CollectionUtils.isEmpty(taskParameters.getSwitchResult().getDependTaskList())) { + // If the branch is empty then will go into the default branch + // This case shouldn't happen, we can directly throw exception and forbid the user to 
set branch + log.info("The switch items is empty"); moveToDefaultBranch(); } else { calculateSwitchBranch(); } - taskInstance.setSwitchDependency(taskParameters); - log.info("Switch task execute finished"); + checkIfBranchExist(taskParameters.getNextBranch()); + taskInstance.setTaskParams(JSONUtils.toJsonString(taskParameters)); taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.SUCCESS); + log.info("Switch task execute finished: {}", taskExecutionContext.getCurrentExecutionStatus().name()); } private void moveToDefaultBranch() { - checkIfBranchExist(taskParameters.getNextNode()); - - List switchResultVos = taskParameters.getDependTaskList(); - switchResultVos.add(new SwitchResultVo(null, taskParameters.getNextNode())); - taskParameters.setResultConditionLocation(switchResultVos.size() - 1); - + log.info("Begin to move to the default branch"); + if (taskParameters.getSwitchResult().getNextNode() == null) { + throw new IllegalArgumentException( + "The default branch is empty, please check the switch task configuration"); + } + taskParameters.setNextBranch(taskParameters.getSwitchResult().getNextNode()); log.info("The condition is not satisfied, move to the default branch: {}", - taskParameters.getNextNode().stream().map(node -> workflowExecuteRunnable.getWorkflowExecuteContext() - .getWorkflowGraph().getDag().getNode(node).getName()).collect(Collectors.toList())); + getTaskName(taskParameters.getNextBranch())); } private void calculateSwitchBranch() { - List switchResultVos = taskParameters.getDependTaskList(); - if (CollectionUtils.isEmpty(switchResultVos)) { - moveToDefaultBranch(); - } + List switchResultVos = taskParameters.getSwitchResult().getDependTaskList(); Map globalParams = taskExecutionContext.getPrepareParamsMap(); Map varParams = JSONUtils .toList(taskInstance.getVarPool(), Property.class) .stream() .collect(Collectors.toMap(Property::getProp, Property -> Property)); - int finalConditionLocation = -1; - for (int i = 0; i < 
switchResultVos.size(); i++) { - SwitchResultVo switchResultVo = switchResultVos.get(i); - log.info("Begin to execute {} condition: {} ", i, switchResultVo.getCondition()); - String content = SwitchTaskUtils.generateContentWithTaskParams(switchResultVo.getCondition(), globalParams, - varParams); - log.info("Format condition sentence::{} successfully", content); - boolean result; + Long nextBranch = null; + for (SwitchResultVo switchResultVo : switchResultVos) { + log.info("Begin to execute switch item: {} ", switchResultVo); try { - result = SwitchTaskUtils.evaluate(content); - log.info("Execute condition sentence: {} successfully: {}", content, result); - if (result) { - finalConditionLocation = i; + String content = SwitchTaskUtils.generateContentWithTaskParams(switchResultVo.getCondition(), + globalParams, varParams); + log.info("Format condition sentence::{} successfully", content); + boolean conditionResult = SwitchTaskUtils.evaluate(content); + log.info("Execute condition sentence: {} successfully: {}", content, conditionResult); + if (conditionResult) { + // If matched, break the loop + nextBranch = switchResultVo.getNextNode(); + break; } } catch (Exception e) { - log.info("Execute condition sentence: {} failed", content, e); + log.info("Execute switch item: {} failed", switchResultVo, e); } } - if (finalConditionLocation >= 0) { - checkIfBranchExist(switchResultVos.get(finalConditionLocation).getNextNode()); - log.info("The condition is satisfied, move to the branch: {}", - switchResultVos.get(finalConditionLocation).getNextNode().stream() - .map(node -> workflowExecuteRunnable.getWorkflowExecuteContext().getWorkflowGraph().getDag() - .getNode(node).getName()) - .collect(Collectors.toList())); - taskParameters.setResultConditionLocation(finalConditionLocation); - } else { - log.info("All conditions are not satisfied, move to the default branch"); + + if (nextBranch == null) { + log.info("All switch item is not satisfied"); moveToDefaultBranch(); } } - 
private void checkIfBranchExist(List branchNode) { - if (CollectionUtils.isEmpty(branchNode)) { - throw new IllegalArgumentException("The branchNode is empty, please check the switch task configuration"); + private void checkIfBranchExist(Long branchNode) { + if (branchNode == null) { + throw new IllegalArgumentException("The branch is empty, please check the switch task configuration"); } - for (Long branch : branchNode) { - if (branch == null) { - throw new IllegalArgumentException("The branch is empty, please check the switch task configuration"); - } - if (!workflowExecuteRunnable.getWorkflowExecuteContext().getWorkflowGraph().getDag().containsNode(branch)) { - throw new IllegalArgumentException( - "The branch(code= " + branchNode - + ") is not in the dag, please check the switch task configuration"); - } + if (!workflowExecuteRunnable.getWorkflowExecuteContext().getWorkflowGraph().getDag().containsNode(branchNode)) { + throw new IllegalArgumentException( + "The branch(code= " + branchNode + + ") is not in the dag, please check the switch task configuration"); } } + private String getTaskName(Long taskCode) { + Optional taskNode = Optional.ofNullable(workflowExecuteRunnable.getWorkflowExecuteContext()) + .map(IWorkflowExecuteContext::getWorkflowGraph) + .map(IWorkflowGraph::getDag) + .map(dag -> dag.getNode(taskCode)); + return taskNode.map(TaskNode::getName).orElse(null); + } + } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTaskPluginFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTaskPluginFactory.java index dfb47aa92d75..926e70c63796 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTaskPluginFactory.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/switchtask/SwitchLogicTaskPluginFactory.java @@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.exception.LogicTaskInitializeException; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.task.ILogicTaskPluginFactory; import lombok.extern.slf4j.Slf4j; @@ -36,7 +37,14 @@ public class SwitchLogicTaskPluginFactory implements ILogicTaskPluginFactory taskGroupQueues = - taskGroupQueueDao.queryAcquiredTaskGroupQueueByGroupId(taskGroup.getId()); - int actualUseSize = taskGroupQueues.size(); + int actualUseSize = taskGroupQueueDao.countUsingTaskGroupQueueByGroupId(taskGroup.getId()); if (taskGroup.getUseSize() == actualUseSize) { continue; } @@ -160,13 +162,35 @@ private void amendTaskGroupUseSize() { taskGroup.setUseSize(actualUseSize); taskGroupDao.updateById(taskGroup); } + log.info("Success amend TaskGroup useSize cost: {}/ms", taskGroupCoordinatorRoundTimeCost.getTime()); } /** * Make sure the TaskGroupQueue status is {@link TaskGroupQueueStatus#RELEASE} when the related {@link TaskInstance} is not exist or status is finished. 
*/ private void amendTaskGroupQueueStatus() { - List taskGroupQueues = taskGroupQueueDao.queryAllInQueueTaskGroupQueue(); + int minTaskGroupQueueId = -1; + int limit = DEFAULT_LIMIT; + StopWatch taskGroupCoordinatorRoundTimeCost = StopWatch.createStarted(); + while (true) { + List taskGroupQueues = + taskGroupQueueDao.queryInQueueTaskGroupQueue(minTaskGroupQueueId, limit); + if (CollectionUtils.isEmpty(taskGroupQueues)) { + break; + } + amendTaskGroupQueueStatus(taskGroupQueues); + if (taskGroupQueues.size() < limit) { + break; + } + minTaskGroupQueueId = taskGroupQueues.get(taskGroupQueues.size() - 1).getId(); + } + log.info("Success amend TaskGroupQueue status cost: {}/ms", taskGroupCoordinatorRoundTimeCost.getTime()); + } + + /** + * Make sure the TaskGroupQueue status is {@link TaskGroupQueueStatus#RELEASE} when the related {@link TaskInstance} is not exist or status is finished. + */ + private void amendTaskGroupQueueStatus(List taskGroupQueues) { List taskInstanceIds = taskGroupQueues.stream() .map(TaskGroupQueue::getTaskId) .collect(Collectors.toList()); @@ -198,10 +222,30 @@ private void dealWithForceStartTaskGroupQueue() { // Find the force start task group queue(Which is inQueue and forceStart is YES) // Notify the related waiting task instance // Set the taskGroupQueue status to RELEASE and remove it from queue - List taskGroupQueues = taskGroupQueueDao.queryAllInQueueTaskGroupQueue() - .stream() - .filter(taskGroupQueue -> Flag.YES.getCode() == taskGroupQueue.getForceStart()) - .collect(Collectors.toList()); + // We use limit here to avoid OOM, and we will retry to notify force start queue at next time + int minTaskGroupQueueId = -1; + int limit = DEFAULT_LIMIT; + StopWatch taskGroupCoordinatorRoundTimeCost = StopWatch.createStarted(); + while (true) { + List taskGroupQueues = + taskGroupQueueDao.queryWaitNotifyForceStartTaskGroupQueue(minTaskGroupQueueId, limit); + if (CollectionUtils.isEmpty(taskGroupQueues)) { + break; + } + 
dealWithForceStartTaskGroupQueue(taskGroupQueues); + if (taskGroupQueues.size() < limit) { + break; + } + minTaskGroupQueueId = taskGroupQueues.get(taskGroupQueues.size() - 1).getId(); + } + log.info("Success deal with force start TaskGroupQueue cost: {}/ms", + taskGroupCoordinatorRoundTimeCost.getTime()); + } + + private void dealWithForceStartTaskGroupQueue(List taskGroupQueues) { + // Find the force start task group queue(Which is inQueue and forceStart is YES) + // Notify the related waiting task instance + // Set the taskGroupQueue status to RELEASE and remove it from queue for (TaskGroupQueue taskGroupQueue : taskGroupQueues) { try { LogUtils.setTaskInstanceIdMDC(taskGroupQueue.getTaskId()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java index 86515d032d84..000804ce8d2e 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java @@ -31,6 +31,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.utils.LogUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import org.apache.dolphinscheduler.server.master.builder.TaskExecutionContextBuilder; @@ -40,7 +41,6 @@ import org.apache.dolphinscheduler.server.master.metrics.TaskMetrics; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import 
org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThreadPool; -import org.apache.dolphinscheduler.server.master.utils.TaskUtils; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.utils.ProcessUtils; @@ -167,7 +167,7 @@ private void failoverTaskInstance(@NonNull ProcessInstance processInstance, @Non taskInstance.setProcessInstance(processInstance); - if (!TaskUtils.isMasterTask(taskInstance.getTaskType())) { + if (!TaskTypeUtils.isLogicTask(taskInstance.getTaskType())) { killYarnTask(taskInstance, processInstance); } else { log.info("The failover taskInstance is a master task, no need to failover in worker failover"); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/task/MasterHeartBeatTask.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/task/MasterHeartBeatTask.java index e9b0970ed300..f6c1e7ee3ca6 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/task/MasterHeartBeatTask.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/task/MasterHeartBeatTask.java @@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.config.MasterServerLoadProtection; +import org.apache.dolphinscheduler.server.master.metrics.MasterServerMetrics; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -64,7 +65,8 @@ public MasterHeartBeat getHeartBeat() { return MasterHeartBeat.builder() .startupTime(ServerLifeCycleManager.getServerStartupTime()) .reportTime(System.currentTimeMillis()) - .cpuUsage(systemMetrics.getTotalCpuUsedPercentage()) + .jvmCpuUsage(systemMetrics.getJvmCpuUsagePercentage()) + .cpuUsage(systemMetrics.getSystemCpuUsagePercentage()) .jvmMemoryUsage(systemMetrics.getJvmMemoryUsedPercentage()) 
.memoryUsage(systemMetrics.getSystemMemoryUsedPercentage()) .diskUsage(systemMetrics.getDiskUsedPercentage()) @@ -79,6 +81,7 @@ public MasterHeartBeat getHeartBeat() { public void writeHeartBeat(MasterHeartBeat masterHeartBeat) { String masterHeartBeatJson = JSONUtils.toJsonString(masterHeartBeat); registryClient.persistEphemeral(heartBeatPath, masterHeartBeatJson); + MasterServerMetrics.incMasterHeartbeatCount(); log.debug("Success write master heartBeatInfo into registry, masterRegistryPath: {}, heartBeatInfo: {}", heartBeatPath, masterHeartBeatJson); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java index 15ab34de34e8..28f9fd682bfe 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java @@ -320,7 +320,7 @@ private ProcessInstance findLastProcessInterval(Long definitionCode, Long taskCo int testFlag) { ProcessInstance lastSchedulerProcess = - processInstanceDao.queryLastSchedulerProcessInterval(definitionCode, dateInterval, testFlag); + processInstanceDao.queryLastSchedulerProcessInterval(definitionCode, taskCode, dateInterval, testFlag); ProcessInstance lastManualProcess = processInstanceDao.queryLastManualProcessInterval(definitionCode, taskCode, dateInterval, testFlag); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtils.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtils.java index 52f274cbcb92..1676df7e01c1 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtils.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtils.java @@ -63,7 +63,6 @@ public static String generateContentWithTaskParams(String condition, Map MASTER_TASK_TYPES = Sets.newHashSet( - BlockingLogicTask.TASK_TYPE, - ConditionLogicTask.TASK_TYPE, - DependentLogicTask.TASK_TYPE, - SubWorkflowLogicTask.TASK_TYPE, - SwitchLogicTask.TASK_TYPE, - DynamicLogicTask.TASK_TYPE); - - public boolean isMasterTask(String taskType) { - return MASTER_TASK_TYPES.contains(taskType); - } -} diff --git a/dolphinscheduler-master/src/main/resources/application.yaml b/dolphinscheduler-master/src/main/resources/application.yaml index ce7b1df7ddbc..da1e9869285c 100644 --- a/dolphinscheduler-master/src/main/resources/application.yaml +++ b/dolphinscheduler-master/src/main/resources/application.yaml @@ -22,17 +22,6 @@ spring: jackson: time-zone: UTC date-format: "yyyy-MM-dd HH:mm:ss" - cache: - # default enable cache, you can disable by `type: none` - type: none - cache-names: - - tenant - - user - - processDefinition - - processTaskRelation - - taskDefinition - caffeine: - spec: maximumSize=100,expireAfterWrite=300s,recordStats datasource: driver-class-name: org.postgresql.Driver url: jdbc:postgresql://127.0.0.1:5432/dolphinscheduler @@ -40,15 +29,7 @@ spring: password: root hikari: connection-test-query: select 1 - minimum-idle: 5 - auto-commit: true - validation-timeout: 3000 pool-name: DolphinScheduler - maximum-pool-size: 50 - connection-timeout: 30000 - idle-timeout: 600000 - leak-detection-threshold: 0 - initialization-fail-timeout: 1 quartz: job-store-type: jdbc jdbc: @@ -70,6 +51,7 @@ spring: org.quartz.scheduler.makeSchedulerThreadDaemon: true org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate org.quartz.jobStore.clusterCheckinInterval: 5000 + cloud.discovery.client.composite-indicator.enabled: false # Mybatis-plus configuration, you don't need to change it mybatis-plus: @@ -92,18 +74,16 @@ 
registry: namespace: dolphinscheduler connect-string: localhost:2181 retry-policy: - base-sleep-time: 60ms - max-sleep: 300ms + base-sleep-time: 1s + max-sleep: 3s max-retries: 5 - session-timeout: 30s - connection-timeout: 9s - block-until-connected: 600ms + session-timeout: 60s + connection-timeout: 15s + block-until-connected: 15s digest: ~ master: listen-port: 5678 - # master fetch command num - fetch-command-num: 10 # master prepare execute thread number to limit handle commands in parallel pre-exec-threads: 10 # master execute thread number to limit process instances in parallel @@ -122,10 +102,10 @@ master: server-load-protection: # If set true, will open master overload protection enabled: true - # Master max cpu usage, when the master's cpu usage is smaller then this value, master server can execute workflow. - max-cpu-usage-percentage-thresholds: 0.7 - # Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. - max-jvm-memory-usage-percentage-thresholds: 0.7 + # Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. + max-system-cpu-usage-percentage-thresholds: 0.7 + # Master max jvm cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. + max-jvm-cpu-usage-percentage-thresholds: 0.7 # Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. max-system-memory-usage-percentage-thresholds: 0.7 # Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. 
@@ -140,6 +120,13 @@ master: # The max waiting time to reconnect to registry if you set the strategy to waiting max-waiting-time: 100s worker-group-refresh-interval: 10s + command-fetch-strategy: + type: ID_SLOT_BASED + config: + # The incremental id step + id-step: 1 + # master fetch command num + fetch-size: 10 server: port: 5679 diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterConfigTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterConfigTest.java index ed982933d287..9d26aa81f4bb 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterConfigTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterConfigTest.java @@ -17,16 +17,16 @@ package org.apache.dolphinscheduler.server.master.config; -import org.junit.jupiter.api.Assertions; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.ActiveProfiles; -import org.springframework.test.context.junit.jupiter.SpringExtension; -@ActiveProfiles("master") -@ExtendWith(SpringExtension.class) +@AutoConfigureMockMvc @SpringBootTest(classes = MasterConfig.class) public class MasterConfigTest { @@ -36,6 +36,29 @@ public class MasterConfigTest { @Test public void getMasterDispatchTaskNumber() { int masterDispatchTaskNumber = masterConfig.getDispatchTaskNumber(); - Assertions.assertEquals(3, masterDispatchTaskNumber); + assertEquals(30, masterDispatchTaskNumber); 
+ } + + @Test + public void getServerLoadProtection() { + MasterServerLoadProtection serverLoadProtection = masterConfig.getServerLoadProtection(); + assertTrue(serverLoadProtection.isEnabled()); + assertEquals(0.77, serverLoadProtection.getMaxSystemCpuUsagePercentageThresholds()); + assertEquals(0.77, serverLoadProtection.getMaxJvmCpuUsagePercentageThresholds()); + assertEquals(0.77, serverLoadProtection.getMaxJvmCpuUsagePercentageThresholds()); + assertEquals(0.77, serverLoadProtection.getMaxSystemMemoryUsagePercentageThresholds()); + assertEquals(0.77, serverLoadProtection.getMaxDiskUsagePercentageThresholds()); + } + + @Test + public void getCommandFetchStrategy() { + CommandFetchStrategy commandFetchStrategy = masterConfig.getCommandFetchStrategy(); + assertThat(commandFetchStrategy.getType()) + .isEqualTo(CommandFetchStrategy.CommandFetchStrategyType.ID_SLOT_BASED); + + CommandFetchStrategy.IdSlotBasedFetchConfig idSlotBasedFetchConfig = + (CommandFetchStrategy.IdSlotBasedFetchConfig) commandFetchStrategy.getConfig(); + assertThat(idSlotBasedFetchConfig.getIdStep()).isEqualTo(3); + assertThat(idSlotBasedFetchConfig.getFetchSize()).isEqualTo(11); } } diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtectionTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtectionTest.java index 90627f99d35b..ce12eb1bd94f 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtectionTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/config/MasterServerLoadProtectionTest.java @@ -30,7 +30,8 @@ void isOverload() { SystemMetrics systemMetrics = SystemMetrics.builder() .jvmMemoryUsedPercentage(0.71) .systemMemoryUsedPercentage(0.71) - .totalCpuUsedPercentage(0.71) + .systemCpuUsagePercentage(0.71) + .jvmCpuUsagePercentage(0.71) 
.diskUsedPercentage(0.71) .build(); masterServerLoadProtection.setEnabled(false); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServerTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServerTest.java new file mode 100644 index 000000000000..1e5a77edb332 --- /dev/null +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/rpc/MasterRpcServerTest.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.rpc; + +import org.apache.dolphinscheduler.server.master.config.MasterConfig; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class MasterRpcServerTest { + + private final MasterRpcServer masterRpcServer = new MasterRpcServer(new MasterConfig()); + + @Test + void testStart() { + Assertions.assertDoesNotThrow(masterRpcServer::start); + } + + @Test + void testClose() { + Assertions.assertDoesNotThrow(masterRpcServer::close); + } +} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java new file mode 100644 index 000000000000..843456b98fe7 --- /dev/null +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/GlobalTaskDispatchWaitingQueueTest.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.runner; + +import static com.google.common.truth.Truth.assertThat; +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertThrowsExactly; + +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; +import org.apache.dolphinscheduler.server.master.runner.operator.TaskExecuteRunnableOperatorManager; + +import org.apache.commons.lang3.time.DateUtils; + +import java.time.Duration; +import java.util.Date; +import java.util.concurrent.CompletableFuture; + +import org.awaitility.Awaitility; +import org.awaitility.core.ConditionTimeoutException; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class GlobalTaskDispatchWaitingQueueTest { + + private GlobalTaskDispatchWaitingQueue globalTaskDispatchWaitingQueue; + + @BeforeEach + public void setUp() { + globalTaskDispatchWaitingQueue = new GlobalTaskDispatchWaitingQueue(); + } + + @Test + void submitTaskExecuteRunnable() { + TaskExecuteRunnable taskExecuteRunnable = createTaskExecuteRunnable(); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable); + Awaitility.await() + .atMost(Duration.ofSeconds(1)) + .untilAsserted( + () -> Assertions.assertNotNull(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable())); + } + + @Test + void testSubmitTaskExecuteRunnableWithDelay() { + TaskExecuteRunnable taskExecuteRunnable = createTaskExecuteRunnable(); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnableWithDelay(taskExecuteRunnable, 3_000L); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable); + + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable()).isNotNull(); + 
Awaitility.await() + .atLeast(Duration.ofSeconds(2)) + .atMost(Duration.ofSeconds(4)) + .untilAsserted( + () -> Assertions.assertNotNull(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable())); + } + + @Test + void takeTaskExecuteRunnable_NoElementShouldBlock() { + CompletableFuture completableFuture = + CompletableFuture.runAsync(() -> globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable()); + assertThrowsExactly(ConditionTimeoutException.class, + () -> await() + .atLeast(Duration.ofSeconds(2)) + .timeout(Duration.ofSeconds(3)) + .until(completableFuture::isDone)); + } + + @Test + void takeTaskExecuteRunnable_withDifferentTaskInstancePriority() { + TaskExecuteRunnable taskExecuteRunnable1 = createTaskExecuteRunnable(); + taskExecuteRunnable1.getTaskInstance().setId(1); + taskExecuteRunnable1.getTaskInstance().setTaskInstancePriority(Priority.MEDIUM); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable1); + + TaskExecuteRunnable taskExecuteRunnable2 = createTaskExecuteRunnable(); + taskExecuteRunnable2.getTaskInstance().setId(2); + taskExecuteRunnable2.getTaskInstance().setTaskInstancePriority(Priority.HIGH); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable2); + + TaskExecuteRunnable taskExecuteRunnable3 = createTaskExecuteRunnable(); + taskExecuteRunnable3.getTaskInstance().setId(3); + taskExecuteRunnable3.getTaskInstance().setTaskInstancePriority(Priority.LOW); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable3); + + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(2); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(1); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(3); + } + + @Test + void takeTaskExecuteRunnable_withDifferentTaskGroupPriority() { + TaskExecuteRunnable 
taskExecuteRunnable1 = createTaskExecuteRunnable(); + taskExecuteRunnable1.getTaskInstance().setId(1); + taskExecuteRunnable1.getTaskInstance().setTaskGroupPriority(Priority.MEDIUM.getCode()); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable1); + + TaskExecuteRunnable taskExecuteRunnable2 = createTaskExecuteRunnable(); + taskExecuteRunnable2.getTaskInstance().setId(2); + taskExecuteRunnable2.getTaskInstance().setTaskGroupPriority(Priority.HIGH.getCode()); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable2); + + TaskExecuteRunnable taskExecuteRunnable3 = createTaskExecuteRunnable(); + taskExecuteRunnable3.getTaskInstance().setId(3); + taskExecuteRunnable3.getTaskInstance().setTaskGroupPriority(Priority.LOW.getCode()); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable3); + + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(3); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(1); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(2); + } + + @Test + void takeTaskExecuteRunnable_withDifferentSubmitTime() { + Date now = new Date(); + + TaskExecuteRunnable taskExecuteRunnable1 = createTaskExecuteRunnable(); + taskExecuteRunnable1.getTaskInstance().setId(1); + taskExecuteRunnable1.getTaskInstance().setFirstSubmitTime(now); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable1); + + TaskExecuteRunnable taskExecuteRunnable2 = createTaskExecuteRunnable(); + taskExecuteRunnable2.getTaskInstance().setId(2); + taskExecuteRunnable2.getTaskInstance().setFirstSubmitTime(DateUtils.addMinutes(now, 1)); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable2); + + TaskExecuteRunnable taskExecuteRunnable3 = createTaskExecuteRunnable(); + 
taskExecuteRunnable3.getTaskInstance().setId(3); + taskExecuteRunnable3.getTaskInstance().setFirstSubmitTime(DateUtils.addMinutes(now, -1)); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable3); + + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(3); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(1); + assertThat(globalTaskDispatchWaitingQueue.takeTaskExecuteRunnable().getTaskInstance().getId()) + .isEqualTo(2); + } + + @Test + void getWaitingDispatchTaskNumber() { + Assertions.assertEquals(0, globalTaskDispatchWaitingQueue.getWaitingDispatchTaskNumber()); + TaskExecuteRunnable taskExecuteRunnable = createTaskExecuteRunnable(); + globalTaskDispatchWaitingQueue.dispatchTaskExecuteRunnable(taskExecuteRunnable); + Assertions.assertEquals(1, globalTaskDispatchWaitingQueue.getWaitingDispatchTaskNumber()); + } + + private TaskExecuteRunnable createTaskExecuteRunnable() { + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setProcessInstancePriority(Priority.MEDIUM); + + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setTaskInstancePriority(Priority.MEDIUM); + taskInstance.setFirstSubmitTime(new Date()); + + TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); + + return new DefaultTaskExecuteRunnable(processInstance, taskInstance, taskExecutionContext, + new TaskExecuteRunnableOperatorManager()); + } +} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnableTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnableTest.java index c08fb206f4dc..0adc938d5c36 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnableTest.java +++ 
b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnableTest.java @@ -71,6 +71,7 @@ import org.mockito.quality.Strictness; import org.springframework.context.ApplicationContext; +import com.google.common.collect.Lists; import com.google.common.collect.Sets; @ExtendWith(MockitoExtension.class) @@ -106,6 +107,8 @@ public class WorkflowExecuteRunnableTest { private TaskGroupCoordinator taskGroupCoordinator; + private WorkflowExecuteContext workflowExecuteContext; + @BeforeEach public void init() throws Exception { applicationContext = Mockito.mock(ApplicationContext.class); @@ -134,7 +137,7 @@ public void init() throws Exception { stateWheelExecuteThread = Mockito.mock(StateWheelExecuteThread.class); curingGlobalParamsService = Mockito.mock(CuringParamsService.class); ProcessAlertManager processAlertManager = Mockito.mock(ProcessAlertManager.class); - WorkflowExecuteContext workflowExecuteContext = Mockito.mock(WorkflowExecuteContext.class); + workflowExecuteContext = Mockito.mock(WorkflowExecuteContext.class); Mockito.when(workflowExecuteContext.getWorkflowInstance()).thenReturn(processInstance); IWorkflowGraph workflowGraph = Mockito.mock(IWorkflowGraph.class); Mockito.when(workflowExecuteContext.getWorkflowGraph()).thenReturn(workflowGraph); @@ -209,11 +212,13 @@ public void testGetStartTaskInstanceList() { } @Test - public void testGetPreVarPool() { + public void testInitializeTaskInstanceVarPool() { try { - Set preTaskName = new HashSet<>(); - preTaskName.add(1L); - preTaskName.add(2L); + IWorkflowGraph workflowGraph = Mockito.mock(IWorkflowGraph.class); + Mockito.when(workflowExecuteContext.getWorkflowGraph()).thenReturn(workflowGraph); + TaskNode taskNode = Mockito.mock(TaskNode.class); + Mockito.when(workflowGraph.getTaskNodeByCode(Mockito.anyLong())).thenReturn(taskNode); + Mockito.when(taskNode.getPreTasks()).thenReturn(JSONUtils.toJsonString(Lists.newArrayList(1L, 2L))); TaskInstance taskInstance = new 
TaskInstance(); @@ -255,7 +260,7 @@ public void testGetPreVarPool() { taskCodeInstanceMapField.setAccessible(true); taskCodeInstanceMapField.set(workflowExecuteThread, taskCodeInstanceMap); - workflowExecuteThread.getPreVarPool(taskInstance, preTaskName); + workflowExecuteThread.initializeTaskInstanceVarPool(taskInstance); Assertions.assertNotNull(taskInstance.getVarPool()); taskInstance2.setVarPool("[{\"direct\":\"OUT\",\"prop\":\"test1\",\"type\":\"VARCHAR\",\"value\":\"2\"}]"); @@ -266,7 +271,7 @@ public void testGetPreVarPool() { taskInstanceMapField.setAccessible(true); taskInstanceMapField.set(workflowExecuteThread, taskInstanceMap); - workflowExecuteThread.getPreVarPool(taskInstance, preTaskName); + workflowExecuteThread.initializeTaskInstanceVarPool(taskInstance); Assertions.assertNotNull(taskInstance.getVarPool()); } catch (Exception e) { Assertions.fail(); @@ -387,7 +392,6 @@ void testTryToDispatchTaskInstance() { // task instance already finished, not dispatch TaskInstance taskInstance = new TaskInstance(); taskInstance.setState(TaskExecutionStatus.PAUSE); - Mockito.when(processInstance.isBlocked()).thenReturn(true); TaskExecuteRunnable taskExecuteRunnable = Mockito.mock(TaskExecuteRunnable.class); workflowExecuteThread.tryToDispatchTaskInstance(taskInstance, taskExecuteRunnable); Mockito.verify(taskExecuteRunnable, Mockito.never()).dispatch(); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactoryTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactoryTest.java index 93c84653628a..dd63abcfa9d6 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactoryTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/dispatcher/TaskDispatchFactoryTest.java @@ -17,15 +17,18 @@ package 
org.apache.dolphinscheduler.server.master.runner.dispatcher; -import org.apache.dolphinscheduler.server.master.runner.task.blocking.BlockingLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.condition.ConditionLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.dependent.DependentLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.subworkflow.SubWorkflowLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.switchtask.SwitchLogicTask; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DependentLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DynamicLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SubWorkflowLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.shell.ShellTaskChannelFactory; + import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @@ -42,18 +45,20 @@ public class TaskDispatchFactoryTest { @Mock private WorkerTaskDispatcher workerTaskDispatcher; - @Test - public void getTaskDispatcher() { - Assertions.assertEquals(masterTaskDispatcher, - taskDispatchFactory.getTaskDispatcher(BlockingLogicTask.TASK_TYPE)); - Assertions.assertEquals(masterTaskDispatcher, - taskDispatchFactory.getTaskDispatcher(ConditionLogicTask.TASK_TYPE)); - Assertions.assertEquals(masterTaskDispatcher, - taskDispatchFactory.getTaskDispatcher(DependentLogicTask.TASK_TYPE)); - 
Assertions.assertEquals(masterTaskDispatcher, - taskDispatchFactory.getTaskDispatcher(SubWorkflowLogicTask.TASK_TYPE)); - Assertions.assertEquals(masterTaskDispatcher, taskDispatchFactory.getTaskDispatcher(SwitchLogicTask.TASK_TYPE)); - - Assertions.assertEquals(workerTaskDispatcher, taskDispatchFactory.getTaskDispatcher("SHELL")); + @ParameterizedTest + @ValueSource(strings = { + ConditionsLogicTaskChannelFactory.NAME, + DependentLogicTaskChannelFactory.NAME, + DynamicLogicTaskChannelFactory.NAME, + SubWorkflowLogicTaskChannelFactory.NAME, + SwitchLogicTaskChannelFactory.NAME}) + public void getTaskDispatcher_withLogicTask(String taskType) { + assertThat(taskDispatchFactory.getTaskDispatcher(taskType)).isSameInstanceAs(masterTaskDispatcher); + } + + @ParameterizedTest + @ValueSource(strings = {ShellTaskChannelFactory.NAME}) + public void getTaskDispatcher_withWorkerTask(String taskType) { + assertThat(taskDispatchFactory.getTaskDispatcher(taskType)).isSameInstanceAs(workerTaskDispatcher); } } diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/execute/PriorityDelayTaskExecuteRunnableTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/execute/PriorityDelayTaskExecuteRunnableTest.java deleted file mode 100644 index 778884e066a2..000000000000 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/execute/PriorityDelayTaskExecuteRunnableTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.server.master.runner.execute; - -import org.apache.dolphinscheduler.common.enums.Priority; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; -import org.apache.dolphinscheduler.server.master.runner.DefaultTaskExecuteRunnable; -import org.apache.dolphinscheduler.server.master.runner.PriorityDelayTaskExecuteRunnable; -import org.apache.dolphinscheduler.server.master.runner.operator.TaskExecuteRunnableOperatorManager; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class PriorityDelayTaskExecuteRunnableTest { - - @Test - public void testCompareTo() { - TaskExecuteRunnableOperatorManager taskOperatorManager = new TaskExecuteRunnableOperatorManager(); - - ProcessInstance workflowInstance = new ProcessInstance(); - workflowInstance.setId(1); - workflowInstance.setProcessInstancePriority(Priority.HIGH); - - TaskInstance t1 = new TaskInstance(); - t1.setId(1); - t1.setTaskInstancePriority(Priority.HIGH); - - TaskInstance t2 = new TaskInstance(); - t2.setId(1); - t2.setTaskInstancePriority(Priority.HIGH); - - TaskExecutionContext context1 = new TaskExecutionContext(); - TaskExecutionContext context2 = new TaskExecutionContext(); - PriorityDelayTaskExecuteRunnable p1 = - new DefaultTaskExecuteRunnable(workflowInstance, t1, context1, taskOperatorManager); - PriorityDelayTaskExecuteRunnable p2 = - new 
DefaultTaskExecuteRunnable(workflowInstance, t2, context2, taskOperatorManager); - - Assertions.assertEquals(0, p1.compareTo(p2)); - - // the higher priority, the higher priority - t2.setTaskInstancePriority(Priority.MEDIUM); - Assertions.assertTrue(p1.compareTo(p2) < 0); - - // the smaller dispatch fail times, the higher priority - context1.setDispatchFailTimes(1); - Assertions.assertTrue(p1.compareTo(p2) > 0); - } - -} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntryTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntryTest.java new file mode 100644 index 000000000000..00cf782e18b9 --- /dev/null +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/queue/DelayEntryTest.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.runner.queue; + +import static com.google.common.truth.Truth.assertThat; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +class DelayEntryTest { + + @Test + void getDelay() { + DelayEntry delayEntry = new DelayEntry<>(5_000L, "Item"); + assertThat(delayEntry.getDelay(TimeUnit.NANOSECONDS)) + .isWithin(TimeUnit.NANOSECONDS.convert(500, TimeUnit.MILLISECONDS)) + .of(TimeUnit.NANOSECONDS.convert(5_000L, TimeUnit.MILLISECONDS)); + } +} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java index d238869f41f4..d9b9c82e6641 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicCommandUtilsTest.java @@ -54,6 +54,7 @@ void setUp() { processInstance.setWarningGroupId(1); processInstance.setProcessInstancePriority(null); // update this processInstance.setWorkerGroup("worker"); + processInstance.setTenantCode("unit-root"); processInstance.setDryRun(0); } @@ -73,6 +74,7 @@ void testCreateCommand() { Assertions.assertEquals(processInstance.getProcessInstancePriority(), command.getProcessInstancePriority()); Assertions.assertEquals(processInstance.getWorkerGroup(), command.getWorkerGroup()); Assertions.assertEquals(processInstance.getDryRun(), command.getDryRun()); + Assertions.assertEquals(processInstance.getTenantCode(), command.getTenantCode()); } @Test diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTaskTest.java 
b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTaskTest.java index a5b9f5a37108..019b435fe090 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTaskTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/dynamic/DynamicLogicTaskTest.java @@ -19,6 +19,7 @@ import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; @@ -43,8 +44,6 @@ import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import com.fasterxml.jackson.databind.ObjectMapper; - @ExtendWith(MockitoExtension.class) class DynamicLogicTaskTest { @@ -74,14 +73,11 @@ class DynamicLogicTaskTest { private DynamicLogicTask dynamicLogicTask; - private ObjectMapper objectMapper; - @BeforeEach public void setUp() { // Set up your test environment before each test. 
dynamicParameters = new DynamicParameters(); taskExecutionContext = Mockito.mock(TaskExecutionContext.class); - objectMapper = new ObjectMapper(); processInstance = new ProcessInstance(); Mockito.when(processInstanceDao.queryById(Mockito.any())).thenReturn(processInstance); dynamicLogicTask = new DynamicLogicTask( @@ -95,7 +91,7 @@ public void setUp() { } @Test - void testGenerateParameterGroup() throws Exception { + void testGenerateParameterGroup() { DynamicInputParameter dynamicInputParameter1 = new DynamicInputParameter(); dynamicInputParameter1.setName("param1"); dynamicInputParameter1.setValue("a,b,c"); @@ -113,7 +109,7 @@ void testGenerateParameterGroup() throws Exception { Mockito.when(taskExecutionContext.getPrepareParamsMap()).thenReturn(new HashMap<>()); Mockito.when(taskExecutionContext.getTaskParams()) - .thenReturn(objectMapper.writeValueAsString(dynamicParameters)); + .thenReturn(JSONUtils.toJsonString(dynamicParameters)); dynamicLogicTask = new DynamicLogicTask( taskExecutionContext, diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/service/FailoverServiceTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/service/FailoverServiceTest.java index 2081d2dd7ad5..7e6f30970f33 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/service/FailoverServiceTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/service/FailoverServiceTest.java @@ -17,8 +17,6 @@ package org.apache.dolphinscheduler.server.master.service; -import static org.apache.dolphinscheduler.common.constants.Constants.COMMON_TASK_TYPE; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SWITCH; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.doNothing; @@ -31,6 +29,8 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import 
org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.shell.ShellTaskChannelFactory; import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.enums.RegistryNodeType; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; @@ -139,13 +139,13 @@ public void before() throws Exception { masterTaskInstance.setId(1); masterTaskInstance.setStartTime(new Date()); masterTaskInstance.setHost(testMasterHost); - masterTaskInstance.setTaskType(TASK_TYPE_SWITCH); + masterTaskInstance.setTaskType(SwitchLogicTaskChannelFactory.NAME); workerTaskInstance = new TaskInstance(); workerTaskInstance.setId(2); workerTaskInstance.setStartTime(new Date()); workerTaskInstance.setHost(testWorkerHost); - workerTaskInstance.setTaskType(COMMON_TASK_TYPE); + workerTaskInstance.setTaskType(ShellTaskChannelFactory.NAME); given(processService.queryNeedFailoverProcessInstances(Mockito.anyString())) .willReturn(Arrays.asList(processInstance)); diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtilsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtilsTest.java index 7f7ae43bbf5e..34785ada4757 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtilsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/SwitchTaskUtilsTest.java @@ -50,7 +50,7 @@ public void testIllegalCondition() { Map globalParams = new HashMap<>(); Map varParams = new HashMap<>(); globalParams.put("test", new Property("test", Direct.IN, DataType.INTEGER, "1")); - 
Assertions.assertThrowsExactly(IllegalArgumentException.class, () -> { + Assertions.assertDoesNotThrow(() -> { SwitchTaskUtils.generateContentWithTaskParams(content, globalParams, varParams); }); @@ -70,15 +70,5 @@ public void testIllegalCondition() { SwitchTaskUtils.evaluate(script); }); - String contentWithSpecify1 = "cmd.abc"; - Assertions.assertThrowsExactly(IllegalArgumentException.class, () -> { - SwitchTaskUtils.generateContentWithTaskParams(contentWithSpecify1, globalParams, varParams); - }); - - String contentWithSpecify2 = "cmd()"; - Assertions.assertThrowsExactly(IllegalArgumentException.class, () -> { - SwitchTaskUtils.generateContentWithTaskParams(contentWithSpecify2, globalParams, varParams); - }); - } } diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/TaskUtilsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/TaskUtilsTest.java deleted file mode 100644 index bec04b1936f6..000000000000 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/TaskUtilsTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.utils; - -import org.apache.dolphinscheduler.server.master.runner.task.blocking.BlockingLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.condition.ConditionLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.dependent.DependentLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.subworkflow.SubWorkflowLogicTask; -import org.apache.dolphinscheduler.server.master.runner.task.switchtask.SwitchLogicTask; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class TaskUtilsTest { - - @Test - public void isMasterTask() { - Assertions.assertTrue(TaskUtils.isMasterTask(BlockingLogicTask.TASK_TYPE)); - Assertions.assertTrue(TaskUtils.isMasterTask(ConditionLogicTask.TASK_TYPE)); - Assertions.assertTrue(TaskUtils.isMasterTask(DependentLogicTask.TASK_TYPE)); - Assertions.assertTrue(TaskUtils.isMasterTask(SubWorkflowLogicTask.TASK_TYPE)); - Assertions.assertTrue(TaskUtils.isMasterTask(SwitchLogicTask.TASK_TYPE)); - } -} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java index d52c436add35..5b4bc18ca16a 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/utils/WorkflowInstanceUtilsTest.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import java.sql.Date; @@ -52,7 +53,7 
@@ public void testLogWorkflowInstanceInDetails() { workflowInstance.setDryRun(0); workflowInstance.setTenantCode("default"); workflowInstance.setRestartTime(Date.valueOf("2023-08-01")); - workflowInstance.setWorkerGroup("default"); + workflowInstance.setWorkerGroup(WorkerGroupUtils.getDefaultWorkerGroup()); workflowInstance.setStartTime(Date.valueOf("2023-08-01")); workflowInstance.setEndTime(Date.valueOf("2023-08-01")); Assertions.assertEquals("\n" diff --git a/dolphinscheduler-master/src/test/resources/application.yaml b/dolphinscheduler-master/src/test/resources/application.yaml new file mode 100644 index 000000000000..15f91996090a --- /dev/null +++ b/dolphinscheduler-master/src/test/resources/application.yaml @@ -0,0 +1,158 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +spring: + banner: + charset: UTF-8 + jackson: + time-zone: UTC + date-format: "yyyy-MM-dd HH:mm:ss" + datasource: + driver-class-name: org.postgresql.Driver + url: jdbc:postgresql://127.0.0.1:5432/dolphinscheduler + username: root + password: root + hikari: + connection-test-query: select 1 + minimum-idle: 5 + auto-commit: true + validation-timeout: 3000 + pool-name: DolphinScheduler + maximum-pool-size: 50 + connection-timeout: 30000 + idle-timeout: 600000 + leak-detection-threshold: 0 + initialization-fail-timeout: 1 + quartz: + job-store-type: jdbc + jdbc: + initialize-schema: never + properties: + org.quartz.threadPool.threadPriority: 5 + org.quartz.jobStore.isClustered: true + org.quartz.jobStore.class: org.springframework.scheduling.quartz.LocalDataSourceJobStore + org.quartz.scheduler.instanceId: AUTO + org.quartz.jobStore.tablePrefix: QRTZ_ + org.quartz.jobStore.acquireTriggersWithinLock: true + org.quartz.scheduler.instanceName: DolphinScheduler + org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool + org.quartz.jobStore.useProperties: false + org.quartz.threadPool.makeThreadsDaemons: true + org.quartz.threadPool.threadCount: 25 + org.quartz.jobStore.misfireThreshold: 60000 + org.quartz.scheduler.batchTriggerAcquisitionMaxCount: 1 + org.quartz.scheduler.makeSchedulerThreadDaemon: true + org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + org.quartz.jobStore.clusterCheckinInterval: 5000 + +# Mybatis-plus configuration, you don't need to change it +mybatis-plus: + mapper-locations: classpath:org/apache/dolphinscheduler/dao/mapper/*Mapper.xml + type-aliases-package: org.apache.dolphinscheduler.dao.entity + configuration: + cache-enabled: false + call-setters-on-nulls: true + map-underscore-to-camel-case: true + jdbc-type-for-null: NULL + global-config: + db-config: + id-type: auto + banner: false + + +registry: + type: zookeeper + zookeeper: + namespace: dolphinscheduler + connect-string: localhost:2181 + 
retry-policy: + base-sleep-time: 60ms + max-sleep: 300ms + max-retries: 5 + session-timeout: 30s + connection-timeout: 9s + block-until-connected: 600ms + digest: ~ + +master: + listen-port: 5678 + # master prepare execute thread number to limit handle commands in parallel + pre-exec-threads: 10 + # master execute thread number to limit process instances in parallel + exec-threads: 100 + # master dispatch task number per batch, if all the tasks dispatch failed in a batch, will sleep 1s. + dispatch-task-number: 30 + # master host selector to select a suitable worker, default value: LowerWeight. Optional values include random, round_robin, lower_weight + host-selector: lower_weight + # master heartbeat interval + max-heartbeat-interval: 10s + # master commit task retry times + task-commit-retry-times: 5 + # master commit task interval + task-commit-interval: 1s + state-wheel-interval: 5s + server-load-protection: + # If set true, will open master overload protection + enabled: true + # Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. + max-system-cpu-usage-percentage-thresholds: 0.77 + # Master max jvm cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. + max-jvm-cpu-usage-percentage-thresholds: 0.77 + # Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. + max-system-memory-usage-percentage-thresholds: 0.77 + # Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. 
+ max-disk-usage-percentage-thresholds: 0.77 + # failover interval, the unit is minute + failover-interval: 10m + # kill yarn / k8s application when failover taskInstance, default true + kill-application-when-task-failover: true + registry-disconnect-strategy: + # The disconnect strategy: stop, waiting + strategy: waiting + # The max waiting time to reconnect to registry if you set the strategy to waiting + max-waiting-time: 100s + worker-group-refresh-interval: 10s + command-fetch-strategy: + type: ID_SLOT_BASED + config: + # The incremental id step + id-step: 3 + # master fetch command num + fetch-size: 11 + +server: + port: 5679 + +management: + endpoints: + web: + exposure: + include: health,metrics,prometheus + endpoint: + health: + enabled: true + show-details: always + health: + db: + enabled: true + defaults: + enabled: false + metrics: + tags: + application: ${spring.application.name} + +metrics: + enabled: true diff --git a/dolphinscheduler-master/src/test/resources/logback.xml b/dolphinscheduler-master/src/test/resources/logback.xml index deb791fae21c..286e35cd1fb1 100644 --- a/dolphinscheduler-master/src/test/resources/logback.xml +++ b/dolphinscheduler-master/src/test/resources/logback.xml @@ -65,13 +65,7 @@ - - - - - - - - + + diff --git a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterConfiguration.java b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterAutoConfiguration.java similarity index 87% rename from dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterConfiguration.java rename to dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterAutoConfiguration.java index e3b140a57842..fa8933d13297 100644 --- a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterConfiguration.java +++ b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/MeterAutoConfiguration.java @@ -20,7 +20,8 @@ package 
org.apache.dolphinscheduler.meter; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.apache.dolphinscheduler.meter.metrics.DefaultMetricsProvider; + import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -42,11 +43,15 @@ * } * */ -@Configuration +@Configuration(proxyBeanMethods = false) @EnableAspectJAutoProxy -@EnableAutoConfiguration @ConditionalOnProperty(prefix = "metrics", name = "enabled", havingValue = "true") -public class MeterConfiguration { +public class MeterAutoConfiguration { + + @Bean + public DefaultMetricsProvider metricsProvider(MeterRegistry meterRegistry) { + return new DefaultMetricsProvider(meterRegistry); + } @Bean public TimedAspect timedAspect(MeterRegistry registry) { diff --git a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/BaseServerLoadProtection.java b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/BaseServerLoadProtection.java new file mode 100644 index 000000000000..fd12d3bb6604 --- /dev/null +++ b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/BaseServerLoadProtection.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.meter.metrics; + +import lombok.Data; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@Data +public class BaseServerLoadProtection implements ServerLoadProtection { + + protected boolean enabled = true; + + protected double maxSystemCpuUsagePercentageThresholds = 0.7; + + protected double maxJvmCpuUsagePercentageThresholds = 0.7; + + protected double maxSystemMemoryUsagePercentageThresholds = 0.7; + + protected double maxDiskUsagePercentageThresholds = 0.7; + + @Override + public boolean isOverload(SystemMetrics systemMetrics) { + if (!enabled) { + return false; + } + if (systemMetrics.getSystemCpuUsagePercentage() > maxSystemCpuUsagePercentageThresholds) { + log.info( + "OverLoad: the system cpu usage: {} is over then the maxSystemCpuUsagePercentageThresholds {}", + systemMetrics.getSystemCpuUsagePercentage(), maxSystemCpuUsagePercentageThresholds); + return true; + } + if (systemMetrics.getJvmCpuUsagePercentage() > maxJvmCpuUsagePercentageThresholds) { + log.info( + "OverLoad: the jvm cpu usage: {} is over then the maxJvmCpuUsagePercentageThresholds {}", + systemMetrics.getJvmCpuUsagePercentage(), maxJvmCpuUsagePercentageThresholds); + return true; + } + if (systemMetrics.getDiskUsedPercentage() > maxDiskUsagePercentageThresholds) { + log.info("OverLoad: the DiskUsedPercentage: {} is over then the maxDiskUsagePercentageThresholds {}", + systemMetrics.getDiskUsedPercentage(), maxDiskUsagePercentageThresholds); + return true; + } + if (systemMetrics.getSystemMemoryUsedPercentage() > 
maxSystemMemoryUsagePercentageThresholds) { + log.info( + "OverLoad: the SystemMemoryUsedPercentage: {} is over then the maxSystemMemoryUsagePercentageThresholds {}", + systemMetrics.getSystemMemoryUsedPercentage(), maxSystemMemoryUsagePercentageThresholds); + return true; + } + return false; + } +} diff --git a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/DefaultMetricsProvider.java b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/DefaultMetricsProvider.java index 0ce6ceb4a401..e293e44a8d81 100644 --- a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/DefaultMetricsProvider.java +++ b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/DefaultMetricsProvider.java @@ -19,16 +19,15 @@ import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - import io.micrometer.core.instrument.MeterRegistry; -@Component public class DefaultMetricsProvider implements MetricsProvider { - @Autowired - private MeterRegistry meterRegistry; + private final MeterRegistry meterRegistry; + + public DefaultMetricsProvider(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } private SystemMetrics systemMetrics; @@ -53,8 +52,7 @@ public SystemMetrics getSystemMetrics() { systemMetrics = SystemMetrics.builder() .systemCpuUsagePercentage(systemCpuUsage) - .processCpuUsagePercentage(processCpuUsage) - .totalCpuUsedPercentage(systemCpuUsage + processCpuUsage) + .jvmCpuUsagePercentage(processCpuUsage) .jvmMemoryUsed(jvmMemoryUsed) .jvmMemoryMax(jvmMemoryMax) .jvmMemoryUsedPercentage(jvmMemoryUsed / jvmMemoryMax) diff --git a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/ServerLoadProtection.java b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/ServerLoadProtection.java new file 
mode 100644 index 000000000000..3385de891f3b --- /dev/null +++ b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/ServerLoadProtection.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.meter.metrics; + +public interface ServerLoadProtection { + + boolean isOverload(SystemMetrics systemMetrics); + +} diff --git a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/SystemMetrics.java b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/SystemMetrics.java index dcffafb83dee..6da8f8ca4ece 100644 --- a/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/SystemMetrics.java +++ b/dolphinscheduler-meter/src/main/java/org/apache/dolphinscheduler/meter/metrics/SystemMetrics.java @@ -30,8 +30,7 @@ public class SystemMetrics { // CPU private double systemCpuUsagePercentage; - private double processCpuUsagePercentage; - private double totalCpuUsedPercentage; + private double jvmCpuUsagePercentage; // JVM-Memory // todo: get pod memory usage diff --git a/dolphinscheduler-meter/src/main/resources/META-INF/spring.factories 
b/dolphinscheduler-meter/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..77bc56d86eb7 --- /dev/null +++ b/dolphinscheduler-meter/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.meter.MeterAutoConfiguration diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/rpc/RpcBenchMarkTest.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/rpc/RpcBenchMarkTest.java index 1a3e4ab1e2a1..496983118fd8 100644 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/rpc/RpcBenchMarkTest.java +++ b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/rpc/RpcBenchMarkTest.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.microbench.rpc; -import org.apache.dolphinscheduler.extract.base.NettyRemotingServer; import org.apache.dolphinscheduler.extract.base.client.SingletonJdkDynamicRpcClientProxyFactory; import org.apache.dolphinscheduler.extract.base.config.NettyServerConfig; import org.apache.dolphinscheduler.extract.base.server.SpringServerMethodInvokerDiscovery; @@ -46,18 +45,17 @@ @BenchmarkMode({Mode.Throughput, Mode.AverageTime, Mode.SampleTime}) public class RpcBenchMarkTest extends AbstractBaseBenchmark { - private NettyRemotingServer nettyRemotingServer; + private SpringServerMethodInvokerDiscovery springServerMethodInvokerDiscovery; private IService iService; @Setup public void before() { - nettyRemotingServer = new NettyRemotingServer( - NettyServerConfig.builder().serverName("NettyRemotingServer").listenPort(12345).build()); - nettyRemotingServer.start(); - SpringServerMethodInvokerDiscovery springServerMethodInvokerDiscovery = - new SpringServerMethodInvokerDiscovery(nettyRemotingServer); + NettyServerConfig nettyServerConfig = + NettyServerConfig.builder().serverName("NettyRemotingServer").listenPort(12345).build(); + springServerMethodInvokerDiscovery = new SpringServerMethodInvokerDiscovery(nettyServerConfig); springServerMethodInvokerDiscovery.postProcessAfterInitialization(new IServiceImpl(), 
"iServiceImpl"); + springServerMethodInvokerDiscovery.start(); iService = SingletonJdkDynamicRpcClientProxyFactory.getProxyClient("localhost:12345", IService.class); } @@ -72,6 +70,6 @@ public void sendTest(Blackhole bh) { @TearDown public void after() { - nettyRemotingServer.close(); + springServerMethodInvokerDiscovery.close(); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java index 8bdb8b9021af..f90ef1ea3243 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java @@ -26,10 +26,20 @@ import lombok.NonNull; /** - * Registry + * The SPI interface for registry center, each registry plugin should implement this interface. */ public interface Registry extends Closeable { + /** + * Start the registry, once started, the registry will connect to the registry center. + */ + void start(); + + /** + * Whether the registry is connected + * + * @return true if connected, false otherwise. + */ boolean isConnected(); /** @@ -40,7 +50,13 @@ public interface Registry extends Closeable { */ void connectUntilTimeout(@NonNull Duration timeout) throws RegistryException; - boolean subscribe(String path, SubscribeListener listener); + /** + * Subscribe the path, when the path has expose {@link Event}, the listener will be triggered. + * + * @param path the path to subscribe + * @param listener the listener to be triggered + */ + void subscribe(String path, SubscribeListener listener); /** * Remove the path from the subscribe list. 
@@ -53,35 +69,34 @@ public interface Registry extends Closeable { void addConnectionStateListener(ConnectionListener listener); /** - * @return the value + * Get the value of the key, if key not exist will throw {@link RegistryException} */ - String get(String key); + String get(String key) throws RegistryException; /** + * Put the key-value pair into the registry * - * @param key - * @param value + * @param key the key, cannot be null + * @param value the value, cannot be null * @param deleteOnDisconnect if true, when the connection state is disconnected, the key will be deleted */ void put(String key, String value, boolean deleteOnDisconnect); /** - * This function will delete the keys whose prefix is {@param key} - * @param key the prefix of deleted key - * @throws if the key not exists, there is a registryException + * Delete the key from the registry */ void delete(String key); /** - * @return {@code true} if key exists. - * E.g: registry contains the following keys:[/test/test1/test2,] - * if the key: /test - * Return: test1 + * Return the children of the key */ Collection children(String key); /** - * @return if key exists,return true + * Check if the key exists + * + * @param key the key to check + * @return true if the key exists */ boolean exists(String key); @@ -90,6 +105,11 @@ public interface Registry extends Closeable { */ boolean acquireLock(String key); + /** + * Acquire the lock of the prefix {@param key}, if acquire in the given timeout return true, else return false. 
+ */ + boolean acquireLock(String key, long timeout); + /** * Release the lock of the prefix {@param key} */ diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerConfiguration.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryConfiguration.java similarity index 75% rename from dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerConfiguration.java rename to dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryConfiguration.java index 123e4cde5026..bae36949645a 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerConfiguration.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryConfiguration.java @@ -15,18 +15,19 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.scheduler.quartz; - -import org.apache.dolphinscheduler.scheduler.api.SchedulerApi; +package org.apache.dolphinscheduler.registry.api; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration -public class QuartzSchedulerConfiguration { +public class RegistryConfiguration { @Bean - public SchedulerApi schedulerApi() { - return new QuartzScheduler(); + @ConditionalOnMissingBean + public RegistryClient registryClient(Registry registry) { + return new RegistryClient(registry); } + } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/enums/RegistryNodeType.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/enums/RegistryNodeType.java index a1f3bb02b77f..cbc2db6a63ae 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/enums/RegistryNodeType.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/enums/RegistryNodeType.java @@ -37,5 +37,4 @@ public enum RegistryNodeType { private final String name; private final String registryPath; - } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractHAServer.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractHAServer.java new file mode 100644 index 000000000000..5dca5552b325 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractHAServer.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.registry.api.ha; + +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.registry.api.Event; +import org.apache.dolphinscheduler.registry.api.Registry; + +import java.util.List; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import lombok.extern.slf4j.Slf4j; + +import com.google.common.collect.Lists; + +@Slf4j +public abstract class AbstractHAServer implements HAServer { + + private final Registry registry; + + private final String serverPath; + + private ServerStatus serverStatus; + + private final List serverStatusChangeListeners; + + public AbstractHAServer(Registry registry, String serverPath) { + this.registry = registry; + this.serverPath = serverPath; + this.serverStatus = ServerStatus.STAND_BY; + this.serverStatusChangeListeners = Lists.newArrayList(new DefaultServerStatusChangeListener()); + } + + @Override + public void start() { + registry.subscribe(serverPath, event -> { + if (Event.Type.REMOVE.equals(event.type())) { + if (isActive() && !participateElection()) { + statusChange(ServerStatus.STAND_BY); + } + } + }); + ScheduledExecutorService electionSelectionThread = + 
ThreadUtils.newSingleDaemonScheduledExecutorService("election-selection-thread"); + electionSelectionThread.schedule(() -> { + if (isActive()) { + return; + } + if (participateElection()) { + statusChange(ServerStatus.ACTIVE); + } + }, 10, TimeUnit.SECONDS); + } + + @Override + public boolean isActive() { + return ServerStatus.ACTIVE.equals(getServerStatus()); + } + + @Override + public boolean participateElection() { + return registry.acquireLock(serverPath, 3_000); + } + + @Override + public void addServerStatusChangeListener(ServerStatusChangeListener listener) { + serverStatusChangeListeners.add(listener); + } + + @Override + public ServerStatus getServerStatus() { + return serverStatus; + } + + @Override + public void shutdown() { + if (isActive()) { + registry.releaseLock(serverPath); + } + } + + private void statusChange(ServerStatus targetStatus) { + synchronized (this) { + ServerStatus originStatus = serverStatus; + serverStatus = targetStatus; + serverStatusChangeListeners.forEach(listener -> listener.change(originStatus, serverStatus)); + } + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractServerStatusChangeListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractServerStatusChangeListener.java new file mode 100644 index 000000000000..f2e332ea2068 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/AbstractServerStatusChangeListener.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.registry.api.ha; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public abstract class AbstractServerStatusChangeListener implements ServerStatusChangeListener { + + @Override + public void change(HAServer.ServerStatus originStatus, HAServer.ServerStatus currentStatus) { + log.info("The status change from {} to {}.", originStatus, currentStatus); + if (originStatus == HAServer.ServerStatus.ACTIVE) { + if (currentStatus == HAServer.ServerStatus.STAND_BY) { + changeToStandBy(); + } + } else if (originStatus == HAServer.ServerStatus.STAND_BY) { + if (currentStatus == HAServer.ServerStatus.ACTIVE) { + changeToActive(); + } + } + } + + public abstract void changeToActive(); + + public abstract void changeToStandBy(); +} diff --git a/dolphinscheduler-ui/src/views/resource/udf/function/index.module.scss b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/DefaultServerStatusChangeListener.java similarity index 67% rename from dolphinscheduler-ui/src/views/resource/udf/function/index.module.scss rename to dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/DefaultServerStatusChangeListener.java index f717654df859..d2acbcb51693 100644 --- a/dolphinscheduler-ui/src/views/resource/udf/function/index.module.scss +++ 
b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/DefaultServerStatusChangeListener.java @@ -15,29 +15,20 @@ * limitations under the License. */ -.table { - table { - width: 100%; - tr { - height: 40px; - font-size: 12px; - th, - td { - &:nth-child(1) { - width: 50px; - text-align: center; - } - } - th { - &:nth-child(1) { - width: 60px; - text-align: center; - } - > span { - font-size: 12px; - color: #555; - } - } +package org.apache.dolphinscheduler.registry.api.ha; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class DefaultServerStatusChangeListener extends AbstractServerStatusChangeListener { + + @Override + public void changeToActive() { + log.info("The status is active now."); + } + + @Override + public void changeToStandBy() { + log.info("The status is standby now."); } - } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/HAServer.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/HAServer.java new file mode 100644 index 000000000000..6a79e6eb844b --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/HAServer.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.registry.api.ha; + +/** + * Interface for HA server, used to select a active server from multiple servers. + * In HA mode, there are multiple servers, only one server is active, others are standby. + */ +public interface HAServer { + + /** + * Start the server. + */ + void start(); + + /** + * Judge whether the server is active. + * + * @return true if the current server is active. + */ + boolean isActive(); + + /** + * Participate in the election of active server, this method will block until the server is active. + */ + boolean participateElection(); + + /** + * Add a listener to listen to the status change of the server. + * + * @param listener listener to add. + */ + void addServerStatusChangeListener(ServerStatusChangeListener listener); + + /** + * Get the status of the server. + * + * @return the status of the server. + */ + ServerStatus getServerStatus(); + + /** + * Shutdown the server, release resources. 
+ */ + void shutdown(); + + enum ServerStatus { + ACTIVE, + STAND_BY, + ; + } + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/ServerStatusChangeListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/ServerStatusChangeListener.java new file mode 100644 index 000000000000..af109228e230 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ha/ServerStatusChangeListener.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api.ha; + +public interface ServerStatusChangeListener { + + void change(HAServer.ServerStatus originStatus, HAServer.ServerStatus currentStatus); + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/pom.xml index b084db1ccfee..0f5c4d1494ff 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/pom.xml +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/pom.xml @@ -31,6 +31,15 @@ org.apache.dolphinscheduler dolphinscheduler-registry-api + + + org.apache.dolphinscheduler + dolphinscheduler-registry-it + ${project.version} + test-jar + test + + io.etcd jetcd-core @@ -49,18 +58,22 @@ + + + io.netty + netty-all + + io.etcd jetcd-test test + - io.netty - netty-all - - - org.slf4j - slf4j-api + org.springframework.boot + spring-boot-starter-test + test diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java index 1d1397db54e9..80279775ffb9 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java @@ -34,6 +34,8 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import 
java.util.concurrent.TimeoutException; import java.util.stream.Collectors; import javax.net.ssl.SSLException; @@ -41,8 +43,6 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; import org.springframework.util.StringUtils; import com.google.common.base.Splitter; @@ -68,11 +68,10 @@ * This is one of the implementation of {@link Registry}, with this implementation, you need to rely on Etcd cluster to * store the DolphinScheduler master/worker's metadata and do the server registry/unRegistry. */ -@Component -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "etcd") @Slf4j public class EtcdRegistry implements Registry { + private final EtcdRegistryProperties etcdRegistryProperties; private final Client client; private EtcdConnectionStateListener etcdConnectionStateListener; @@ -85,8 +84,8 @@ public class EtcdRegistry implements Registry { private final Map watcherMap = new ConcurrentHashMap<>(); - private static final long TIME_TO_LIVE_SECONDS = 30L; public EtcdRegistry(EtcdRegistryProperties registryProperties) throws SSLException { + this.etcdRegistryProperties = registryProperties; ClientBuilder clientBuilder = Client.builder() .endpoints(Util.toURIs(Splitter.on(",").trimResults().splitToList(registryProperties.getEndpoints()))) .namespace(byteSequence(registryProperties.getNamespace())) @@ -130,6 +129,11 @@ public EtcdRegistry(EtcdRegistryProperties registryProperties) throws SSLExcepti } + @Override + public void start() { + // The start has been set in the constructor + } + @Override public boolean isConnected() { return client.getKVClient().get(byteSequence("/")).join() != null; @@ -141,13 +145,12 @@ public void connectUntilTimeout(@NonNull Duration timeout) throws RegistryExcept } /** - * - * @param path The prefix of the key being listened to + * @param path The prefix of the key being listened to * 
@param listener * @return if subcribe Returns true if no exception was thrown */ @Override - public boolean subscribe(String path, SubscribeListener listener) { + public void subscribe(String path, SubscribeListener listener) { try { ByteSequence watchKey = byteSequence(path); WatchOption watchOption = @@ -161,12 +164,11 @@ public boolean subscribe(String path, SubscribeListener listener) { } catch (Exception e) { throw new RegistryException("Failed to subscribe listener for key: " + path, e); } - return true; } /** - * @throws throws an exception if the unsubscribe path does not exist * @param path The prefix of the key being listened to + * @throws throws an exception if the unsubscribe path does not exist */ @Override public void unsubscribe(String path) { @@ -184,7 +186,6 @@ public void addConnectionStateListener(ConnectionListener listener) { } /** - * * @return Returns the value corresponding to the key * @throws throws an exception if the key does not exist */ @@ -196,13 +197,12 @@ public String get(String key) { } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RegistryException("etcd get data error", e); - } catch (ExecutionException e) { + } catch (Exception e) { throw new RegistryException("etcd get data error, key = " + key, e); } } /** - * * @param deleteOnDisconnect Does the put data disappear when the client disconnects */ @Override @@ -210,7 +210,8 @@ public void put(String key, String value, boolean deleteOnDisconnect) { try { if (deleteOnDisconnect) { // keep the key by lease, if disconnected, the lease will expire and the key will delete - long leaseId = etcdKeepAliveLeaseManager.getOrCreateKeepAliveLease(key, TIME_TO_LIVE_SECONDS); + long leaseId = etcdKeepAliveLeaseManager.getOrCreateKeepAliveLease(key, + etcdRegistryProperties.getTtl().get(ChronoUnit.SECONDS)); PutOption putOption = PutOption.newBuilder().withLeaseId(leaseId).build(); client.getKVClient().put(byteSequence(key), byteSequence(value), 
putOption).get(); } else { @@ -293,26 +294,67 @@ public boolean exists(String key) { */ @Override public boolean acquireLock(String key) { + Map leaseIdMap = threadLocalLockMap.get(); + if (null == leaseIdMap) { + leaseIdMap = new HashMap<>(); + threadLocalLockMap.set(leaseIdMap); + } + if (leaseIdMap.containsKey(key)) { + return true; + } + Lock lockClient = client.getLockClient(); Lease leaseClient = client.getLeaseClient(); // get the lock with a lease try { - long leaseId = leaseClient.grant(TIME_TO_LIVE_SECONDS).get().getID(); + long leaseId = leaseClient.grant(etcdRegistryProperties.getTtl().get(ChronoUnit.SECONDS)).get().getID(); // keep the lease client.getLeaseClient().keepAlive(leaseId, Observers.observer(response -> { })); lockClient.lock(byteSequence(key), leaseId).get(); // save the leaseId for release Lock - if (null == threadLocalLockMap.get()) { - threadLocalLockMap.set(new HashMap<>()); - } - threadLocalLockMap.get().put(key, leaseId); + leaseIdMap.put(key, leaseId); return true; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RegistryException("etcd get lock error", e); - } catch (ExecutionException e) { + } catch (Exception e) { + throw new RegistryException("etcd get lock error, lockKey: " + key, e); + } + } + + @Override + public boolean acquireLock(String key, long timeout) { + Map leaseIdMap = threadLocalLockMap.get(); + if (null == leaseIdMap) { + leaseIdMap = new HashMap<>(); + threadLocalLockMap.set(leaseIdMap); + } + if (leaseIdMap.containsKey(key)) { + return true; + } + + Lock lockClient = client.getLockClient(); + Lease leaseClient = client.getLeaseClient(); + // get the lock with a lease + try { + long leaseId = leaseClient.grant(etcdRegistryProperties.getTtl().get(ChronoUnit.SECONDS)).get().getID(); + // keep the lease + lockClient.lock(byteSequence(key), leaseId).get(timeout, TimeUnit.MILLISECONDS); + client.getLeaseClient().keepAlive(leaseId, Observers.observer(response -> { + })); + + // save the 
leaseId for release Lock + leaseIdMap.put(key, leaseId); + return true; + } catch (TimeoutException timeoutException) { + log.debug("Acquire lock: {} in {}/ms timeout", key, timeout); + return false; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RegistryException("etcd get lock error", e); + } catch (Exception e) { throw new RegistryException("etcd get lock error, lockKey: " + key, e); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryAutoConfiguration.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryAutoConfiguration.java new file mode 100644 index 000000000000..1038d312ffc7 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryAutoConfiguration.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.registry.etcd; + +import org.apache.dolphinscheduler.registry.api.Registry; + +import javax.net.ssl.SSLException; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +@Slf4j +@ComponentScan +@Configuration(proxyBeanMethods = false) +@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "etcd") +public class EtcdRegistryAutoConfiguration { + + public EtcdRegistryAutoConfiguration() { + log.info("Load EtcdRegistryAutoConfiguration"); + } + + @Bean + @ConditionalOnMissingBean(value = Registry.class) + public EtcdRegistry etcdRegistry(EtcdRegistryProperties etcdRegistryProperties) throws SSLException { + return new EtcdRegistry(etcdRegistryProperties); + } + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryProperties.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryProperties.java index faded2a506a4..b748c2a0fe93 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryProperties.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryProperties.java @@ -21,13 +21,11 @@ import lombok.Data; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import 
org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; @Data @Configuration -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "etcd") @ConfigurationProperties(prefix = "registry") public class EtcdRegistryProperties { @@ -35,6 +33,8 @@ public class EtcdRegistryProperties { private String namespace = "dolphinscheduler"; private Duration connectionTimeout = Duration.ofSeconds(9); + private Duration ttl = Duration.ofSeconds(30); + // auth private String user; private String password; diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/resources/META-INF/spring.factories b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..689817bb9678 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.plugin.registry.etcd.EtcdRegistryAutoConfiguration diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdKeepAliveLeaseManagerTest.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdKeepAliveLeaseManagerTest.java index 70593e4bad24..84acbae8f32c 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdKeepAliveLeaseManagerTest.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdKeepAliveLeaseManagerTest.java @@ -36,13 +36,14 @@ class EtcdKeepAliveLeaseManagerTest { static Client client; static EtcdKeepAliveLeaseManager etcdKeepAliveLeaseManager; + @BeforeAll public static void before() throws Exception { server = EtcdClusterExtension.builder() .withNodes(1) .withImage("ibmcom/etcd:3.2.24") .build(); - server.restart(); + server.cluster().start(); client = Client.builder().endpoints(server.clientEndpoints()).build(); @@ -65,8 +66,9 @@ void getOrCreateKeepAliveLeaseTest() throws Exception { @AfterAll public static void after() throws IOException { - try (EtcdCluster closeServer = server.cluster()) { - client.close(); + try ( + EtcdCluster closeServer = server.cluster(); + Client closedClient = client) { } } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTest.java 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTest.java deleted file mode 100644 index b99bab98ad87..000000000000 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTest.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.registry.etcd; - -import org.apache.dolphinscheduler.registry.api.Event; -import org.apache.dolphinscheduler.registry.api.SubscribeListener; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import io.etcd.jetcd.test.EtcdClusterExtension; - -public class EtcdRegistryTest { - - private static final Logger logger = LoggerFactory.getLogger(EtcdRegistryTest.class); - - public static EtcdRegistry registry; - - @BeforeAll - public static void before() throws Exception { - EtcdClusterExtension server = EtcdClusterExtension.builder() - .withNodes(1) - .withImage("ibmcom/etcd:3.2.24") - .build(); - EtcdRegistryProperties properties = new EtcdRegistryProperties(); - server.restart(); - properties.setEndpoints(String.valueOf(server.clientEndpoints().get(0))); - registry = new EtcdRegistry(properties); - registry.put("/sub", "sub", false); - } - - @Test - public void persistTest() { - registry.put("/nodes/m1", "", false); - registry.put("/nodes/m2", "", false); - Assertions.assertEquals(Arrays.asList("m1", "m2"), registry.children("/nodes")); - Assertions.assertTrue(registry.exists("/nodes/m1")); - registry.delete("/nodes/m2"); - Assertions.assertFalse(registry.exists("/nodes/m2")); - registry.delete("/nodes"); - Assertions.assertFalse(registry.exists("/nodes/m1")); - } - - @Test - public void lockTest() { - CountDownLatch preCountDownLatch = new CountDownLatch(1); - CountDownLatch allCountDownLatch = new CountDownLatch(2); - List testData = new ArrayList<>(); - new Thread(() -> { - registry.acquireLock("/lock"); - preCountDownLatch.countDown(); - 
logger.info(Thread.currentThread().getName() - + " :I got the lock, but I don't want to work. I want to rest for a while"); - try { - Thread.sleep(1000); - logger.info(Thread.currentThread().getName() + " :I'm going to start working"); - testData.add("thread1"); - - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } finally { - logger.info(Thread.currentThread().getName() + " :I have finished my work, now I release the lock"); - registry.releaseLock("/lock"); - allCountDownLatch.countDown(); - } - }).start(); - try { - preCountDownLatch.await(5, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - new Thread(() -> { - try { - logger.info(Thread.currentThread().getName() + " :I am trying to acquire the lock"); - registry.acquireLock("/lock"); - logger.info(Thread.currentThread().getName() + " :I got the lock and I started working"); - - testData.add("thread2"); - } finally { - registry.releaseLock("/lock"); - allCountDownLatch.countDown(); - } - - }).start(); - try { - allCountDownLatch.await(5, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - Assertions.assertEquals(testData, Arrays.asList("thread1", "thread2")); - } - - @Test - public void subscribeTest() { - boolean status = registry.subscribe("/sub", new TestListener()); - // The following add and delete operations are used for debugging - registry.put("/sub/m1", "tt", false); - registry.put("/sub/m2", "tt", false); - registry.delete("/sub/m2"); - registry.delete("/sub"); - Assertions.assertTrue(status); - - } - - static class TestListener implements SubscribeListener { - - @Override - public void notify(Event event) { - logger.info("I'm test listener"); - } - } - - @AfterAll - public static void after() throws IOException { - registry.close(); - } -} diff --git 
a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTestCase.java new file mode 100644 index 000000000000..39bfea8cc770 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistryTestCase.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.registry.etcd; + +import org.apache.dolphinscheduler.plugin.registry.RegistryTestCase; + +import java.net.URI; +import java.util.stream.Collectors; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; + +import io.etcd.jetcd.launcher.EtcdCluster; +import io.etcd.jetcd.test.EtcdClusterExtension; + +@SpringBootTest(classes = EtcdRegistryProperties.class) +@SpringBootApplication(scanBasePackageClasses = EtcdRegistryProperties.class) +public class EtcdRegistryTestCase extends RegistryTestCase { + + @Autowired + private EtcdRegistryProperties etcdRegistryProperties; + + private static EtcdCluster etcdCluster; + + @SneakyThrows + @BeforeAll + public static void setUpTestingServer() { + etcdCluster = EtcdClusterExtension.builder() + .withNodes(1) + .withImage("ibmcom/etcd:3.2.24") + .build() + .cluster(); + etcdCluster.start(); + System.clearProperty("registry.endpoints"); + System.setProperty("registry.endpoints", + etcdCluster.clientEndpoints().stream().map(URI::toString).collect(Collectors.joining(","))); + } + + @SneakyThrows + @Override + public EtcdRegistry createRegistry() { + return new EtcdRegistry(etcdRegistryProperties); + } + + @SneakyThrows + @AfterAll + public static void tearDownTestingServer() { + try (EtcdCluster cluster = etcdCluster) { + } + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/application.yaml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/application.yaml new file mode 100644 index 000000000000..083d38511c50 --- /dev/null +++ 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/application.yaml @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +registry: + type: etcd + ttl: 2s diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/logback.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/pom.xml new file mode 100644 index 000000000000..7f4b97d3efe5 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/pom.xml @@ -0,0 +1,60 @@ + + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler-registry-plugins + dev-SNAPSHOT + + + dolphinscheduler-registry-it + + + + 
org.apache.dolphinscheduler + dolphinscheduler-registry-api + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + false + + + + + test-jar + + + + + + + + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/src/test/java/org/apache/dolphinscheduler/plugin/registry/RegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/src/test/java/org/apache/dolphinscheduler/plugin/registry/RegistryTestCase.java new file mode 100644 index 000000000000..8fbd6bc5c021 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-it/src/test/java/org/apache/dolphinscheduler/plugin/registry/RegistryTestCase.java @@ -0,0 +1,290 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.registry; + +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.apache.dolphinscheduler.registry.api.ConnectionState; +import org.apache.dolphinscheduler.registry.api.Event; +import org.apache.dolphinscheduler.registry.api.Registry; +import org.apache.dolphinscheduler.registry.api.RegistryException; +import org.apache.dolphinscheduler.registry.api.SubscribeListener; + +import java.time.Duration; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import lombok.SneakyThrows; + +import org.assertj.core.util.Lists; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.google.common.truth.Truth; + +public abstract class RegistryTestCase { + + protected R registry; + + @BeforeEach + public void setupRegistry() { + registry = createRegistry(); + } + + @SneakyThrows + @AfterEach + public void tearDownRegistry() { + try (R registry = this.registry) { + } + } + + @Test + public void testIsConnected() { + registry.start(); + Truth.assertThat(registry.isConnected()).isTrue(); + } + + @Test + public void testConnectUntilTimeout() { + registry.start(); + await().atMost(Duration.ofSeconds(10)) + .untilAsserted(() -> registry.connectUntilTimeout(Duration.ofSeconds(3))); + + } + + @SneakyThrows + @Test + public void testSubscribe() { + registry.start(); + + final AtomicBoolean subscribeAdded = new AtomicBoolean(false); + final AtomicBoolean subscribeRemoved = new AtomicBoolean(false); + final AtomicBoolean subscribeUpdated = new AtomicBoolean(false); + + SubscribeListener subscribeListener = event -> { + System.out.println("Receive event: " + event); + if 
(event.type() == Event.Type.ADD) { + subscribeAdded.compareAndSet(false, true); + } + if (event.type() == Event.Type.REMOVE) { + subscribeRemoved.compareAndSet(false, true); + } + if (event.type() == Event.Type.UPDATE) { + subscribeUpdated.compareAndSet(false, true); + } + }; + String key = "/nodes/master" + System.nanoTime(); + registry.subscribe(key, subscribeListener); + registry.put(key, String.valueOf(System.nanoTime()), true); + // Sleep 3 seconds here since in mysql jdbc registry + // If multiple event occurs in a refresh time, only the last event will be triggered + Thread.sleep(3000); + registry.put(key, String.valueOf(System.nanoTime()), true); + Thread.sleep(3000); + registry.delete(key); + + await().atMost(Duration.ofSeconds(10)) + .untilAsserted(() -> { + Assertions.assertTrue(subscribeAdded.get()); + Assertions.assertTrue(subscribeUpdated.get()); + Assertions.assertTrue(subscribeRemoved.get()); + }); + } + + @SneakyThrows + @Test + public void testUnsubscribe() { + registry.start(); + + final AtomicBoolean subscribeAdded = new AtomicBoolean(false); + final AtomicBoolean subscribeRemoved = new AtomicBoolean(false); + final AtomicBoolean subscribeUpdated = new AtomicBoolean(false); + + SubscribeListener subscribeListener = event -> { + if (event.type() == Event.Type.ADD) { + subscribeAdded.compareAndSet(false, true); + } + if (event.type() == Event.Type.REMOVE) { + subscribeRemoved.compareAndSet(false, true); + } + if (event.type() == Event.Type.UPDATE) { + subscribeUpdated.compareAndSet(false, true); + } + }; + String key = "/nodes/master" + System.nanoTime(); + String value = "127.0.0.1:8080"; + registry.subscribe(key, subscribeListener); + registry.unsubscribe(key); + registry.put(key, value, true); + registry.put(key, value, true); + registry.delete(key); + + Thread.sleep(2000); + Assertions.assertFalse(subscribeAdded.get()); + Assertions.assertFalse(subscribeRemoved.get()); + Assertions.assertFalse(subscribeUpdated.get()); + + } + + @SneakyThrows + 
@Test + public void testAddConnectionStateListener() { + + AtomicReference connectionState = new AtomicReference<>(); + registry.addConnectionStateListener(connectionState::set); + + Truth.assertThat(connectionState.get()).isNull(); + registry.start(); + + await().atMost(Duration.ofSeconds(2)) + .until(() -> ConnectionState.CONNECTED == connectionState.get()); + + } + + @Test + public void testGet() { + registry.start(); + String key = "/nodes/master" + System.nanoTime(); + String value = "127.0.0.1:8080"; + assertThrows(RegistryException.class, () -> registry.get(key)); + registry.put(key, value, true); + Truth.assertThat(registry.get(key)).isEqualTo(value); + } + + @Test + public void testPut() { + registry.start(); + String key = "/nodes/master" + System.nanoTime(); + String value = "127.0.0.1:8080"; + registry.put(key, value, true); + Truth.assertThat(registry.get(key)).isEqualTo(value); + + // Update the value + registry.put(key, "123", true); + Truth.assertThat(registry.get(key)).isEqualTo("123"); + } + + @Test + public void testDelete() { + registry.start(); + String key = "/nodes/master" + System.nanoTime(); + String value = "127.0.0.1:8080"; + // Delete a non-existent key + registry.delete(key); + + registry.put(key, value, true); + Truth.assertThat(registry.get(key)).isEqualTo(value); + registry.delete(key); + Truth.assertThat(registry.exists(key)).isFalse(); + + } + + @Test + public void testChildren() { + registry.start(); + String master1 = "/nodes/children/127.0.0.1:8080"; + String master2 = "/nodes/children/127.0.0.2:8080"; + String value = "123"; + registry.put(master1, value, true); + registry.put(master2, value, true); + Truth.assertThat(registry.children("/nodes/children")) + .containsAtLeastElementsIn(Lists.newArrayList("127.0.0.1:8080", "127.0.0.2:8080")); + } + + @Test + public void testExists() { + registry.start(); + String key = "/nodes/master" + System.nanoTime(); + String value = "123"; + Truth.assertThat(registry.exists(key)).isFalse(); 
+ registry.put(key, value, true); + Truth.assertThat(registry.exists(key)).isTrue(); + + } + + @SneakyThrows + @Test + public void testAcquireLock() { + registry.start(); + String lockKey = "/lock" + System.nanoTime(); + + // 1. Acquire the lock at the main thread + Truth.assertThat(registry.acquireLock(lockKey)).isTrue(); + // Acquire the lock at the main thread again + // It should acquire success + Truth.assertThat(registry.acquireLock(lockKey)).isTrue(); + + // Acquire the lock at another thread + // It should acquire failed + CompletableFuture acquireResult = CompletableFuture.supplyAsync(() -> registry.acquireLock(lockKey)); + assertThrows(TimeoutException.class, () -> acquireResult.get(3000, TimeUnit.MILLISECONDS)); + + } + + @SneakyThrows + @Test + public void testAcquireLock_withTimeout() { + registry.start(); + String lockKey = "/lock" + System.nanoTime(); + // 1. Acquire the lock in the main thread + Truth.assertThat(registry.acquireLock(lockKey, 3000)).isTrue(); + + // Acquire the lock in the main thread + // It should acquire success + Truth.assertThat(registry.acquireLock(lockKey, 3000)).isTrue(); + + // Acquire the lock at another thread + // It should acquire failed + CompletableFuture acquireResult = + CompletableFuture.supplyAsync(() -> registry.acquireLock(lockKey, 3000)); + Truth.assertThat(acquireResult.get()).isFalse(); + + } + + @SneakyThrows + @Test + public void testReleaseLock() { + registry.start(); + String lockKey = "/lock" + System.nanoTime(); + // 1. Acquire the lock in the main thread + Truth.assertThat(registry.acquireLock(lockKey, 3000)).isTrue(); + + // Acquire the lock at another thread + // It should acquire failed + CompletableFuture acquireResult = + CompletableFuture.supplyAsync(() -> registry.acquireLock(lockKey, 3000)); + Truth.assertThat(acquireResult.get()).isFalse(); + + // 2. 
Release the lock in the main thread + Truth.assertThat(registry.releaseLock(lockKey)).isTrue(); + + // Acquire the lock at another thread + // It should acquire success + acquireResult = CompletableFuture.supplyAsync(() -> registry.acquireLock(lockKey, 3000)); + Truth.assertThat(acquireResult.get()).isTrue(); + } + + public abstract R createRegistry(); + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md index 3b1a2cb24f76..554c375218e8 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/README.md @@ -1,6 +1,7 @@ # Introduction -This module is the jdbc registry plugin module, this plugin will use jdbc as the registry center. Will use the database configuration same as DolphinScheduler in api'yaml default. +This module is the jdbc registry plugin module, this plugin will use jdbc as the registry center. Will use the database +configuration same as DolphinScheduler in api'yaml default. # How to use @@ -22,8 +23,11 @@ registry: After do this two steps, you can start your DolphinScheduler cluster, your cluster will use mysql as registry center to store server metadata. -NOTE: You need to add `mysql-connector-java.jar` into DS classpath if you use mysql database, since this plugin will not bundle this driver in distribution. -You can get the detail about Initialize the Database. +NOTE: You need to add `mysql-connector-java.jar` into DS classpath if you use mysql database, since this plugin will not +bundle this driver in distribution. +You can get the detail +about Initialize the +Database. 
## Optional configuration diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/pom.xml index 47b644929364..aa592b9da450 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/pom.xml +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/pom.xml @@ -62,6 +62,41 @@ mybatis-plus + + com.baomidou + mybatis-plus-boot-starter + + + org.apache.logging.log4j + log4j-to-slf4j + + + + + + org.apache.dolphinscheduler + dolphinscheduler-registry-it + ${project.version} + test-jar + test + + + + org.testcontainers + mysql + + + + org.testcontainers + postgresql + + + + org.springframework.boot + spring-boot-starter-test + test + + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/EphemeralDateManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/EphemeralDateManager.java similarity index 93% rename from dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/EphemeralDateManager.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/EphemeralDateManager.java index 64915e8ca894..7c601b91a1f9 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/EphemeralDateManager.java +++ 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/EphemeralDateManager.java @@ -15,12 +15,10 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.registry.jdbc.task; +package org.apache.dolphinscheduler.plugin.registry.jdbc; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcOperator; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcRegistryProperties; import org.apache.dolphinscheduler.registry.api.ConnectionListener; import org.apache.dolphinscheduler.registry.api.ConnectionState; @@ -42,7 +40,7 @@ * This thread is used to check the connect state to jdbc. */ @Slf4j -public class EphemeralDateManager implements AutoCloseable { +class EphemeralDateManager implements AutoCloseable { private ConnectionState connectionState; private final JdbcOperator jdbcOperator; @@ -51,7 +49,7 @@ public class EphemeralDateManager implements AutoCloseable { private final Set ephemeralDateIds = Collections.synchronizedSet(new HashSet<>()); private final ScheduledExecutorService scheduledExecutorService; - public EphemeralDateManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { + EphemeralDateManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { this.registryProperties = registryProperties; this.jdbcOperator = checkNotNull(jdbcOperator); this.scheduledExecutorService = Executors.newScheduledThreadPool( @@ -151,7 +149,7 @@ private ConnectionState getConnectionState() { } } - private void updateEphemeralDateTerm() throws SQLException { + private void updateEphemeralDateTerm() { if (!jdbcOperator.updateEphemeralDataTerm(ephemeralDateIds)) { log.warn("Update jdbc registry ephemeral data: {} term error", ephemeralDateIds); } diff --git 
a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcOperator.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcOperator.java index a56d609da77c..46ba84db8ab3 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcOperator.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcOperator.java @@ -27,28 +27,26 @@ import org.apache.commons.lang3.StringUtils; import java.sql.SQLException; -import java.sql.SQLIntegrityConstraintViolationException; import java.util.Collection; +import java.util.Date; import java.util.List; import java.util.stream.Collectors; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; +import org.springframework.dao.DuplicateKeyException; -@Component -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "jdbc") -public class JdbcOperator { +public final class JdbcOperator { - @Autowired - private JdbcRegistryDataMapper jdbcRegistryDataMapper; - @Autowired - private JdbcRegistryLockMapper jdbcRegistryLockMapper; + private final JdbcRegistryDataMapper jdbcRegistryDataMapper; + private final JdbcRegistryLockMapper jdbcRegistryLockMapper; private final long expireTimeWindow; - public JdbcOperator(JdbcRegistryProperties registryProperties) { + JdbcOperator(JdbcRegistryProperties registryProperties, + JdbcRegistryDataMapper jdbcRegistryDataMapper, + JdbcRegistryLockMapper jdbcRegistryLockMapper) { this.expireTimeWindow = 
registryProperties.getTermExpireTimes() * registryProperties.getTermRefreshInterval().toMillis(); + this.jdbcRegistryDataMapper = jdbcRegistryDataMapper; + this.jdbcRegistryLockMapper = jdbcRegistryLockMapper; } public void healthCheck() { @@ -62,17 +60,21 @@ public List queryAllJdbcRegistryData() { public Long insertOrUpdateEphemeralData(String key, String value) throws SQLException { JdbcRegistryData jdbcRegistryData = jdbcRegistryDataMapper.selectByKey(key); if (jdbcRegistryData != null) { - long id = jdbcRegistryData.getId(); - if (jdbcRegistryDataMapper.updateDataAndTermById(id, value, System.currentTimeMillis()) <= 0) { + jdbcRegistryData.setDataValue(value); + jdbcRegistryData.setLastUpdateTime(new Date()); + jdbcRegistryData.setLastTerm(System.currentTimeMillis()); + if (jdbcRegistryDataMapper.updateById(jdbcRegistryData) <= 0) { throw new SQLException(String.format("update registry value failed, key: %s, value: %s", key, value)); } - return id; + return jdbcRegistryData.getId(); } jdbcRegistryData = JdbcRegistryData.builder() .dataKey(key) .dataValue(value) .dataType(DataType.EPHEMERAL.getTypeValue()) .lastTerm(System.currentTimeMillis()) + .lastUpdateTime(new Date()) + .createTime(new Date()) .build(); jdbcRegistryDataMapper.insert(jdbcRegistryData); return jdbcRegistryData.getId(); @@ -81,17 +83,21 @@ public Long insertOrUpdateEphemeralData(String key, String value) throws SQLExce public long insertOrUpdatePersistentData(String key, String value) throws SQLException { JdbcRegistryData jdbcRegistryData = jdbcRegistryDataMapper.selectByKey(key); if (jdbcRegistryData != null) { - long id = jdbcRegistryData.getId(); - if (jdbcRegistryDataMapper.updateDataAndTermById(id, value, System.currentTimeMillis()) <= 0) { + jdbcRegistryData.setDataValue(value); + jdbcRegistryData.setLastUpdateTime(new Date()); + jdbcRegistryData.setLastTerm(System.currentTimeMillis()); + if (jdbcRegistryDataMapper.updateById(jdbcRegistryData) <= 0) { throw new 
SQLException(String.format("update registry value failed, key: %s, value: %s", key, value)); } - return id; + return jdbcRegistryData.getId(); } jdbcRegistryData = JdbcRegistryData.builder() .dataKey(key) .dataValue(value) .dataType(DataType.PERSISTENT.getTypeValue()) .lastTerm(System.currentTimeMillis()) + .lastUpdateTime(new Date()) + .createTime(new Date()) .build(); jdbcRegistryDataMapper.insert(jdbcRegistryData); return jdbcRegistryData.getId(); @@ -127,7 +133,7 @@ public List getChildren(String key) throws SQLException { .collect(Collectors.toList()); } - public boolean existKey(String key) throws SQLException { + public boolean existKey(String key) { JdbcRegistryData jdbcRegistryData = jdbcRegistryDataMapper.selectByKey(key); return jdbcRegistryData != null; } @@ -135,25 +141,25 @@ public boolean existKey(String key) throws SQLException { /** * Try to acquire the target Lock, if cannot acquire, return null. */ - @SuppressWarnings("checkstyle:IllegalCatch") - public JdbcRegistryLock tryToAcquireLock(String key) throws SQLException { + public JdbcRegistryLock tryToAcquireLock(String key) { JdbcRegistryLock jdbcRegistryLock = JdbcRegistryLock.builder() .lockKey(key) - .lockOwner(JdbcRegistryConstant.LOCK_OWNER) + .lockOwner(LockUtils.getLockOwner()) .lastTerm(System.currentTimeMillis()) + .lastUpdateTime(new Date()) .build(); try { jdbcRegistryLockMapper.insert(jdbcRegistryLock); return jdbcRegistryLock; } catch (Exception e) { - if (e instanceof SQLIntegrityConstraintViolationException) { + if (e instanceof DuplicateKeyException) { return null; } throw e; } } - public JdbcRegistryLock getLockById(long lockId) throws SQLException { + public JdbcRegistryLock getLockById(long lockId) { return jdbcRegistryLockMapper.selectById(lockId); } @@ -161,7 +167,7 @@ public boolean releaseLock(long lockId) throws SQLException { return jdbcRegistryLockMapper.deleteById(lockId) > 0; } - public boolean updateEphemeralDataTerm(Collection ephemeralDateIds) throws SQLException { 
+ public boolean updateEphemeralDataTerm(Collection ephemeralDateIds) { if (CollectionUtils.isEmpty(ephemeralDateIds)) { return true; } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistry.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistry.java index f3cbcfbc3b1d..2b7993c87bdc 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistry.java @@ -17,9 +17,7 @@ package org.apache.dolphinscheduler.plugin.registry.jdbc; -import org.apache.dolphinscheduler.plugin.registry.jdbc.task.EphemeralDateManager; -import org.apache.dolphinscheduler.plugin.registry.jdbc.task.RegistryLockManager; -import org.apache.dolphinscheduler.plugin.registry.jdbc.task.SubscribeDataManager; +import org.apache.dolphinscheduler.plugin.registry.jdbc.model.JdbcRegistryData; import org.apache.dolphinscheduler.registry.api.ConnectionListener; import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.dolphinscheduler.registry.api.Registry; @@ -30,31 +28,24 @@ import java.time.Duration; import java.util.Collection; -import javax.annotation.PostConstruct; - import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; - /** * This is one of the implementation of {@link Registry}, with this implementation, you need to rely on mysql database to * store the DolphinScheduler master/worker's metadata and do the server 
registry/unRegistry. */ -@Component -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "jdbc") @Slf4j -public class JdbcRegistry implements Registry { +public final class JdbcRegistry implements Registry { private final JdbcRegistryProperties jdbcRegistryProperties; private final EphemeralDateManager ephemeralDateManager; private final SubscribeDataManager subscribeDataManager; private final RegistryLockManager registryLockManager; - private JdbcOperator jdbcOperator; + private final JdbcOperator jdbcOperator; - public JdbcRegistry(JdbcRegistryProperties jdbcRegistryProperties, - JdbcOperator jdbcOperator) { + JdbcRegistry(JdbcRegistryProperties jdbcRegistryProperties, + JdbcOperator jdbcOperator) { this.jdbcOperator = jdbcOperator; jdbcOperator.clearExpireLock(); jdbcOperator.clearExpireEphemeralDate(); @@ -65,7 +56,7 @@ public JdbcRegistry(JdbcRegistryProperties jdbcRegistryProperties, log.info("Initialize Jdbc Registry..."); } - @PostConstruct + @Override public void start() { log.info("Starting Jdbc Registry..."); // start a jdbc connect check @@ -103,10 +94,9 @@ public void connectUntilTimeout(@NonNull Duration timeout) throws RegistryExcept } @Override - public boolean subscribe(String path, SubscribeListener listener) { + public void subscribe(String path, SubscribeListener listener) { // new a schedule thread to query the path, if the path subscribeDataManager.addListener(path, listener); - return true; } @Override @@ -122,8 +112,18 @@ public void addConnectionStateListener(ConnectionListener listener) { @Override public String get(String key) { - // get the key value - return subscribeDataManager.getData(key); + try { + // get the key value + JdbcRegistryData data = jdbcOperator.getData(key); + if (data == null) { + throw new RegistryException("key: " + key + " not exist"); + } + return data.getDataValue(); + } catch (RegistryException registryException) { + throw registryException; + } catch (Exception e) { + throw new 
RegistryException(String.format("Get key: %s error", key), e); + } } @Override @@ -179,6 +179,17 @@ public boolean acquireLock(String key) { } } + @Override + public boolean acquireLock(String key, long timeout) { + try { + return registryLockManager.acquireLock(key, timeout); + } catch (RegistryException e) { + throw e; + } catch (Exception e) { + throw new RegistryException(String.format("Acquire lock: %s error", key), e); + } + } + @Override public boolean releaseLock(String key) { registryLockManager.releaseLock(key); diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConfiguration.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryAutoConfiguration.java similarity index 55% rename from dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConfiguration.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryAutoConfiguration.java index 7b37749ab77f..603a47632216 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConfiguration.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryAutoConfiguration.java @@ -22,40 +22,70 @@ import org.apache.ibatis.session.SqlSessionFactory; +import lombok.extern.slf4j.Slf4j; + import org.mybatis.spring.SqlSessionTemplate; +import org.mybatis.spring.annotation.MapperScan; +import 
org.springframework.boot.autoconfigure.AutoConfigureAfter; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; +import com.baomidou.mybatisplus.autoconfigure.MybatisPlusAutoConfiguration; import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; import com.zaxxer.hikari.HikariDataSource; -@Configuration +@Slf4j +@ComponentScan +@Configuration(proxyBeanMethods = false) +@MapperScan("org.apache.dolphinscheduler.plugin.registry.jdbc.mapper") @ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "jdbc") -public class JdbcRegistryConfiguration { +@AutoConfigureAfter(MybatisPlusAutoConfiguration.class) +public class JdbcRegistryAutoConfiguration { + + public JdbcRegistryAutoConfiguration() { + log.info("Load JdbcRegistryAutoConfiguration"); + } + + @Bean + public JdbcOperator jdbcOperator(JdbcRegistryProperties jdbcRegistryProperties, + JdbcRegistryDataMapper jdbcRegistryDataMapper, + JdbcRegistryLockMapper jdbcRegistryLockMapper) { + return new JdbcOperator(jdbcRegistryProperties, jdbcRegistryDataMapper, jdbcRegistryLockMapper); + } + + @Bean + public JdbcRegistry jdbcRegistry(JdbcRegistryProperties jdbcRegistryProperties, JdbcOperator jdbcOperator) { + return new JdbcRegistry(jdbcRegistryProperties, jdbcOperator); + } @Bean - @ConditionalOnProperty(prefix = "registry.hikari-config", name = "jdbc-url") - public SqlSessionFactory jdbcRegistrySqlSessionFactory(JdbcRegistryProperties jdbcRegistryProperties) throws Exception { + @ConditionalOnMissingBean + public SqlSessionFactory sqlSessionFactory(JdbcRegistryProperties jdbcRegistryProperties) throws Exception { + log.info("Initialize jdbcRegistrySqlSessionFactory"); MybatisSqlSessionFactoryBean 
sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); sqlSessionFactoryBean.setDataSource(new HikariDataSource(jdbcRegistryProperties.getHikariConfig())); return sqlSessionFactoryBean.getObject(); } @Bean - public SqlSessionTemplate jdbcRegistrySqlSessionTemplate(SqlSessionFactory jdbcRegistrySqlSessionFactory) { - jdbcRegistrySqlSessionFactory.getConfiguration().addMapper(JdbcRegistryDataMapper.class); - jdbcRegistrySqlSessionFactory.getConfiguration().addMapper(JdbcRegistryLockMapper.class); + @ConditionalOnMissingBean + public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory jdbcRegistrySqlSessionFactory) { + log.info("Initialize jdbcRegistrySqlSessionTemplate"); return new SqlSessionTemplate(jdbcRegistrySqlSessionFactory); } @Bean public JdbcRegistryDataMapper jdbcRegistryDataMapper(SqlSessionTemplate jdbcRegistrySqlSessionTemplate) { + jdbcRegistrySqlSessionTemplate.getConfiguration().addMapper(JdbcRegistryDataMapper.class); return jdbcRegistrySqlSessionTemplate.getMapper(JdbcRegistryDataMapper.class); } @Bean public JdbcRegistryLockMapper jdbcRegistryLockMapper(SqlSessionTemplate jdbcRegistrySqlSessionTemplate) { + jdbcRegistrySqlSessionTemplate.getConfiguration().addMapper(JdbcRegistryLockMapper.class); return jdbcRegistrySqlSessionTemplate.getMapper(JdbcRegistryLockMapper.class); } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConstant.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConstant.java index 4a016f4d2ede..84496afb809b 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConstant.java +++ 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryConstant.java @@ -17,15 +17,11 @@ package org.apache.dolphinscheduler.plugin.registry.jdbc; -import org.apache.dolphinscheduler.common.utils.NetUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; - import lombok.experimental.UtilityClass; @UtilityClass -public final class JdbcRegistryConstant { +final class JdbcRegistryConstant { public static final long LOCK_ACQUIRE_INTERVAL = 1_000; - public static final String LOCK_OWNER = NetUtils.getHost() + "_" + OSUtils.getProcessID(); } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/LockUtils.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/LockUtils.java new file mode 100644 index 000000000000..f70f0afa5b0f --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/LockUtils.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.registry.jdbc; + +import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; + +import lombok.experimental.UtilityClass; + +@UtilityClass +public class LockUtils { + + private static final String LOCK_OWNER_PREFIX = NetUtils.getHost() + "_" + OSUtils.getProcessID() + "_"; + + public static String getLockOwner() { + return LOCK_OWNER_PREFIX + Thread.currentThread().getName(); + } + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/RegistryLockManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/RegistryLockManager.java similarity index 65% rename from dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/RegistryLockManager.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/RegistryLockManager.java index 46ccd15ec010..6c519685ff45 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/RegistryLockManager.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/RegistryLockManager.java @@ -15,12 +15,9 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.registry.jdbc.task; +package org.apache.dolphinscheduler.plugin.registry.jdbc; import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcOperator; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcRegistryConstant; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcRegistryProperties; import org.apache.dolphinscheduler.plugin.registry.jdbc.model.JdbcRegistryLock; import org.apache.dolphinscheduler.registry.api.RegistryException; @@ -40,14 +37,15 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; @Slf4j -public class RegistryLockManager implements AutoCloseable { +class RegistryLockManager implements AutoCloseable { private final JdbcOperator jdbcOperator; private final JdbcRegistryProperties registryProperties; + // lock key -> lock held by this process private final Map lockHoldMap; private final ScheduledExecutorService lockTermUpdateThreadPool; - public RegistryLockManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { + RegistryLockManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { this.registryProperties = registryProperties; this.jdbcOperator = jdbcOperator; this.lockHoldMap = new ConcurrentHashMap<>(); @@ -67,20 +65,49 @@ public void start() { * Acquire the lock, if cannot get the lock will await.
*/ public void acquireLock(String lockKey) throws RegistryException { - // maybe we can use the computeIf absent - lockHoldMap.computeIfAbsent(lockKey, key -> { - JdbcRegistryLock jdbcRegistryLock; - try { - while ((jdbcRegistryLock = jdbcOperator.tryToAcquireLock(lockKey)) == null) { - log.debug("Acquire the lock {} failed try again", key); - // acquire failed, wait and try again - ThreadUtils.sleep(JdbcRegistryConstant.LOCK_ACQUIRE_INTERVAL); + try { + while (true) { + JdbcRegistryLock jdbcRegistryLock = lockHoldMap.get(lockKey); + if (jdbcRegistryLock != null && LockUtils.getLockOwner().equals(jdbcRegistryLock.getLockOwner())) { + return; } - } catch (SQLException e) { - throw new RegistryException("Acquire the lock error", e); + jdbcRegistryLock = jdbcOperator.tryToAcquireLock(lockKey); + if (jdbcRegistryLock != null) { + lockHoldMap.put(lockKey, jdbcRegistryLock); + return; + } + log.debug("Acquire the lock {} failed try again", lockKey); + // acquire failed, wait and try again + ThreadUtils.sleep(JdbcRegistryConstant.LOCK_ACQUIRE_INTERVAL); + } + } catch (Exception ex) { + throw new RegistryException("Acquire the lock: " + lockKey + " error", ex); + } + } + + /** + * Try to acquire the lock within the given timeout; return false if the lock cannot be acquired in time.
+ */ + public boolean acquireLock(String lockKey, long timeout) throws RegistryException { + long startTime = System.currentTimeMillis(); + try { + while (System.currentTimeMillis() - startTime < timeout) { + JdbcRegistryLock jdbcRegistryLock = lockHoldMap.get(lockKey); + if (jdbcRegistryLock != null && LockUtils.getLockOwner().equals(jdbcRegistryLock.getLockOwner())) { + return true; + } + jdbcRegistryLock = jdbcOperator.tryToAcquireLock(lockKey); + if (jdbcRegistryLock != null) { + lockHoldMap.put(lockKey, jdbcRegistryLock); + return true; + } + log.debug("Acquire the lock {} failed try again", lockKey); + ThreadUtils.sleep(JdbcRegistryConstant.LOCK_ACQUIRE_INTERVAL); } - return jdbcRegistryLock; - }); + } catch (Exception e) { + throw new RegistryException("Acquire the lock: " + lockKey + " error", e); + } + return false; } public void releaseLock(String lockKey) { @@ -91,6 +118,7 @@ public void releaseLock(String lockKey) { jdbcOperator.releaseLock(jdbcRegistryLock.getId()); lockHoldMap.remove(lockKey); } catch (SQLException e) { + lockHoldMap.remove(lockKey); throw new RegistryException(String.format("Release lock: %s error", lockKey), e); } } @@ -125,7 +153,6 @@ public void run() { if (!jdbcOperator.updateLockTerm(lockIds)) { log.warn("Update the lock: {} term failed.", lockIds); } - jdbcOperator.clearExpireLock(); } catch (Exception e) { log.error("Update lock term error", e); } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/SubscribeDataManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/SubscribeDataManager.java similarity index 91% rename from 
dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/SubscribeDataManager.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/SubscribeDataManager.java index 4718b053f47e..e86dc4b15585 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/task/SubscribeDataManager.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/SubscribeDataManager.java @@ -15,10 +15,8 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.registry.jdbc.task; +package org.apache.dolphinscheduler.plugin.registry.jdbc; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcOperator; -import org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcRegistryProperties; import org.apache.dolphinscheduler.plugin.registry.jdbc.model.JdbcRegistryData; import org.apache.dolphinscheduler.registry.api.Event; import org.apache.dolphinscheduler.registry.api.SubscribeListener; @@ -42,7 +40,7 @@ * Used to refresh if the subscribe path has been changed. 
*/ @Slf4j -public class SubscribeDataManager implements AutoCloseable { +class SubscribeDataManager implements AutoCloseable { private final JdbcOperator jdbcOperator; private final JdbcRegistryProperties registryProperties; @@ -50,7 +48,7 @@ public class SubscribeDataManager implements AutoCloseable { private final ScheduledExecutorService dataSubscribeCheckThreadPool; private final Map jdbcRegistryDataMap = new ConcurrentHashMap<>(); - public SubscribeDataManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { + SubscribeDataManager(JdbcRegistryProperties registryProperties, JdbcOperator jdbcOperator) { this.registryProperties = registryProperties; this.jdbcOperator = jdbcOperator; this.dataSubscribeCheckThreadPool = Executors.newScheduledThreadPool( @@ -75,12 +73,8 @@ public void removeListener(String path) { dataSubScribeMap.remove(path); } - public String getData(String path) { - JdbcRegistryData jdbcRegistryData = jdbcRegistryDataMap.get(path); - if (jdbcRegistryData == null) { - return null; - } - return jdbcRegistryData.getDataValue(); + public JdbcRegistryData getData(String path) { + return jdbcRegistryDataMap.get(path); } @Override @@ -107,6 +101,7 @@ public void run() { List addedData = new ArrayList<>(); List deletedData = new ArrayList<>(); List updatedData = new ArrayList<>(); + for (Map.Entry entry : currentJdbcDataMap.entrySet()) { JdbcRegistryData newData = entry.getValue(); JdbcRegistryData oldData = jdbcRegistryDataMap.get(entry.getKey()); @@ -118,6 +113,7 @@ public void run() { } } } + for (Map.Entry entry : jdbcRegistryDataMap.entrySet()) { if (!currentJdbcDataMap.containsKey(entry.getKey())) { deletedData.add(entry.getValue()); diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/mapper/JdbcRegistryDataMapper.java 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/mapper/JdbcRegistryDataMapper.java index 701f2e7310b5..e1d27bbf0b74 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/mapper/JdbcRegistryDataMapper.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/java/org/apache/dolphinscheduler/plugin/registry/jdbc/mapper/JdbcRegistryDataMapper.java @@ -40,9 +40,6 @@ public interface JdbcRegistryDataMapper extends BaseMapper { @Select("select * from t_ds_jdbc_registry_data where data_key like CONCAT (#{key}, '%')") List fuzzyQueryByKey(@Param("key") String key); - @Update("update t_ds_jdbc_registry_data set data_value = #{data}, last_term = #{term} where id = #{id}") - int updateDataAndTermById(@Param("id") long id, @Param("data") String data, @Param("term") long term); - @Delete("delete from t_ds_jdbc_registry_data where data_key = #{key}") void deleteByKey(@Param("key") String key); diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/META-INF/spring.factories b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..aabe7e325203 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.plugin.registry.jdbc.JdbcRegistryAutoConfiguration diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/mysql_registry_init.sql b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/mysql_registry_init.sql index 30af3066ff53..408a2810a9aa 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/mysql_registry_init.sql +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/main/resources/mysql_registry_init.sql @@ -15,7 +15,6 @@ * limitations under the License. 
*/ -SET FOREIGN_KEY_CHECKS = 0; DROP TABLE IF EXISTS `t_ds_jdbc_registry_data`; CREATE TABLE `t_ds_jdbc_registry_data` @@ -25,7 +24,7 @@ CREATE TABLE `t_ds_jdbc_registry_data` `data_value` text NOT NULL COMMENT 'data, like zookeeper node value', `data_type` tinyint(4) NOT NULL COMMENT '1: ephemeral node, 2: persistent node', `last_term` bigint NOT NULL COMMENT 'last term time', - `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last update time', + `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last update time', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', PRIMARY KEY (`id`), unique (`data_key`) @@ -40,7 +39,7 @@ CREATE TABLE `t_ds_jdbc_registry_lock` `lock_key` varchar(256) NOT NULL COMMENT 'lock path', `lock_owner` varchar(256) NOT NULL COMMENT 'the lock owner, ip_processId', `last_term` bigint NOT NULL COMMENT 'last term time', - `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last update time', + `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last update time', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', PRIMARY KEY (`id`), unique (`lock_key`) diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryTestCase.java new file mode 100644 index 000000000000..c5c83b62e4a9 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/JdbcRegistryTestCase.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.registry.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.plugin.registry.RegistryTestCase; +import org.apache.dolphinscheduler.plugin.registry.jdbc.model.JdbcRegistryLock; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; + +@SpringBootTest(classes = {JdbcRegistryProperties.class}) +@SpringBootApplication(scanBasePackageClasses = JdbcRegistryProperties.class) +public abstract class JdbcRegistryTestCase extends RegistryTestCase { + + @Autowired + private JdbcRegistryProperties jdbcRegistryProperties; + + @Autowired + private JdbcOperator jdbcOperator; + + @Test + @SneakyThrows + public void testTryToAcquireLock_lockIsAlreadyBeenAcquired() { + final String lockKey = "testTryToAcquireLock_lockIsAlreadyBeenAcquired"; + // acquire success + JdbcRegistryLock jdbcRegistryLock = jdbcOperator.tryToAcquireLock(lockKey); + // acquire failed + assertThat(jdbcOperator.tryToAcquireLock(lockKey)).isNull(); + // release + 
jdbcOperator.releaseLock(jdbcRegistryLock.getId()); + } + + @Override + public JdbcRegistry createRegistry() { + return new JdbcRegistry(jdbcRegistryProperties, jdbcOperator); + } + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/MysqlJdbcRegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/MysqlJdbcRegistryTestCase.java new file mode 100644 index 000000000000..6dca1aeb2a94 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/MysqlJdbcRegistryTestCase.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.registry.jdbc; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.Statement; +import java.time.Duration; +import java.util.stream.Stream; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.springframework.test.context.ActiveProfiles; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.utility.DockerImageName; + +@ActiveProfiles("mysql") +class MysqlJdbcRegistryTestCase extends JdbcRegistryTestCase { + + private static GenericContainer mysqlContainer; + + @SneakyThrows + @BeforeAll + public static void setUpTestingServer() { + mysqlContainer = new MySQLContainer(DockerImageName.parse("mysql:8.0")) + .withUsername("root") + .withPassword("root") + .withDatabaseName("dolphinscheduler") + .withNetwork(Network.newNetwork()) + .withExposedPorts(3306) + .waitingFor(Wait.forHealthcheck().withStartupTimeout(Duration.ofSeconds(300))); + + Startables.deepStart(Stream.of(mysqlContainer)).join(); + + String jdbcUrl = "jdbc:mysql://localhost:" + mysqlContainer.getMappedPort(3306) + + "/dolphinscheduler?useSSL=false&serverTimezone=UTC"; + System.clearProperty("spring.datasource.url"); + System.setProperty("spring.datasource.url", jdbcUrl); + + try ( + Connection connection = DriverManager.getConnection(jdbcUrl, "root", "root"); + Statement statement = connection.createStatement();) { + statement.execute( + "CREATE TABLE `t_ds_jdbc_registry_data`\n" + + "(\n" + + " `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'primary key',\n" + + " `data_key` varchar(256) NOT NULL COMMENT 'key, like zookeeper node path',\n" + + " `data_value` text NOT NULL COMMENT 'data, like zookeeper node value',\n" + + + 
" `data_type` tinyint(4) NOT NULL COMMENT '1: ephemeral node, 2: persistent node',\n" + + + " `last_term` bigint NOT NULL COMMENT 'last term time',\n" + + " `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last update time',\n" + + + " `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',\n" + + + " PRIMARY KEY (`id`),\n" + + " unique (`data_key`)\n" + + ") ENGINE = InnoDB\n" + + " DEFAULT CHARSET = utf8;"); + statement.execute( + "CREATE TABLE `t_ds_jdbc_registry_lock`\n" + + "(\n" + + " `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'primary key',\n" + + " `lock_key` varchar(256) NOT NULL COMMENT 'lock path',\n" + + " `lock_owner` varchar(256) NOT NULL COMMENT 'the lock owner, ip_processId',\n" + + " `last_term` bigint NOT NULL COMMENT 'last term time',\n" + + " `last_update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last update time',\n" + + + " `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',\n" + + + " PRIMARY KEY (`id`),\n" + + " unique (`lock_key`)\n" + + ") ENGINE = InnoDB\n" + + " DEFAULT CHARSET = utf8;"); + } + } + + @SneakyThrows + @AfterAll + public static void tearDownTestingServer() { + mysqlContainer.close(); + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/PostgresqlJdbcRegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/PostgresqlJdbcRegistryTestCase.java new file mode 100644 index 000000000000..f34015e5a2c5 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/java/org/apache/dolphinscheduler/plugin/registry/jdbc/PostgresqlJdbcRegistryTestCase.java @@ -0,0 +1,98 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.registry.jdbc; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.Statement; +import java.util.stream.Stream; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.utility.DockerImageName; + +@ActiveProfiles("postgresql") +@SpringBootTest(classes = {JdbcRegistryProperties.class}) +@SpringBootApplication(scanBasePackageClasses = JdbcRegistryProperties.class) +public class PostgresqlJdbcRegistryTestCase extends JdbcRegistryTestCase { + + private static GenericContainer postgresqlContainer; + + @SneakyThrows + @BeforeAll + public static void setUpTestingServer() { + postgresqlContainer = new 
PostgreSQLContainer(DockerImageName.parse("postgres:16.0")) + .withUsername("root") + .withPassword("root") + .withDatabaseName("dolphinscheduler") + .withNetwork(Network.newNetwork()) + .withExposedPorts(5432); + + Startables.deepStart(Stream.of(postgresqlContainer)).join(); + + String jdbcUrl = "jdbc:postgresql://localhost:" + postgresqlContainer.getMappedPort(5432) + "/dolphinscheduler"; + System.clearProperty("spring.datasource.url"); + System.setProperty("spring.datasource.url", jdbcUrl); + try ( + Connection connection = DriverManager.getConnection(jdbcUrl, "root", "root"); + Statement statement = connection.createStatement();) { + statement.execute( + "create table t_ds_jdbc_registry_data\n" + + "(\n" + + " id serial\n" + + " constraint t_ds_jdbc_registry_data_pk primary key,\n" + + " data_key varchar not null,\n" + + " data_value text not null,\n" + + " data_type int4 not null,\n" + + " last_term bigint not null,\n" + + " last_update_time timestamp default current_timestamp not null,\n" + + " create_time timestamp default current_timestamp not null\n" + + ");"); + statement.execute( + "create unique index t_ds_jdbc_registry_data_key_uindex on t_ds_jdbc_registry_data (data_key);"); + statement.execute( + "create table t_ds_jdbc_registry_lock\n" + + "(\n" + + " id serial\n" + + " constraint t_ds_jdbc_registry_lock_pk primary key,\n" + + " lock_key varchar not null,\n" + + " lock_owner varchar not null,\n" + + " last_term bigint not null,\n" + + " last_update_time timestamp default current_timestamp not null,\n" + + " create_time timestamp default current_timestamp not null\n" + + ");"); + statement.execute( + "create unique index t_ds_jdbc_registry_lock_key_uindex on t_ds_jdbc_registry_lock (lock_key);"); + } + } + + @SneakyThrows + @AfterAll + public static void tearDownTestingServer() { + postgresqlContainer.close(); + } +} diff --git 
a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-mysql.yaml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-mysql.yaml new file mode 100644 index 000000000000..a65dd8c5d28d --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-mysql.yaml @@ -0,0 +1,31 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +spring: + sql: + init: + schema-locations: classpath:mysql_registry_init.sql + datasource: + driver-class-name: com.mysql.cj.jdbc.Driver + url: jdbc:mysql://localhost:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 + username: root + password: root + +registry: + type: jdbc + term-refresh-interval: 1s + term-expire-times: 1 + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-postgresql.yaml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-postgresql.yaml new file mode 100644 index 000000000000..4d5625635b85 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/application-postgresql.yaml @@ -0,0 +1,28 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +spring: + datasource: + driver-class-name: org.postgresql.Driver + url: jdbc:postgresql://localhost:5432/dolphinscheduler + username: root + password: root + +registry: + type: jdbc + term-refresh-interval: 1s + term-expire-times: 1 + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/logback.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-jdbc/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml index 8f8bd7b6451d..1d93ae87f1fc 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml @@ -26,12 +26,37 @@ dolphinscheduler-registry-zookeeper + + + + org.apache.dolphinscheduler + dolphinscheduler-bom + ${project.version} + pom + import + + + + org.apache.dolphinscheduler dolphinscheduler-registry-api + + org.apache.dolphinscheduler + dolphinscheduler-registry-it + ${project.version} + test-jar + test + + + + org.testcontainers + testcontainers + + org.apache.zookeeper zookeeper @@ -41,6 +66,7 @@ org.apache.curator curator-framework + org.apache.curator curator-client @@ -51,11 +77,6 @@ curator-recipes - - org.slf4j - slf4j-api - - io.dropwizard.metrics metrics-core @@ -70,5 +91,11 @@ curator-test test + + + org.springframework.boot + spring-boot-starter-test + test + diff --git 
a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java index 380df4d02102..7586d85c030f 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java @@ -26,11 +26,11 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public final class ZookeeperConnectionStateListener implements ConnectionStateListener { +final class ZookeeperConnectionStateListener implements ConnectionStateListener { private final ConnectionListener listener; - public ZookeeperConnectionStateListener(ConnectionListener listener) { + ZookeeperConnectionStateListener(ConnectionListener listener) { this.listener = listener; } @@ -38,6 +38,10 @@ public ZookeeperConnectionStateListener(ConnectionListener listener) { public void stateChanged(CuratorFramework client, org.apache.curator.framework.state.ConnectionState newState) { switch (newState) { + case CONNECTED: + log.info("Registry connected"); + listener.onUpdate(ConnectionState.CONNECTED); + break; case LOST: log.warn("Registry disconnected"); listener.onUpdate(ConnectionState.DISCONNECTED); diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java index 7333c10f0583..76313df02a58 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.registry.api.RegistryException; import org.apache.dolphinscheduler.registry.api.SubscribeListener; +import org.apache.commons.lang3.time.DurationUtils; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; @@ -45,20 +46,16 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import javax.annotation.PostConstruct; - import lombok.NonNull; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Component; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Strings; -@Component -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "zookeeper") -public final class ZookeeperRegistry implements Registry { +@Slf4j +final class ZookeeperRegistry implements Registry { private final ZookeeperRegistryProperties.ZookeeperProperties properties; private final CuratorFramework client; @@ -67,7 +64,7 @@ public final class ZookeeperRegistry implements Registry { private static final ThreadLocal> threadLocalLockMap = new ThreadLocal<>(); - public ZookeeperRegistry(ZookeeperRegistryProperties registryProperties) { + 
ZookeeperRegistry(ZookeeperRegistryProperties registryProperties) { properties = registryProperties.getZookeeper(); final ExponentialBackoffRetry retryPolicy = new ExponentialBackoffRetry( @@ -80,39 +77,36 @@ public ZookeeperRegistry(ZookeeperRegistryProperties registryProperties) { .connectString(properties.getConnectString()) .retryPolicy(retryPolicy) .namespace(properties.getNamespace()) - .sessionTimeoutMs((int) properties.getSessionTimeout().toMillis()) - .connectionTimeoutMs((int) properties.getConnectionTimeout().toMillis()); + .sessionTimeoutMs(DurationUtils.toMillisInt(properties.getSessionTimeout())) + .connectionTimeoutMs(DurationUtils.toMillisInt(properties.getConnectionTimeout())); final String digest = properties.getDigest(); if (!Strings.isNullOrEmpty(digest)) { - buildDigest(builder, digest); + builder.authorization("digest", digest.getBytes(StandardCharsets.UTF_8)) + .aclProvider(new ACLProvider() { + + @Override + public List getDefaultAcl() { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + + @Override + public List getAclForPath(final String path) { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + }); } client = builder.build(); } - private void buildDigest(CuratorFrameworkFactory.Builder builder, String digest) { - builder.authorization("digest", digest.getBytes(StandardCharsets.UTF_8)) - .aclProvider(new ACLProvider() { - - @Override - public List getDefaultAcl() { - return ZooDefs.Ids.CREATOR_ALL_ACL; - } - - @Override - public List getAclForPath(final String path) { - return ZooDefs.Ids.CREATOR_ALL_ACL; - } - }); - } - - @PostConstruct + @Override public void start() { client.start(); try { - if (!client.blockUntilConnected((int) properties.getBlockUntilConnected().toMillis(), MILLISECONDS)) { + if (!client.blockUntilConnected(DurationUtils.toMillisInt(properties.getBlockUntilConnected()), + MILLISECONDS)) { client.close(); - throw new RegistryException("zookeeper connect timeout: " + properties.getConnectString()); + throw new 
RegistryException("zookeeper connect failed: " + properties.getConnectString()); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); @@ -128,21 +122,21 @@ public void addConnectionStateListener(ConnectionListener listener) { @Override public void connectUntilTimeout(@NonNull Duration timeout) throws RegistryException { try { - if (!client.blockUntilConnected((int) timeout.toMillis(), MILLISECONDS)) { + if (!client.blockUntilConnected(DurationUtils.toMillisInt(timeout), MILLISECONDS)) { throw new RegistryException( - String.format("Cannot connect to the Zookeeper registry in %s s", timeout.getSeconds())); + String.format("Cannot connect to registry in %s s", timeout.getSeconds())); } } catch (RegistryException e) { throw e; } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RegistryException( - String.format("Cannot connect to the Zookeeper registry in %s s", timeout.getSeconds()), e); + String.format("Cannot connect to registry in %s s", timeout.getSeconds()), e); } } @Override - public boolean subscribe(String path, SubscribeListener listener) { + public void subscribe(String path, SubscribeListener listener) { final TreeCache treeCache = treeCacheMap.computeIfAbsent(path, $ -> new TreeCache(client, path)); treeCache.getListenable().addListener(($, event) -> listener.notify(new EventAdaptor(event, path))); try { @@ -151,7 +145,6 @@ public boolean subscribe(String path, SubscribeListener listener) { treeCacheMap.remove(path); throw new RegistryException("Failed to subscribe listener for key: " + path, e); } - return true; } @Override @@ -218,17 +211,60 @@ public void delete(String nodePath) { @Override public boolean acquireLock(String key) { - InterProcessMutex interProcessMutex = new InterProcessMutex(client, key); + Map processMutexMap = threadLocalLockMap.get(); + if (null == processMutexMap) { + processMutexMap = new HashMap<>(); + threadLocalLockMap.set(processMutexMap); + } + InterProcessMutex
interProcessMutex = null; try { - interProcessMutex.acquire(); - if (null == threadLocalLockMap.get()) { - threadLocalLockMap.set(new HashMap<>(3)); + interProcessMutex = + Optional.ofNullable(processMutexMap.get(key)).orElse(new InterProcessMutex(client, key)); + if (interProcessMutex.isAcquiredInThisProcess()) { + // Since etcd/jdbc cannot implement a reentrant lock, we need to check if the lock is already acquired + // If it is already acquired, return true directly + // This means you only need to release once when you acquire multiple times + return true; } - threadLocalLockMap.get().put(key, interProcessMutex); + interProcessMutex.acquire(); + processMutexMap.put(key, interProcessMutex); return true; } catch (Exception e) { try { - interProcessMutex.release(); + if (interProcessMutex != null) { + interProcessMutex.release(); + } + throw new RegistryException(String.format("zookeeper get lock: %s error", key), e); + } catch (Exception exception) { + throw new RegistryException(String.format("zookeeper get lock: %s error", key), e); + } + } + } + + @Override + public boolean acquireLock(String key, long timeout) { + Map processMutexMap = threadLocalLockMap.get(); + if (null == processMutexMap) { + processMutexMap = new HashMap<>(); + threadLocalLockMap.set(processMutexMap); + } + InterProcessMutex interProcessMutex = null; + try { + interProcessMutex = + Optional.ofNullable(processMutexMap.get(key)).orElse(new InterProcessMutex(client, key)); + if (interProcessMutex.isAcquiredInThisProcess()) { + return true; + } + if (interProcessMutex.acquire(timeout, MILLISECONDS)) { + processMutexMap.put(key, interProcessMutex); + return true; + } + return false; + } catch (Exception e) { + try { + if (interProcessMutex != null) { + interProcessMutex.release(); + } throw new RegistryException(String.format("zookeeper get lock: %s error", key), e); } catch (Exception exception) { throw new RegistryException(String.format("zookeeper get lock: %s error", key), e); @@ -238,13 
+274,18 @@ public boolean acquireLock(String key) { @Override public boolean releaseLock(String key) { - if (null == threadLocalLockMap.get().get(key)) { + Map processMutexMap = threadLocalLockMap.get(); + if (processMutexMap == null) { + return true; + } + InterProcessMutex interProcessMutex = processMutexMap.get(key); + if (null == interProcessMutex) { return false; } try { - threadLocalLockMap.get().get(key).release(); - threadLocalLockMap.get().remove(key); - if (threadLocalLockMap.get().isEmpty()) { + interProcessMutex.release(); + processMutexMap.remove(key); + if (processMutexMap.isEmpty()) { threadLocalLockMap.remove(); } } catch (Exception e) { diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryAutoConfiguration.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryAutoConfiguration.java new file mode 100644 index 000000000000..08ca4b418b9e --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryAutoConfiguration.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.registry.zookeeper; + +import org.apache.dolphinscheduler.registry.api.Registry; + +import lombok.extern.slf4j.Slf4j; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +@Slf4j +@ComponentScan +@Configuration(proxyBeanMethods = false) +@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "zookeeper") +public class ZookeeperRegistryAutoConfiguration { + + public ZookeeperRegistryAutoConfiguration() { + log.info("Load ZookeeperRegistryAutoConfiguration"); + } + + @Bean + @ConditionalOnMissingBean(value = Registry.class) + public ZookeeperRegistry zookeeperRegistry(ZookeeperRegistryProperties zookeeperRegistryProperties) { + ZookeeperRegistry zookeeperRegistry = new ZookeeperRegistry(zookeeperRegistryProperties); + zookeeperRegistry.start(); + return zookeeperRegistry; + } + +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryProperties.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryProperties.java index 
42dbc5d256f0..208fb8fc839c 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryProperties.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryProperties.java @@ -17,122 +17,101 @@ package org.apache.dolphinscheduler.plugin.registry.zookeeper; +import org.apache.commons.lang3.StringUtils; + import java.time.Duration; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; +import org.springframework.validation.Errors; +import org.springframework.validation.Validator; +@Slf4j +@Data +@NoArgsConstructor +@AllArgsConstructor @Configuration -@ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "zookeeper") @ConfigurationProperties(prefix = "registry") -public class ZookeeperRegistryProperties { +class ZookeeperRegistryProperties implements Validator { private ZookeeperProperties zookeeper = new ZookeeperProperties(); - public ZookeeperProperties getZookeeper() { - return zookeeper; - } + private String type; - public void setZookeeper(ZookeeperProperties zookeeper) { - this.zookeeper = zookeeper; + @Override + public boolean supports(Class clazz) { + return ZookeeperRegistryProperties.class.isAssignableFrom(clazz); } - public static final class ZookeeperProperties { - - private String namespace; - private String connectString; - private RetryPolicy retryPolicy = new RetryPolicy(); - private String digest; - private Duration sessionTimeout = Duration.ofSeconds(30); - private Duration 
connectionTimeout = Duration.ofSeconds(9); - private Duration blockUntilConnected = Duration.ofMillis(600); - - public String getNamespace() { - return namespace; + @Override + public void validate(Object target, Errors errors) { + ZookeeperRegistryProperties zookeeperRegistryProperties = (ZookeeperRegistryProperties) target; + if (zookeeperRegistryProperties.getZookeeper() == null) { + errors.rejectValue("zookeeper", "zookeeper", "zookeeper properties is required"); } - public void setNamespace(String namespace) { - this.namespace = namespace; + ZookeeperProperties zookeeper = zookeeperRegistryProperties.getZookeeper(); + if (StringUtils.isEmpty(zookeeper.getNamespace())) { + errors.rejectValue("zookeeper.namespace", "", "zookeeper.namespace cannot be null"); } - - public String getConnectString() { - return connectString; - } - - public void setConnectString(String connectString) { - this.connectString = connectString; - } - - public RetryPolicy getRetryPolicy() { - return retryPolicy; - } - - public void setRetryPolicy(RetryPolicy retryPolicy) { - this.retryPolicy = retryPolicy; - } - - public String getDigest() { - return digest; + if (StringUtils.isEmpty(zookeeper.getConnectString())) { + errors.rejectValue("zookeeper.connectString", "", "zookeeper.connectString cannot be null"); } - - public void setDigest(String digest) { - this.digest = digest; + if (zookeeper.getRetryPolicy() == null) { + errors.rejectValue("zookeeper.retryPolicy", "", "zookeeper.retryPolicy cannot be null"); } - - public Duration getSessionTimeout() { - return sessionTimeout; + if (zookeeper.getSessionTimeout() == null || zookeeper.getSessionTimeout().isZero() + || zookeeper.getSessionTimeout().isNegative()) { + errors.rejectValue("zookeeper.sessionTimeout", "", "zookeeper.sessionTimeout should be positive"); } - - public void setSessionTimeout(Duration sessionTimeout) { - this.sessionTimeout = sessionTimeout; + if (zookeeper.getConnectionTimeout() == null || 
zookeeper.getConnectionTimeout().isZero() + || zookeeper.getConnectionTimeout().isNegative()) { + errors.rejectValue("zookeeper.connectionTimeout", "", "zookeeper.connectionTimeout should be positive"); } - - public Duration getConnectionTimeout() { - return connectionTimeout; + if (zookeeper.getBlockUntilConnected() == null || zookeeper.getBlockUntilConnected().isZero() + || zookeeper.getBlockUntilConnected().isNegative()) { + errors.rejectValue("zookeeper.blockUntilConnected", "", "zookeeper.blockUntilConnected should be positive"); } + printConfig(); + } - public void setConnectionTimeout(Duration connectionTimeout) { - this.connectionTimeout = connectionTimeout; - } + private void printConfig() { + String config = + "\n****************************ZookeeperRegistryProperties**************************************" + + "\n namespace -> " + zookeeper.getNamespace() + + "\n connectString -> " + zookeeper.getConnectString() + + "\n retryPolicy -> " + zookeeper.getRetryPolicy() + + "\n digest -> " + zookeeper.getDigest() + + "\n sessionTimeout -> " + zookeeper.getSessionTimeout() + + "\n connectionTimeout -> " + zookeeper.getConnectionTimeout() + + "\n blockUntilConnected -> " + zookeeper.getBlockUntilConnected() + + "\n****************************ZookeeperRegistryProperties**************************************"; + log.info(config); + } - public Duration getBlockUntilConnected() { - return blockUntilConnected; - } + @Data + public static final class ZookeeperProperties { - public void setBlockUntilConnected(Duration blockUntilConnected) { - this.blockUntilConnected = blockUntilConnected; - } + private String namespace = "dolphinscheduler"; + private String connectString; + private RetryPolicy retryPolicy = new RetryPolicy(); + private String digest; + private Duration sessionTimeout = Duration.ofSeconds(60); + private Duration connectionTimeout = Duration.ofSeconds(15); + private Duration blockUntilConnected = Duration.ofSeconds(15); + @Data public static final class 
RetryPolicy { - private Duration baseSleepTime = Duration.ofMillis(60); - private int maxRetries; - private Duration maxSleep = Duration.ofMillis(300); - - public Duration getBaseSleepTime() { - return baseSleepTime; - } - - public void setBaseSleepTime(Duration baseSleepTime) { - this.baseSleepTime = baseSleepTime; - } - - public int getMaxRetries() { - return maxRetries; - } - - public void setMaxRetries(int maxRetries) { - this.maxRetries = maxRetries; - } - - public Duration getMaxSleep() { - return maxSleep; - } + private Duration baseSleepTime = Duration.ofSeconds(1); + private int maxRetries = 3; + private Duration maxSleep = Duration.ofSeconds(3); - public void setMaxSleep(Duration maxSleep) { - this.maxSleep = maxSleep; - } } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/resources/META-INF/spring.factories b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..821f1a70c139 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.plugin.registry.zookeeper.ZookeeperRegistryAutoConfiguration diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java deleted file mode 100644 index 0134106d474b..000000000000 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.registry.zookeeper; - -import org.apache.dolphinscheduler.registry.api.Event; -import org.apache.dolphinscheduler.registry.api.SubscribeListener; - -import org.apache.curator.test.TestingServer; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ZookeeperRegistryTest { - - private static final Logger logger = LoggerFactory.getLogger(ZookeeperRegistryTest.class); - - TestingServer server; - - ZookeeperRegistry registry; - - @BeforeEach - public void before() throws Exception { - server = new TestingServer(true); - - ZookeeperRegistryProperties p = new ZookeeperRegistryProperties(); - p.getZookeeper().setConnectString(server.getConnectString()); - registry = new ZookeeperRegistry(p); - registry.start(); - registry.put("/sub", "", false); - } - - @Test - public void persistTest() { - registry.put("/nodes/m1", "", false); - registry.put("/nodes/m2", "", false); - Assertions.assertEquals(Arrays.asList("m2", "m1"), registry.children("/nodes")); - Assertions.assertTrue(registry.exists("/nodes/m1")); - registry.delete("/nodes/m2"); - Assertions.assertFalse(registry.exists("/nodes/m2")); - } - - @Test - public void lockTest() throws InterruptedException { - CountDownLatch preCountDownLatch = new CountDownLatch(1); - CountDownLatch allCountDownLatch = new CountDownLatch(2); - List testData = new ArrayList<>(); - new Thread(() -> { - registry.acquireLock("/lock"); - preCountDownLatch.countDown(); - logger.info(Thread.currentThread().getName() - + " :I got the lock, but I don't want to work. 
I want to rest for a while"); - try { - Thread.sleep(1000); - logger.info(Thread.currentThread().getName() + " :I'm going to start working"); - testData.add("thread1"); - - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } finally { - logger.info(Thread.currentThread().getName() + " :I have finished my work, now I release the lock"); - registry.releaseLock("/lock"); - allCountDownLatch.countDown(); - } - }).start(); - preCountDownLatch.await(5, TimeUnit.SECONDS); - new Thread(() -> { - try { - logger.info(Thread.currentThread().getName() + " :I am trying to acquire the lock"); - registry.acquireLock("/lock"); - logger.info(Thread.currentThread().getName() + " :I got the lock and I started working"); - - testData.add("thread2"); - } finally { - registry.releaseLock("/lock"); - allCountDownLatch.countDown(); - } - - }).start(); - allCountDownLatch.await(5, TimeUnit.SECONDS); - Assertions.assertEquals(testData, Arrays.asList("thread1", "thread2")); - - } - - @Test - public void subscribeTest() { - boolean status = registry.subscribe("/sub", new TestListener()); - Assertions.assertTrue(status); - - } - - static class TestListener implements SubscribeListener { - - @Override - public void notify(Event event) { - logger.info("I'm test listener"); - } - } - - @AfterEach - public void after() throws IOException { - registry.close(); - server.close(); - } - -} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTestCase.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTestCase.java new file mode 100644 index 000000000000..73c784b2df79 --- /dev/null +++ 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTestCase.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.registry.zookeeper; + +import org.apache.dolphinscheduler.plugin.registry.RegistryTestCase; + +import java.util.stream.Stream; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.utility.DockerImageName; + +@SpringBootTest(classes = ZookeeperRegistryProperties.class) +@SpringBootApplication(scanBasePackageClasses = ZookeeperRegistryProperties.class) +class ZookeeperRegistryTestCase extends RegistryTestCase { + + @Autowired + private ZookeeperRegistryProperties zookeeperRegistryProperties; + + private static GenericContainer zookeeperContainer; + + private static final Network NETWORK = Network.newNetwork(); + + @SneakyThrows + @BeforeAll + public static void setUpTestingServer() { + zookeeperContainer = new GenericContainer<>(DockerImageName.parse("zookeeper:3.8")) + .withNetwork(NETWORK) + .withExposedPorts(2181); + Startables.deepStart(Stream.of(zookeeperContainer)).join(); + System.clearProperty("registry.zookeeper.connect-string"); + System.setProperty("registry.zookeeper.connect-string", "localhost:" + zookeeperContainer.getMappedPort(2181)); + } + + @SneakyThrows + @Override + public ZookeeperRegistry createRegistry() { + return new ZookeeperRegistry(zookeeperRegistryProperties); + } + + @SneakyThrows + @AfterAll + public static void tearDownTestingServer() { + zookeeperContainer.close(); + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/application.yaml 
b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/application.yaml new file mode 100644 index 000000000000..92902a608cb4 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/application.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +registry: + type: zookeeper + zookeeper: + namespace: dolphinscheduler + connect-string: 127.0.0.1:2181 + retry-policy: + base-sleep-time: 60ms + max-sleep: 300ms + max-retries: 5 + session-timeout: 30s + connection-timeout: 9s + block-until-connected: 3s + digest: ~ diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/logback.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml index d3e4df428a03..aa184104d63d 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml @@ -32,5 +32,6 @@ dolphinscheduler-registry-zookeeper dolphinscheduler-registry-jdbc dolphinscheduler-registry-etcd + dolphinscheduler-registry-it diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerException.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerException.java index c81f11420464..5808787cda8b 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerException.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerException.java @@ -19,12 +19,12 @@ public class SchedulerException extends RuntimeException { - public 
SchedulerException(String message) { - super(message); + public SchedulerException(SchedulerExceptionEnum schedulerExceptionEnum) { + super("Scheduler[" + schedulerExceptionEnum.getCode() + "] " + schedulerExceptionEnum.getMessage()); } - public SchedulerException(String message, Throwable cause) { - super(message, cause); + public SchedulerException(SchedulerExceptionEnum schedulerExceptionEnum, Throwable cause) { + super("Scheduler[" + schedulerExceptionEnum.getCode() + "] " + schedulerExceptionEnum.getMessage(), cause); } } diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerExceptionEnum.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerExceptionEnum.java new file mode 100644 index 000000000000..d8a0a5ef4edd --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-api/src/main/java/org/apache/dolphinscheduler/scheduler/api/SchedulerExceptionEnum.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.scheduler.api; + +public interface SchedulerExceptionEnum { + + String getCode(); + + String getMessage(); + +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java index 2c189d2bf837..8ce6480c4563 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java @@ -17,23 +17,19 @@ package org.apache.dolphinscheduler.scheduler.quartz; -import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Schedule; -import org.apache.dolphinscheduler.scheduler.quartz.utils.QuartzTaskUtils; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.service.command.CommandService; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.commons.lang3.StringUtils; - import java.util.Date; import lombok.extern.slf4j.Slf4j; -import org.quartz.JobDataMap; import org.quartz.JobExecutionContext; import org.quartz.JobKey; import org.quartz.Scheduler; @@ -56,10 +52,9 @@ public class ProcessScheduleTask extends QuartzJobBean { @Timed(value = "ds.master.quartz.job.execution.time", percentiles = {0.5, 0.75, 0.95, 0.99}, histogram = true) @Override protected void 
executeInternal(JobExecutionContext context) { - JobDataMap dataMap = context.getJobDetail().getJobDataMap(); - - int projectId = dataMap.getInt(QuartzTaskUtils.PROJECT_ID); - int scheduleId = dataMap.getInt(QuartzTaskUtils.SCHEDULE_ID); + QuartzJobData quartzJobData = QuartzJobData.of(context.getJobDetail().getJobDataMap()); + int projectId = quartzJobData.getProjectId(); + int scheduleId = quartzJobData.getScheduleId(); Date scheduledFireTime = context.getScheduledFireTime(); @@ -96,8 +91,7 @@ protected void executeInternal(JobExecutionContext context) { command.setScheduleTime(scheduledFireTime); command.setStartTime(fireTime); command.setWarningGroupId(schedule.getWarningGroupId()); - String workerGroup = StringUtils.isEmpty(schedule.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP - : schedule.getWorkerGroup(); + String workerGroup = WorkerGroupUtils.getWorkerGroupOrDefault(schedule.getWorkerGroup()); command.setWorkerGroup(workerGroup); command.setTenantCode(schedule.getTenantCode()); command.setEnvironmentCode(schedule.getEnvironmentCode()); @@ -110,7 +104,7 @@ protected void executeInternal(JobExecutionContext context) { private void deleteJob(JobExecutionContext context, int projectId, int scheduleId) { final Scheduler scheduler = context.getScheduler(); - JobKey jobKey = QuartzTaskUtils.getJobKey(scheduleId, projectId); + JobKey jobKey = QuartzJobKey.of(projectId, scheduleId).toJobKey(); try { if (scheduler.checkExists(jobKey)) { log.info("Try to delete job: {}, projectId: {}, schedulerId", projectId, scheduleId); diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzCornTriggerBuilder.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzCornTriggerBuilder.java new file mode 100644 index 000000000000..b7177e74db84 --- /dev/null +++ 
b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzCornTriggerBuilder.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.Schedule; + +import java.util.Date; + +import org.quartz.CronScheduleBuilder; +import org.quartz.CronTrigger; +import org.quartz.JobKey; +import org.quartz.TriggerBuilder; +import org.quartz.TriggerKey; + +/** + * QuartzCornTriggerBuilder used to build a {@link CronTrigger} instance. 
+ */ +public class QuartzCornTriggerBuilder implements QuartzTriggerBuilder { + + private Integer projectId; + + private Schedule schedule; + + public static QuartzCornTriggerBuilder newBuilder() { + return new QuartzCornTriggerBuilder(); + } + + public QuartzCornTriggerBuilder withProjectId(Integer projectId) { + this.projectId = projectId; + return this; + } + + public QuartzCornTriggerBuilder withSchedule(Schedule schedule) { + this.schedule = schedule; + return this; + } + + @Override + public CronTrigger build() { + + if (projectId == null) { + throw new IllegalArgumentException("projectId cannot be null"); + } + if (schedule == null) { + throw new IllegalArgumentException("schedule cannot be null"); + } + + /** + * transform from server default timezone to schedule timezone + * e.g. server default timezone is `UTC` + * user set a schedule with startTime `2022-04-28 10:00:00`, timezone is `Asia/Shanghai`, + * api skip to transform it and save into databases directly, startTime `2022-04-28 10:00:00`, timezone is `UTC`, which actually added 8 hours, + * so when add job to quartz, it should recover by transform timezone + */ + Date startDate = DateUtils.transformTimezoneDate(schedule.getStartTime(), schedule.getTimezoneId()); + Date endDate = DateUtils.transformTimezoneDate(schedule.getEndTime(), schedule.getTimezoneId()); + /** + * If the start time is less than the current time, the start time is set to the current time. + * We do this change to avoid misfires all triggers when update the scheduler. 
+ */ + Date now = new Date(); + if (startDate.before(now)) { + startDate = now; + } + JobKey jobKey = QuartzJobKey.of(projectId, schedule.getId()).toJobKey(); + + TriggerKey triggerKey = TriggerKey.triggerKey(jobKey.getName(), jobKey.getGroup()); + return TriggerBuilder.newTrigger() + .withIdentity(triggerKey) + .startAt(startDate) + .endAt(endDate) + .withSchedule( + CronScheduleBuilder.cronSchedule(schedule.getCrontab()) + .withMisfireHandlingInstructionIgnoreMisfires() + .inTimeZone(DateUtils.getTimezone(schedule.getTimezoneId()))) + .build(); + } + +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobData.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobData.java new file mode 100644 index 000000000000..09e255bef3f2 --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobData.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import lombok.Getter; + +import org.quartz.JobDataMap; + +@Getter +public class QuartzJobData { + + private static final String PROJECT_ID = "projectId"; + private static final String SCHEDULE_ID = "scheduleId"; + + private final Integer projectId; + + private final Integer scheduleId; + + private QuartzJobData(Integer projectId, Integer scheduleId) { + if (projectId == null) { + throw new IllegalArgumentException("projectId cannot be null"); + } + if (scheduleId == null) { + throw new IllegalArgumentException("schedule cannot be null"); + } + this.projectId = projectId; + this.scheduleId = scheduleId; + } + + public static QuartzJobData of(Integer projectId, Integer scheduleId) { + return new QuartzJobData(projectId, scheduleId); + } + + public static QuartzJobData of(JobDataMap jobDataMap) { + Integer projectId = jobDataMap.getInt(PROJECT_ID); + Integer scheduleId = jobDataMap.getInt(SCHEDULE_ID); + return of(projectId, scheduleId); + } + + public JobDataMap toJobDataMap() { + JobDataMap jobDataMap = new JobDataMap(); + jobDataMap.put(PROJECT_ID, projectId); + jobDataMap.put(SCHEDULE_ID, scheduleId); + return jobDataMap; + } + +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDetailBuilder.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDetailBuilder.java new file mode 100644 index 000000000000..7a579d01835d --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDetailBuilder.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import org.quartz.JobBuilder; +import org.quartz.JobDetail; + +public class QuartzJobDetailBuilder { + + private Integer projectId; + + private Integer scheduleId; + + public static QuartzJobDetailBuilder newBuilder() { + return new QuartzJobDetailBuilder(); + } + + public QuartzJobDetailBuilder withProjectId(Integer projectId) { + this.projectId = projectId; + return this; + } + + public QuartzJobDetailBuilder withSchedule(Integer scheduleId) { + this.scheduleId = scheduleId; + return this; + } + + public JobDetail build() { + if (projectId == null) { + throw new IllegalArgumentException("projectId cannot be null"); + } + if (scheduleId == null) { + throw new IllegalArgumentException("scheduleId cannot be null"); + } + QuartzJobData quartzJobData = QuartzJobData.of(projectId, scheduleId); + + return JobBuilder.newJob(ProcessScheduleTask.class) + .withIdentity(QuartzJobKey.of(projectId, scheduleId).toJobKey()) + .setJobData(quartzJobData.toJobDataMap()) + .build(); + } + +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKey.java 
b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKey.java new file mode 100644 index 000000000000..b7666094e282 --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKey.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import lombok.Getter; + +import org.quartz.JobKey; + +@Getter +public class QuartzJobKey { + + private final int schedulerId; + private final int projectId; + + private static final String QUARTZ_JOB_PREFIX = "job"; + private static final String QUARTZ_JOB_GROUP_PREFIX = "jobgroup"; + private static final String UNDERLINE = "_"; + + private QuartzJobKey(int projectId, int schedulerId) { + this.schedulerId = schedulerId; + this.projectId = projectId; + } + + public static QuartzJobKey of(int projectId, int schedulerId) { + return new QuartzJobKey(projectId, schedulerId); + } + + public JobKey toJobKey() { + // todo: We don't need to add prefix to job name and job group? 
+ String jobName = QUARTZ_JOB_PREFIX + UNDERLINE + schedulerId; + String jobGroup = QUARTZ_JOB_GROUP_PREFIX + UNDERLINE + projectId; + return new JobKey(jobName, jobGroup); + } +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java index 7d0f1ce73ccb..70d5bf8fabc0 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java @@ -17,20 +17,10 @@ package org.apache.dolphinscheduler.scheduler.quartz; -import static org.quartz.CronScheduleBuilder.cronSchedule; -import static org.quartz.JobBuilder.newJob; -import static org.quartz.TriggerBuilder.newTrigger; - -import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.scheduler.api.SchedulerApi; import org.apache.dolphinscheduler.scheduler.api.SchedulerException; -import org.apache.dolphinscheduler.scheduler.quartz.utils.QuartzTaskUtils; - -import java.util.Date; -import java.util.Map; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; +import org.apache.dolphinscheduler.scheduler.quartz.exception.QuartzSchedulerExceptionEnum; import lombok.extern.slf4j.Slf4j; @@ -38,118 +28,49 @@ import org.quartz.JobDetail; import org.quartz.JobKey; import org.quartz.Scheduler; -import org.quartz.TriggerKey; -import org.springframework.beans.factory.annotation.Autowired; -import com.google.common.base.Strings; +import com.google.common.collect.Sets; @Slf4j public class QuartzScheduler 
implements SchedulerApi { - @Autowired - private Scheduler scheduler; + private final Scheduler scheduler; - private final ReadWriteLock lock = new ReentrantReadWriteLock(); + public QuartzScheduler(Scheduler scheduler) { + this.scheduler = scheduler; + } @Override public void start() throws SchedulerException { try { scheduler.start(); } catch (Exception e) { - throw new SchedulerException("Failed to start quartz scheduler ", e); + throw new SchedulerException(QuartzSchedulerExceptionEnum.QUARTZ_SCHEDULER_START_ERROR, e); } } @Override public void insertOrUpdateScheduleTask(int projectId, Schedule schedule) throws SchedulerException { - JobKey jobKey = QuartzTaskUtils.getJobKey(schedule.getId(), projectId); - Map jobDataMap = QuartzTaskUtils.buildDataMap(projectId, schedule); - String cronExpression = schedule.getCrontab(); - String timezoneId = schedule.getTimezoneId(); - - /** - * transform from server default timezone to schedule timezone - * e.g. server default timezone is `UTC` - * user set a schedule with startTime `2022-04-28 10:00:00`, timezone is `Asia/Shanghai`, - * api skip to transform it and save into databases directly, startTime `2022-04-28 10:00:00`, timezone is `UTC`, which actually added 8 hours, - * so when add job to quartz, it should recover by transform timezone - */ - Date startDate = DateUtils.transformTimezoneDate(schedule.getStartTime(), timezoneId); - Date endDate = DateUtils.transformTimezoneDate(schedule.getEndTime(), timezoneId); - /** - * If the start time is less than the current time, the start time is set to the current time. - * We do this change to avoid misfires all triggers when update the scheduler. 
- */ - Date now = new Date(); - if (startDate.before(now)) { - startDate = now; - } - - lock.writeLock().lock(); try { - - JobDetail jobDetail; - // add a task (if this task already exists, return this task directly) - if (scheduler.checkExists(jobKey)) { - - jobDetail = scheduler.getJobDetail(jobKey); - jobDetail.getJobDataMap().putAll(jobDataMap); - } else { - jobDetail = newJob(ProcessScheduleTask.class).withIdentity(jobKey).build(); - - jobDetail.getJobDataMap().putAll(jobDataMap); - - scheduler.addJob(jobDetail, false, true); - - log.info("Add job, job name: {}, group name: {}", jobKey.getName(), jobKey.getGroup()); - } - - TriggerKey triggerKey = new TriggerKey(jobKey.getName(), jobKey.getGroup()); - /* - * Instructs the Scheduler that upon a mis-fire situation, the CronTrigger wants to have it's next-fire-time - * updated to the next time in the schedule after the current time (taking into account any associated - * Calendar), but it does not want to be fired now. - */ - CronTrigger cronTrigger = newTrigger() - .withIdentity(triggerKey) - .startAt(startDate) - .endAt(endDate) - .withSchedule( - cronSchedule(cronExpression) - .withMisfireHandlingInstructionIgnoreMisfires() - .inTimeZone(DateUtils.getTimezone(timezoneId))) - .forJob(jobDetail).build(); - - if (scheduler.checkExists(triggerKey)) { - // updateProcessInstance scheduler trigger when scheduler cycle changes - CronTrigger oldCronTrigger = (CronTrigger) scheduler.getTrigger(triggerKey); - String oldCronExpression = oldCronTrigger.getCronExpression(); - - if (!Strings.nullToEmpty(cronExpression).equalsIgnoreCase(Strings.nullToEmpty(oldCronExpression))) { - // reschedule job trigger - scheduler.rescheduleJob(triggerKey, cronTrigger); - log.info( - "reschedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", - triggerKey.getName(), triggerKey.getGroup(), cronExpression, startDate, endDate); - } - } else { - scheduler.scheduleJob(cronTrigger); - 
log.info( - "schedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", - triggerKey.getName(), triggerKey.getGroup(), cronExpression, startDate, endDate); - } - + CronTrigger cornTrigger = QuartzCornTriggerBuilder.newBuilder() + .withProjectId(projectId) + .withSchedule(schedule) + .build(); + JobDetail jobDetail = QuartzJobDetailBuilder.newBuilder() + .withProjectId(projectId) + .withSchedule(schedule.getId()) + .build(); + scheduler.scheduleJob(jobDetail, Sets.newHashSet(cornTrigger), true); + log.info("Success scheduleJob: {} with trigger: {} at quartz", jobDetail, cornTrigger); } catch (Exception e) { log.error("Failed to add scheduler task, projectId: {}, scheduler: {}", projectId, schedule, e); - throw new SchedulerException("Add schedule job failed", e); - } finally { - lock.writeLock().unlock(); + throw new SchedulerException(QuartzSchedulerExceptionEnum.QUARTZ_UPSERT_JOB_ERROR, e); } } @Override public void deleteScheduleTask(int projectId, int scheduleId) throws SchedulerException { - JobKey jobKey = QuartzTaskUtils.getJobKey(scheduleId, projectId); + JobKey jobKey = QuartzJobKey.of(projectId, scheduleId).toJobKey(); try { if (scheduler.checkExists(jobKey)) { log.info("Try to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId); @@ -157,7 +78,7 @@ public void deleteScheduleTask(int projectId, int scheduleId) throws SchedulerEx } } catch (Exception e) { log.error("Failed to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId, e); - throw new SchedulerException("Failed to delete scheduler task"); + throw new SchedulerException(QuartzSchedulerExceptionEnum.QUARTZ_DELETE_JOB_ERROR, e); } } @@ -166,8 +87,8 @@ public void close() { // nothing to do try { scheduler.shutdown(); - } catch (org.quartz.SchedulerException e) { - throw new SchedulerException("Failed to shutdown scheduler", e); + } catch (Exception e) { + throw new 
SchedulerException(QuartzSchedulerExceptionEnum.QUARTZ_SCHEDULER_SHOWDOWN_ERROR, e); } } } diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerAutoConfiguration.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerAutoConfiguration.java new file mode 100644 index 000000000000..34fb78258772 --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzSchedulerAutoConfiguration.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import org.apache.dolphinscheduler.scheduler.api.SchedulerApi; + +import org.quartz.Scheduler; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration; +import org.springframework.context.annotation.Bean; + +@AutoConfiguration(after = {QuartzAutoConfiguration.class}) +@ConditionalOnClass(value = Scheduler.class) +public class QuartzSchedulerAutoConfiguration { + + @Bean + @ConditionalOnMissingBean + public SchedulerApi schedulerApi(Scheduler scheduler) { + return new QuartzScheduler(scheduler); + } +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzTriggerBuilder.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzTriggerBuilder.java new file mode 100644 index 000000000000..06c9101c5f9a --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzTriggerBuilder.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import org.quartz.CronTrigger; + +public interface QuartzTriggerBuilder { + + CronTrigger build(); +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnum.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnum.java new file mode 100644 index 000000000000..76fa39e9117d --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnum.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz.exception; + +import org.apache.dolphinscheduler.scheduler.api.SchedulerExceptionEnum; + +public enum QuartzSchedulerExceptionEnum implements SchedulerExceptionEnum { + + QUARTZ_SCHEDULER_START_ERROR("QUARTZ-001", "Quartz Scheduler start error"), + QUARTZ_UPSERT_JOB_ERROR("QUARTZ-002", "Upsert quartz job error"), + QUARTZ_DELETE_JOB_ERROR("QUARTZ-003", "Delete quartz job error"), + QUARTZ_SCHEDULER_SHOWDOWN_ERROR("QUARTZ-004", "Quartz Scheduler shutdown error"), + ; + + private final String code; + + private final String message; + + QuartzSchedulerExceptionEnum(String code, String message) { + this.code = code; + this.message = message; + } + + @Override + public String getCode() { + return code; + } + + @Override + public String getMessage() { + return message; + } +} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/utils/QuartzTaskUtils.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/utils/QuartzTaskUtils.java deleted file mode 100644 index e4f3471399d4..000000000000 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/utils/QuartzTaskUtils.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.scheduler.quartz.utils; - -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.Schedule; - -import java.util.HashMap; -import java.util.Map; - -import org.quartz.JobKey; - -public final class QuartzTaskUtils { - - public static final String QUARTZ_JOB_PREFIX = "job"; - public static final String QUARTZ_JOB_GROUP_PREFIX = "jobgroup"; - public static final String UNDERLINE = "_"; - public static final String PROJECT_ID = "projectId"; - public static final String SCHEDULE_ID = "scheduleId"; - public static final String SCHEDULE = "schedule"; - - /** - * @param schedulerId scheduler id - * @return quartz job name - */ - public static JobKey getJobKey(int schedulerId, int projectId) { - String jobName = QUARTZ_JOB_PREFIX + UNDERLINE + schedulerId; - String jobGroup = QUARTZ_JOB_GROUP_PREFIX + UNDERLINE + projectId; - return new JobKey(jobName, jobGroup); - } - - /** - * create quartz job data, include projectId and scheduleId, schedule. 
- */ - public static Map buildDataMap(int projectId, Schedule schedule) { - Map dataMap = new HashMap<>(8); - dataMap.put(PROJECT_ID, projectId); - dataMap.put(SCHEDULE_ID, schedule.getId()); - dataMap.put(SCHEDULE, JSONUtils.toJsonString(schedule)); - - return dataMap; - } - -} diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/resources/META-INF/spring.factories b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/resources/META-INF/spring.factories new file mode 100644 index 000000000000..b34f896b84af --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/resources/META-INF/spring.factories @@ -0,0 +1,19 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ + org.apache.dolphinscheduler.scheduler.quartz.QuartzSchedulerAutoConfiguration diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDataTest.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDataTest.java new file mode 100644 index 000000000000..e9493cc78276 --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobDataTest.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.scheduler.quartz; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; +import org.quartz.JobDataMap; + +import com.google.common.collect.ImmutableMap; + +class QuartzJobDataTest { + + @Test + void of() { + QuartzJobData quartzJobData = QuartzJobData.of(1, 2); + assertEquals(1, quartzJobData.getProjectId()); + assertEquals(2, quartzJobData.getScheduleId()); + } + + @Test + void of_JobDataMap() { + + ImmutableMap map = ImmutableMap.of( + "projectId", 1, + "scheduleId", 2); + JobDataMap jobDataMap = new JobDataMap(map); + QuartzJobData quartzJobData = QuartzJobData.of(jobDataMap); + assertEquals(1, quartzJobData.getProjectId()); + assertEquals(2, quartzJobData.getScheduleId()); + } + + @Test + void toJobDataMap() { + QuartzJobData quartzJobData = QuartzJobData.of(1, 2); + JobDataMap jobDataMap = quartzJobData.toJobDataMap(); + QuartzJobData quartzJobData1 = QuartzJobData.of(jobDataMap); + assertEquals(1, quartzJobData1.getProjectId()); + assertEquals(2, quartzJobData1.getScheduleId()); + } +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtilsTest.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKeyTest.java similarity index 57% rename from dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtilsTest.java rename to dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKeyTest.java index 328316cc391f..aafbadefb13e 100644 --- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtilsTest.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzJobKeyTest.java @@ 
-15,25 +15,27 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.data.quality.utils; +package org.apache.dolphinscheduler.scheduler.quartz; + +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.quartz.JobKey; -public class ParserUtilsTest { +class QuartzJobKeyTest { @Test - public void testParserUtils() { - String testStr = "aaa$bbb$ccc%ddd^eee#fff"; - String encode = ParserUtils.encode(testStr); - String decode = ParserUtils.decode(encode); - Assertions.assertEquals(testStr, decode); - - String blank = ""; - Assertions.assertEquals(ParserUtils.encode(blank), blank); - Assertions.assertEquals(ParserUtils.decode(blank), blank); + void of() { + QuartzJobKey quartzJobKey = QuartzJobKey.of(1, 2); + assertEquals(1, quartzJobKey.getProjectId()); + assertEquals(2, quartzJobKey.getSchedulerId()); + } - Assertions.assertNull(ParserUtils.encode(null)); - Assertions.assertNull(ParserUtils.decode(null)); + @Test + void toJobKey() { + QuartzJobKey quartzJobKey = QuartzJobKey.of(1, 2); + JobKey jobKey = quartzJobKey.toJobKey(); + assertEquals("job_2", jobKey.getName()); + assertEquals("jobgroup_1", jobKey.getGroup()); } } diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnumTest.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnumTest.java new file mode 100644 index 000000000000..943b2cc125a0 --- /dev/null +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/test/java/org/apache/dolphinscheduler/scheduler/quartz/exception/QuartzSchedulerExceptionEnumTest.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.scheduler.quartz.exception; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.dolphinscheduler.scheduler.api.SchedulerException; + +import org.junit.jupiter.api.Test; + +class QuartzSchedulerExceptionEnumTest { + + @Test + void testException() { + SchedulerException schedulerException = + new SchedulerException(QuartzSchedulerExceptionEnum.QUARTZ_SCHEDULER_START_ERROR); + assertEquals("Scheduler[QUARTZ-001] Quartz Scheduler start error", schedulerException.getMessage()); + } + +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/ServiceConfiguration.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/ServiceConfiguration.java new file mode 100644 index 000000000000..fa831a5b6bb8 --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/ServiceConfiguration.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service; + +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; + +@ComponentScan("org.apache.dolphinscheduler.service") +@Configuration +public class ServiceConfiguration { +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManager.java index 182ed678a0c2..3056a14f5b4c 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManager.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManager.java @@ -43,6 +43,7 @@ import org.apache.dolphinscheduler.dao.entity.event.TaskStartListenerEvent; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ListenerEventMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.collections4.CollectionUtils; @@ -71,6 +72,9 @@ public class ListenerEventAlertManager { @Autowired private AlertPluginInstanceMapper alertPluginInstanceMapper; + @Autowired + private ProcessService processService; + public 
void publishServerDownListenerEvent(String host, String type) { ServerDownListenerEvent event = new ServerDownListenerEvent(); event.setEventTime(new Date()); @@ -214,8 +218,9 @@ public void publishTaskEndListenerEvent(ProcessInstance processInstance, } public void publishTaskFailListenerEvent(ProcessInstance processInstance, - TaskInstance taskInstance, - ProjectUser projectUser) { + TaskInstance taskInstance) { + ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); + TaskFailListenerEvent event = new TaskFailListenerEvent(); event.setProjectCode(projectUser.getProjectCode()); event.setProjectName(projectUser.getProjectName()); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandService.java index cff73c503f2c..43b81c4e5c45 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandService.java @@ -22,8 +22,6 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import java.util.List; - /** * Command Service */ @@ -44,15 +42,6 @@ public interface CommandService { */ int createCommand(Command command); - /** - * Get command page - * @param pageSize page size - * @param masterCount master count - * @param thisMasterSlot master slot - * @return command page - */ - List findCommandPageBySlot(int pageSize, int masterCount, int thisMasterSlot); - /** * check the input command exists in queue list * diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java index 
483899446b2a..ee833a80b045 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java @@ -57,7 +57,6 @@ import org.springframework.stereotype.Component; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.Lists; import io.micrometer.core.annotation.Counted; /** @@ -107,14 +106,6 @@ public int createCommand(Command command) { return result; } - @Override - public List findCommandPageBySlot(int pageSize, int masterCount, int thisMasterSlot) { - if (masterCount <= 0) { - return Lists.newArrayList(); - } - return commandMapper.queryCommandPageBySlot(pageSize, masterCount, thisMasterSlot); - } - @Override public boolean verifyIsNeedCreateCommand(Command command) { boolean isNeedCreate = true; diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java index afcfae3fd677..5f042b75938c 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/expand/CuringParamsServiceImpl.java @@ -45,7 +45,9 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils; import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.PropertyUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import java.util.Date; @@ -54,6 +56,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import 
java.util.stream.Collectors; import javax.annotation.Nullable; @@ -150,9 +153,11 @@ public Map parseWorkflowStartParam(@Nullable Map(); } String startParamJson = cmdParam.get(CommandKeyConstants.CMD_PARAM_START_PARAMS); - Map startParamMap = JSONUtils.toMap(startParamJson); - return startParamMap.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - entry -> new Property(entry.getKey(), Direct.IN, DataType.VARCHAR, entry.getValue()))); + List propertyList = PropertyUtils.startParamsTransformPropertyList(startParamJson); + if (CollectionUtils.isEmpty(propertyList)) { + return new HashMap<>(); + } + return propertyList.stream().collect(Collectors.toMap(Property::getProp, Function.identity())); } @Override @@ -181,8 +186,7 @@ public Map paramParsingPreparation(@NonNull TaskInstance taskI Map prepareParamsMap = new HashMap<>(); // assign value to definedParams here - Map globalParamsMap = setGlobalParamsMap(processInstance); - Map globalParams = ParameterUtils.getUserDefParamsMap(globalParamsMap); + Map globalParams = setGlobalParamsMap(processInstance); // combining local and global parameters Map localParams = parameters.getInputLocalParametersMap(); @@ -287,15 +291,16 @@ private Map setBuiltInParamsMap(@NonNull TaskInstance taskInstan Long.toString(taskInstance.getProcessInstance().getProcessDefinition().getProjectCode())); return params; } - private Map setGlobalParamsMap(ProcessInstance processInstance) { - Map globalParamsMap = new HashMap<>(16); + private Map setGlobalParamsMap(ProcessInstance processInstance) { + Map globalParamsMap = new HashMap<>(16); // global params string String globalParamsStr = processInstance.getGlobalParams(); if (globalParamsStr != null) { List globalParamsList = JSONUtils.toList(globalParamsStr, Property.class); globalParamsMap - .putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); + .putAll(globalParamsList.stream() + .collect(Collectors.toMap(Property::getProp, 
Function.identity()))); } return globalParamsMap; } @@ -323,7 +328,7 @@ public Map getProjectParameterMap(long projectCode) { projectParameterList.forEach(projectParameter -> { Property property = new Property(projectParameter.getParamName(), Direct.IN, - DataType.VARCHAR, + Enum.valueOf(DataType.class, projectParameter.getParamDataType()), projectParameter.getParamValue()); result.put(projectParameter.getParamName(), property); }); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/model/TaskNode.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/model/TaskNode.java index 954003ec41c3..c9601f15b5c0 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/model/TaskNode.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/model/TaskNode.java @@ -17,27 +17,17 @@ package org.apache.dolphinscheduler.service.model; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_BLOCKING; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_CONDITIONS; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_SWITCH; - import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskExecuteType; -import org.apache.dolphinscheduler.common.model.PreviousTaskNode; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.plugin.task.api.enums.TaskTimeoutStrategy; -import org.apache.dolphinscheduler.plugin.task.api.parameters.TaskTimeoutParameter; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Objects; -import com.fasterxml.jackson.core.type.TypeReference; import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; @@ -118,11 +108,6 @@ public class TaskNode { @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String preTasks; - /** - * node dependency list - */ - private List preTaskNodeList; - /** * users store additional information */ @@ -135,25 +120,6 @@ public class TaskNode { */ private List depList; - /** - * outer dependency information - */ - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String dependence; - - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String conditionResult; - - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String switchResult; - - @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) - @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) - private String waitStartTimeout; - /** * task instance priority */ @@ -312,10 +278,8 @@ public boolean equals(Object o) { && Objects.equals(preTasks, taskNode.preTasks) && Objects.equals(extras, taskNode.extras) && Objects.equals(runFlag, taskNode.runFlag) - && Objects.equals(dependence, taskNode.dependence) && Objects.equals(workerGroup, taskNode.workerGroup) && Objects.equals(environmentCode, taskNode.environmentCode) - && Objects.equals(conditionResult, taskNode.conditionResult) && CollectionUtils.isEqualCollection(depList, taskNode.depList) && Objects.equals(taskExecuteType, taskNode.taskExecuteType); } @@ -325,14 +289,6 @@ public int hashCode() { return Objects.hash(name, desc, type, params, preTasks, extras, depList, runFlag); } - public String getDependence() { - return dependence; - } - - public void setDependence(String dependence) { - this.dependence = dependence; - } - public int 
getMaxRetryTimes() { return maxRetryTimes; } @@ -373,14 +329,6 @@ public void setWorkerGroup(String workerGroup) { this.workerGroup = workerGroup; } - public String getConditionResult() { - return conditionResult; - } - - public void setConditionResult(String conditionResult) { - this.conditionResult = conditionResult; - } - public int getDelayTime() { return delayTime; } @@ -405,64 +353,6 @@ public void setVersion(int version) { this.version = version; } - /** - * get task time out parameter - * - * @return task time out parameter - */ - public TaskTimeoutParameter getTaskTimeoutParameter() { - if (!StringUtils.isEmpty(this.getTimeout())) { - String formatStr = - String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name()); - String taskTimeout = this.getTimeout().replace(formatStr, TaskTimeoutStrategy.WARNFAILED.name()); - return JSONUtils.parseObject(taskTimeout, TaskTimeoutParameter.class); - } - return new TaskTimeoutParameter(false); - } - - public boolean isConditionsTask() { - return TASK_TYPE_CONDITIONS.equalsIgnoreCase(this.getType()); - } - - public boolean isSwitchTask() { - return TASK_TYPE_SWITCH.equalsIgnoreCase(this.getType()); - } - - public List getPreTaskNodeList() { - return preTaskNodeList; - } - - public boolean isBlockingTask() { - return TASK_TYPE_BLOCKING.equalsIgnoreCase(this.getType()); - } - - public void setPreTaskNodeList(List preTaskNodeList) { - this.preTaskNodeList = preTaskNodeList; - } - - public String getTaskParams() { - Map taskParams = JSONUtils.parseObject(this.params, new TypeReference>() { - }); - - if (taskParams == null) { - taskParams = new HashMap<>(); - } - taskParams.put(Constants.CONDITION_RESULT, this.conditionResult); - taskParams.put(Constants.DEPENDENCE, this.dependence); - taskParams.put(Constants.SWITCH_RESULT, this.switchResult); - taskParams.put(Constants.WAIT_START_TIMEOUT, this.waitStartTimeout); - return JSONUtils.toJsonString(taskParams); - } - - public Map 
taskParamsToJsonObj(String taskParams) { - Map taskParamsMap = JSONUtils.parseObject(taskParams, new TypeReference>() { - }); - if (taskParamsMap == null) { - taskParamsMap = new HashMap<>(); - } - return taskParamsMap; - } - @Override public String toString() { return "TaskNode{" @@ -478,11 +368,8 @@ public String toString() { + ", retryInterval=" + retryInterval + ", params='" + params + '\'' + ", preTasks='" + preTasks + '\'' - + ", preTaskNodeList=" + preTaskNodeList + ", extras='" + extras + '\'' + ", depList=" + depList - + ", dependence='" + dependence + '\'' - + ", conditionResult='" + conditionResult + '\'' + ", taskInstancePriority=" + taskInstancePriority + ", workerGroup='" + workerGroup + '\'' + ", environmentCode=" + environmentCode @@ -500,22 +387,6 @@ public Long getEnvironmentCode() { return this.environmentCode; } - public String getSwitchResult() { - return switchResult; - } - - public void setSwitchResult(String switchResult) { - this.switchResult = switchResult; - } - - public String getWaitStartTimeout() { - return this.waitStartTimeout; - } - - public void setWaitStartTimeout(String waitStartTimeout) { - this.waitStartTimeout = waitStartTimeout; - } - public int getTaskGroupId() { return taskGroupId; } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 101350e90dd7..b9e4d811f31d 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -43,7 +43,6 @@ import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import 
org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.service.exceptions.CronParseException; import org.apache.dolphinscheduler.service.model.TaskNode; @@ -120,8 +119,6 @@ ProcessInstance constructProcessInstance(Command command, DataSource findDataSourceById(int id); - List queryUdfFunListByIds(Integer[] ids); - ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId); List listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType); @@ -176,8 +173,6 @@ List transformTask(List taskRelationList, DqComparisonType getComparisonTypeById(int id); - void changeTaskGroupQueueStatus(int taskId, TaskGroupQueueStatus status); - TaskGroupQueue insertIntoTaskGroupQueue(Integer taskId, String taskName, Integer groupId, @@ -189,7 +184,7 @@ TaskGroupQueue insertIntoTaskGroupQueue(Integer taskId, public String findConfigYamlByName(String clusterName); - void forceProcessInstanceSuccessByTaskInstanceId(Integer taskInstanceId); + void forceProcessInstanceSuccessByTaskInstanceId(TaskInstance taskInstance); void saveCommandTrigger(Integer commandId, Integer processInstanceId); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java index 261637529ad7..2b0a8e073cee 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java @@ -72,7 +72,6 @@ import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ClusterMapper; 
import org.apache.dolphinscheduler.dao.mapper.CommandMapper; @@ -98,7 +97,6 @@ import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper; import org.apache.dolphinscheduler.dao.repository.ProcessInstanceDao; @@ -107,11 +105,12 @@ import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao; import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao; import org.apache.dolphinscheduler.dao.utils.DqRuleUtils; +import org.apache.dolphinscheduler.dao.utils.EnvironmentUtils; +import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils; import org.apache.dolphinscheduler.extract.base.client.SingletonJdkDynamicRpcClientProxyFactory; import org.apache.dolphinscheduler.extract.common.ILogService; import org.apache.dolphinscheduler.extract.master.ITaskInstanceExecutionEventListener; import org.apache.dolphinscheduler.extract.master.transportor.WorkflowInstanceStateChangeEvent; -import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState; @@ -119,6 +118,7 @@ import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.TaskTimeoutParameter; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.command.CommandService; import org.apache.dolphinscheduler.service.cron.CronUtils; import 
org.apache.dolphinscheduler.service.exceptions.CronParseException; @@ -211,9 +211,6 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private ScheduleMapper scheduleMapper; - @Autowired - private UdfFuncMapper udfFuncMapper; - @Autowired private TenantMapper tenantMapper; @@ -262,9 +259,6 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private WorkFlowLineageMapper workFlowLineageMapper; - @Autowired - private TaskPluginManager taskPluginManager; - @Autowired private ClusterMapper clusterMapper; @@ -276,6 +270,7 @@ public class ProcessServiceImpl implements ProcessService { @Autowired private TriggerRelationService triggerRelationService; + /** * todo: split this method * handle Command (construct ProcessInstance from Command) , wrapped in transaction @@ -576,10 +571,8 @@ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefi // set process instance priority processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); - String workerGroup = StringUtils.defaultIfEmpty(command.getWorkerGroup(), Constants.DEFAULT_WORKER_GROUP); - processInstance.setWorkerGroup(workerGroup); - processInstance - .setEnvironmentCode(Objects.isNull(command.getEnvironmentCode()) ? 
-1 : command.getEnvironmentCode()); + processInstance.setWorkerGroup(WorkerGroupUtils.getWorkerGroupOrDefault(command.getWorkerGroup())); + processInstance.setEnvironmentCode(EnvironmentUtils.getEnvironmentCodeOrDefault(command.getEnvironmentCode())); processInstance.setTimeout(processDefinition.getTimeout()); processInstance.setTenantCode(command.getTenantCode()); return processInstance; @@ -621,13 +614,13 @@ public void setGlobalParamIfCommanded(ProcessDefinition processDefinition, Map * the workflow provides a tenant and uses the provided tenant; * when no tenant is provided or the provided tenant is the default tenant, \ * the user's tenant created by the workflow is used * * @param tenantCode tenantCode - * @param userId userId + * @param userId userId * @return tenant code */ @Override @@ -798,7 +791,7 @@ private Boolean checkCmdParam(Command command, Map cmdParam) { // recover tolerance fault process // If the workflow instance is in ready state, we will change to running, this can avoid the workflow // instance - // status is not correct with taskInsatnce status + // status is not correct with taskInstance status if (processInstance.getState() == WorkflowExecutionStatus.READY_PAUSE || processInstance.getState() == WorkflowExecutionStatus.READY_STOP) { // todo: If we handle the ready state in WorkflowExecuteRunnable then we can remove below code @@ -1054,7 +1047,7 @@ private String joinVarPool(String parentValPool, String subValPool) { */ private void initTaskInstance(TaskInstance taskInstance) { - if (!taskInstance.isSubProcess() + if (!TaskTypeUtils.isSubWorkflowTask(taskInstance.getTaskType()) && (taskInstance.getState().isKill() || taskInstance.getState().isFailure())) { taskInstance.setFlag(Flag.NO); taskInstanceDao.updateById(taskInstance); @@ -1172,7 +1165,7 @@ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProc */ @Override public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) { - if 
(!task.isSubProcess()) { + if (!TaskTypeUtils.isSubWorkflowTask(task.getTaskType())) { return; } // check create sub work flow firstly @@ -1504,33 +1497,50 @@ public DataSource findDataSourceById(int id) { } /** - * find udf function list by id list string + * query project name and user name by processInstanceId. * - * @param ids ids - * @return udf function list + * @param processInstanceId processInstanceId + * @return projectName and userName */ @Override - public List queryUdfFunListByIds(Integer[] ids) { - return udfFuncMapper.queryUdfByIdStr(ids, null); + public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) { + return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId); } /** - * query project name and user name by processInstanceId. + * get user by user id * - * @param processInstanceId processInstanceId - * @return projectName and userName + * @param userId user id + * @return User */ @Override - public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) { - return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId); + public User getUserById(int userId) { + return userMapper.selectById(userId); + } + + /** + * format task app id in task instance + */ + @Override + public String formatTaskAppId(TaskInstance taskInstance) { + ProcessInstance processInstance = findProcessInstanceById(taskInstance.getProcessInstanceId()); + if (processInstance == null) { + return ""; + } + ProcessDefinition definition = findProcessDefinition(processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion()); + if (definition == null) { + return ""; + } + return String.format("%s_%s_%s", definition.getId(), processInstance.getId(), taskInstance.getId()); } /** - * list unauthorized udf function + * list unauthorized * * @param userId user id * @param needChecks data source id array - * @return unauthorized udf function list + * @return unauthorized */ 
@Override public List listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) { @@ -1545,11 +1555,6 @@ public List listUnauthorized(int userId, T[] needChecks, AuthorizationTyp .stream().map(DataSource::getId).collect(toSet()); originResSet.removeAll(authorizedDatasources); break; - case UDF: - Set authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream() - .map(UdfFunc::getId).collect(toSet()); - originResSet.removeAll(authorizedUdfs); - break; default: break; } @@ -1560,34 +1565,6 @@ public List listUnauthorized(int userId, T[] needChecks, AuthorizationTyp return resultList; } - /** - * get user by user id - * - * @param userId user id - * @return User - */ - @Override - public User getUserById(int userId) { - return userMapper.selectById(userId); - } - - /** - * format task app id in task instance - */ - @Override - public String formatTaskAppId(TaskInstance taskInstance) { - ProcessInstance processInstance = findProcessInstanceById(taskInstance.getProcessInstanceId()); - if (processInstance == null) { - return ""; - } - ProcessDefinition definition = findProcessDefinition(processInstance.getProcessDefinitionCode(), - processInstance.getProcessDefinitionVersion()); - if (definition == null) { - return ""; - } - return String.format("%s_%s_%s", definition.getId(), processInstance.getId(), taskInstance.getId()); - } - /** * switch process definition version to process definition log version */ @@ -1679,7 +1656,7 @@ public int saveTaskDefine(User operator, long projectCode, List transformTask(List taskRelationList, : Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); taskNode.setMaxRetryTimes(taskDefinitionLog.getFailRetryTimes()); taskNode.setRetryInterval(taskDefinitionLog.getFailRetryInterval()); - Map taskParamsMap = taskNode.taskParamsToJsonObj(taskDefinitionLog.getTaskParams()); - taskNode.setConditionResult(JSONUtils.toJsonString(taskParamsMap.get(Constants.CONDITION_RESULT))); - 
taskNode.setSwitchResult(JSONUtils.toJsonString(taskParamsMap.get(Constants.SWITCH_RESULT))); - taskNode.setDependence(JSONUtils.toJsonString(taskParamsMap.get(Constants.DEPENDENCE))); - taskParamsMap.remove(Constants.CONDITION_RESULT); - taskParamsMap.remove(Constants.DEPENDENCE); - taskNode.setParams(JSONUtils.toJsonString(taskParamsMap)); + taskNode.setParams(taskDefinitionLog.getTaskParams()); taskNode.setTaskInstancePriority(taskDefinitionLog.getTaskPriority()); taskNode.setWorkerGroup(taskDefinitionLog.getWorkerGroup()); taskNode.setEnvironmentCode(taskDefinitionLog.getEnvironmentCode()); @@ -2041,22 +2013,6 @@ public DqComparisonType getComparisonTypeById(int id) { return dqComparisonTypeMapper.selectById(id); } - /** - * release the TGQ resource when the corresponding task is finished. - * - * @param taskId task id - * @return the result code and msg - */ - - @Override - public void changeTaskGroupQueueStatus(int taskId, TaskGroupQueueStatus status) { - TaskGroupQueue taskGroupQueue = taskGroupQueueMapper.queryByTaskId(taskId); - taskGroupQueue.setInQueue(Flag.NO.getCode()); - taskGroupQueue.setStatus(status); - taskGroupQueue.setUpdateTime(new Date(System.currentTimeMillis())); - taskGroupQueueMapper.updateById(taskGroupQueue); - } - @Override public TaskGroupQueue insertIntoTaskGroupQueue(Integer taskInstanceId, String taskName, @@ -2113,11 +2069,7 @@ public String findConfigYamlByName(String clusterName) { } @Override - public void forceProcessInstanceSuccessByTaskInstanceId(Integer taskInstanceId) { - TaskInstance task = taskInstanceMapper.selectById(taskInstanceId); - if (task == null) { - return; - } + public void forceProcessInstanceSuccessByTaskInstanceId(TaskInstance task) { ProcessInstance processInstance = findProcessInstanceDetailById(task.getProcessInstanceId()).orElse(null); if (processInstance != null && (processInstance.getState().isFailure() || processInstance.getState().isStop())) { @@ -2138,7 +2090,7 @@ public void 
forceProcessInstanceSuccessByTaskInstanceId(Integer taskInstanceId) List failTaskList = validTaskList.stream() .filter(instance -> instance.getState().isFailure() || instance.getState().isKill()) .map(TaskInstance::getId).collect(Collectors.toList()); - if (failTaskList.size() == 1 && failTaskList.contains(taskInstanceId)) { + if (failTaskList.size() == 1 && failTaskList.contains(task.getId())) { processInstance.setStateWithDesc(WorkflowExecutionStatus.SUCCESS, "success by task force success"); processInstanceDao.updateById(processInstance); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationService.java index b78f4779313d..6de2506afd18 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationService.java @@ -20,6 +20,8 @@ import org.apache.dolphinscheduler.common.enums.ApiTriggerType; import org.apache.dolphinscheduler.dao.entity.TriggerRelation; +import java.util.List; + import org.springframework.stereotype.Component; /** @@ -30,7 +32,7 @@ public interface TriggerRelationService { void saveTriggerToDb(ApiTriggerType type, Long triggerCode, Integer jobId); - TriggerRelation queryByTypeAndJobId(ApiTriggerType apiTriggerType, int jobId); + List queryByTypeAndJobId(ApiTriggerType apiTriggerType, int jobId); int saveCommandTrigger(Integer commandId, Integer processInstanceId); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceImpl.java index df41c41eef2f..0344ea87ea01 100644 --- 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceImpl.java @@ -21,14 +21,20 @@ import org.apache.dolphinscheduler.dao.entity.TriggerRelation; import org.apache.dolphinscheduler.dao.mapper.TriggerRelationMapper; +import org.apache.commons.collections4.CollectionUtils; + import java.util.Date; +import java.util.List; + +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** - * Trigger relation operator to db + * Trigger relation operator to db */ +@Slf4j @Component public class TriggerRelationServiceImpl implements TriggerRelationService { @@ -45,29 +51,44 @@ public void saveTriggerToDb(ApiTriggerType type, Long triggerCode, Integer jobId triggerRelation.setUpdateTime(new Date()); triggerRelationMapper.upsert(triggerRelation); } + @Override - public TriggerRelation queryByTypeAndJobId(ApiTriggerType apiTriggerType, int jobId) { + public List queryByTypeAndJobId(ApiTriggerType apiTriggerType, int jobId) { return triggerRelationMapper.queryByTypeAndJobId(apiTriggerType.getCode(), jobId); } @Override public int saveCommandTrigger(Integer commandId, Integer processInstanceId) { - TriggerRelation exist = queryByTypeAndJobId(ApiTriggerType.PROCESS, processInstanceId); - if (exist == null) { + List existTriggers = queryByTypeAndJobId(ApiTriggerType.PROCESS, processInstanceId); + if (CollectionUtils.isEmpty(existTriggers)) { return 0; } - saveTriggerToDb(ApiTriggerType.COMMAND, exist.getTriggerCode(), commandId); - return 1; + existTriggers.forEach(triggerRelation -> saveTriggerToDb(ApiTriggerType.COMMAND, + triggerRelation.getTriggerCode(), commandId)); + int triggerRelationSize = existTriggers.size(); + if (triggerRelationSize > 1) { + // Fix https://github.com/apache/dolphinscheduler/issues/15864 + // 
This case shouldn't happen + log.error("The PROCESS TriggerRelation of command: {} is more than one", commandId); + } + return existTriggers.size(); } @Override public int saveProcessInstanceTrigger(Integer commandId, Integer processInstanceId) { - TriggerRelation exist = queryByTypeAndJobId(ApiTriggerType.COMMAND, commandId); - if (exist == null) { + List existTriggers = queryByTypeAndJobId(ApiTriggerType.COMMAND, commandId); + if (CollectionUtils.isEmpty(existTriggers)) { return 0; } - saveTriggerToDb(ApiTriggerType.PROCESS, exist.getTriggerCode(), processInstanceId); - return 1; + existTriggers.forEach(triggerRelation -> saveTriggerToDb(ApiTriggerType.PROCESS, + triggerRelation.getTriggerCode(), processInstanceId)); + int triggerRelationSize = existTriggers.size(); + if (triggerRelationSize > 1) { + // Fix https://github.com/apache/dolphinscheduler/issues/15864 + // This case shouldn't happen + log.error("The COMMAND TriggerRelation of command: {} is more than one", commandId); + } + return existTriggers.size(); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueue.java index d4fe74cd9f79..2d60eb20965c 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueue.java @@ -20,10 +20,10 @@ import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.NetUtils; +import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; -import java.util.Iterator; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.TimeUnit; @@ -80,15 +80,23 @@ public void clear() { private void refreshMasterList() { hostIndexMap.clear(); - Iterator 
iterator = queue.iterator(); int index = 0; - while (iterator.hasNext()) { - Server server = iterator.next(); + for (Server server : getOrderedElements()) { String addr = NetUtils.getAddr(server.getHost(), server.getPort()); hostIndexMap.put(addr, index); index += 1; } + } + /** + * get ordered collection of priority queue + * + * @return ordered collection + */ + Server[] getOrderedElements() { + Server[] nQueue = queue.toArray(new Server[0]); + Arrays.sort(nQueue, new ServerComparator()); + return nQueue; } public int getIndex(String addr) { diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java index c11c4fe5a9e3..d02c5715e381 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/StandByTaskInstancePriorityQueue.java @@ -36,16 +36,10 @@ */ public class StandByTaskInstancePriorityQueue implements TaskPriorityQueue { - /** - * queue size - */ - private static final Integer QUEUE_MAX_SIZE = 3000; - /** * queue */ - private final PriorityQueue queue = - new PriorityQueue<>(QUEUE_MAX_SIZE, new TaskInstancePriorityComparator()); + private final PriorityQueue queue = new PriorityQueue<>(new TaskInstancePriorityComparator()); private final Set taskInstanceIdentifySet = Collections.synchronizedSet(new HashSet<>()); /** diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java deleted file mode 100644 index e00212823a80..000000000000 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java +++ /dev/null @@ -1,108 +0,0 
@@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.service.utils; - -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.constants.DataSourceConstants; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; - -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.lang3.StringUtils; - -import java.net.URL; -import java.nio.charset.StandardCharsets; - -import lombok.extern.slf4j.Slf4j; - -/** - * common utils - */ -@Slf4j -public class CommonUtils { - - private static final Base64 BASE64 = new Base64(); - - protected CommonUtils() { - throw new UnsupportedOperationException("Construct CommonUtils"); - } - - /** - * @return get the path of system environment variables - */ - public static String getSystemEnvPath() { - String envPath = PropertyUtils.getString(Constants.DOLPHINSCHEDULER_ENV_PATH); - if (StringUtils.isEmpty(envPath)) { - URL envDefaultPath = CommonUtils.class.getClassLoader().getResource(Constants.ENV_PATH); - - if (envDefaultPath != null) { - envPath = envDefaultPath.getPath(); - log.debug("env path :{}", envPath); - } else { - envPath = "/etc/profile"; - } - } - - return envPath; - } 
- - /** - * encode password - */ - public static String encodePassword(String password) { - if (StringUtils.isEmpty(password)) { - return StringUtils.EMPTY; - } - // if encryption is not turned on, return directly - boolean encryptionEnable = PropertyUtils.getBoolean(DataSourceConstants.DATASOURCE_ENCRYPTION_ENABLE, false); - if (!encryptionEnable) { - return password; - } - - // Using Base64 + salt to process password - String salt = PropertyUtils.getString(DataSourceConstants.DATASOURCE_ENCRYPTION_SALT, - DataSourceConstants.DATASOURCE_ENCRYPTION_SALT_DEFAULT); - String passwordWithSalt = salt + new String(BASE64.encode(password.getBytes(StandardCharsets.UTF_8))); - return new String(BASE64.encode(passwordWithSalt.getBytes(StandardCharsets.UTF_8))); - } - - /** - * decode password - */ - public static String decodePassword(String password) { - if (StringUtils.isEmpty(password)) { - return StringUtils.EMPTY; - } - - // if encryption is not turned on, return directly - boolean encryptionEnable = PropertyUtils.getBoolean(DataSourceConstants.DATASOURCE_ENCRYPTION_ENABLE, false); - if (!encryptionEnable) { - return password; - } - - // Using Base64 + salt to process password - String salt = PropertyUtils.getString(DataSourceConstants.DATASOURCE_ENCRYPTION_SALT, - DataSourceConstants.DATASOURCE_ENCRYPTION_SALT_DEFAULT); - String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8); - if (!passwordWithSalt.startsWith(salt)) { - log.warn("There is a password and salt mismatch: {} ", password); - return password; - } - return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8); - } - -} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java index ee5e97cf551e..7e6cbda608b9 100644 --- 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java @@ -23,10 +23,12 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.model.SwitchResultVo; import org.apache.dolphinscheduler.plugin.task.api.parameters.ConditionsParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; +import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.utils.TaskTypeUtils; import org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.dolphinscheduler.service.process.ProcessDag; @@ -35,6 +37,7 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -44,6 +47,9 @@ import lombok.extern.slf4j.Slf4j; +import com.fasterxml.jackson.core.type.TypeReference; +import com.google.common.collect.Lists; + /** * dag tools */ @@ -76,10 +82,10 @@ public static List generateRelationListByFlowNodes(List generateFlowNodeListByStartNode(List taskNodeList, @@ -139,7 +145,7 @@ public static List generateFlowNodeListByStartNode(List task /** * find all the nodes that depended on the start node * - * @param startNode startNode + * @param startNode startNode * @param taskNodeList taskNodeList * @return task node list */ @@ -166,9 +172,9 @@ private static List getFlowNodeListPost(TaskNode startNode, /** * find all nodes that start nodes depend on. 
* - * @param startNode startNode + * @param startNode startNode * @param recoveryNodeCodeList recoveryNodeCodeList - * @param taskNodeList taskNodeList + * @param taskNodeList taskNodeList * @return task node list */ private static List getFlowNodeListPre(TaskNode startNode, @@ -204,10 +210,10 @@ private static List getFlowNodeListPre(TaskNode startNode, /** * generate dag by start nodes and recovery nodes * - * @param totalTaskNodeList totalTaskNodeList - * @param startNodeNameList startNodeNameList + * @param totalTaskNodeList totalTaskNodeList + * @param startNodeNameList startNodeNameList * @param recoveryNodeCodeList recoveryNodeCodeList - * @param depNodeType depNodeType + * @param depNodeType depNodeType * @return process dag * @throws Exception if error throws Exception */ @@ -228,27 +234,11 @@ public static ProcessDag generateFlowDag(List totalTaskNodeList, return processDag; } - /** - * find node by node name - * - * @param nodeDetails nodeDetails - * @param nodeName nodeName - * @return task node - */ - public static TaskNode findNodeByName(List nodeDetails, String nodeName) { - for (TaskNode taskNode : nodeDetails) { - if (taskNode.getName().equals(nodeName)) { - return taskNode; - } - } - return null; - } - /** * find node by node code * * @param nodeDetails nodeDetails - * @param nodeCode nodeCode + * @param nodeCode nodeCode * @return task node */ public static TaskNode findNodeByCode(List nodeDetails, Long nodeCode) { @@ -263,8 +253,8 @@ public static TaskNode findNodeByCode(List nodeDetails, Long nodeCode) /** * the task can be submit when all the depends nodes are forbidden or complete * - * @param taskNode taskNode - * @param dag dag + * @param taskNode taskNode + * @param dag dag * @param completeTaskList completeTaskList * @return can submit */ @@ -305,10 +295,10 @@ public static Set parsePostNodes(Long preNodeCode, if (preNodeCode == null) { startVertexes = dag.getBeginNode(); - } else if (dag.getNode(preNodeCode).isConditionsTask()) { + } 
else if (TaskTypeUtils.isConditionTask(dag.getNode(preNodeCode).getType())) { List conditionTaskList = parseConditionTask(preNodeCode, skipTaskNodeList, dag, completeTaskList); startVertexes.addAll(conditionTaskList); - } else if (dag.getNode(preNodeCode).isSwitchTask()) { + } else if (TaskTypeUtils.isSwitchTask(dag.getNode(preNodeCode).getType())) { List conditionTaskList = parseSwitchTask(preNodeCode, skipTaskNodeList, dag, completeTaskList); startVertexes.addAll(conditionTaskList); } else { @@ -321,7 +311,7 @@ public static Set parsePostNodes(Long preNodeCode, continue; } if (isTaskNodeNeedSkip(taskNode, skipTaskNodeList)) { - setTaskNodeSkip(subsequent, dag, completeTaskList, skipTaskNodeList); + setTaskNodeSkip(subsequent, dag, skipTaskNodeList); continue; } if (!DagHelper.allDependsForbiddenOrEnd(taskNode, dag, skipTaskNodeList, completeTaskList)) { @@ -362,7 +352,7 @@ public static List parseConditionTask(Long nodeCode, Map completeTaskList) { List conditionTaskList = new ArrayList<>(); TaskNode taskNode = dag.getNode(nodeCode); - if (!taskNode.isConditionsTask()) { + if (!TaskTypeUtils.isConditionTask(taskNode.getType())) { return conditionTaskList; } if (!completeTaskList.containsKey(nodeCode)) { @@ -370,21 +360,21 @@ public static List parseConditionTask(Long nodeCode, } TaskInstance taskInstance = completeTaskList.get(nodeCode); ConditionsParameters conditionsParameters = - JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); + JSONUtils.parseObject(taskInstance.getTaskParams(), new TypeReference() { + }); + ConditionsParameters.ConditionResult conditionResult = conditionsParameters.getConditionResult(); + List skipNodeList = new ArrayList<>(); - if (taskInstance.getState().isSuccess()) { - conditionTaskList = conditionsParameters.getSuccessNode(); - skipNodeList = conditionsParameters.getFailedNode(); - } else if (taskInstance.getState().isFailure()) { - conditionTaskList = conditionsParameters.getFailedNode(); - 
skipNodeList = conditionsParameters.getSuccessNode(); + if (conditionResult.isConditionSuccess()) { + conditionTaskList = conditionResult.getSuccessNode(); + skipNodeList = conditionResult.getFailedNode(); } else { - conditionTaskList.add(nodeCode); + conditionTaskList = conditionResult.getFailedNode(); + skipNodeList = conditionResult.getSuccessNode(); } - // the skipNodeList maybe null if no next task - skipNodeList = Optional.ofNullable(skipNodeList).orElse(new ArrayList<>()); - for (Long failedNode : skipNodeList) { - setTaskNodeSkip(failedNode, dag, completeTaskList, skipTaskNodeList); + + if (CollectionUtils.isNotEmpty(skipNodeList)) { + skipNodeList.forEach(skipNode -> setTaskNodeSkip(skipNode, dag, skipTaskNodeList)); } // the conditionTaskList maybe null if no next task conditionTaskList = Optional.ofNullable(conditionTaskList).orElse(new ArrayList<>()); @@ -404,88 +394,57 @@ public static List parseSwitchTask(Long nodeCode, Map completeTaskList) { List conditionTaskList = new ArrayList<>(); TaskNode taskNode = dag.getNode(nodeCode); - if (!taskNode.isSwitchTask()) { + if (!SwitchLogicTaskChannelFactory.NAME.equals(taskNode.getType())) { return conditionTaskList; } if (!completeTaskList.containsKey(nodeCode)) { return conditionTaskList; } - conditionTaskList = skipTaskNode4Switch(taskNode, skipTaskNodeList, completeTaskList, dag); + conditionTaskList = skipTaskNode4Switch(skipTaskNodeList, completeTaskList.get(nodeCode), dag); return conditionTaskList; } - public static List skipTaskNode4Switch(TaskNode taskNode, - Map skipTaskNodeList, - Map completeTaskList, + public static List skipTaskNode4Switch(Map skipTaskNodeList, + TaskInstance taskInstance, DAG dag) { - SwitchParameters switchParameters = - completeTaskList.get(taskNode.getCode()).getSwitchDependency(); - int resultConditionLocation = switchParameters.getResultConditionLocation(); - List conditionResultVoList = switchParameters.getDependTaskList(); - List switchTaskList = 
conditionResultVoList.get(resultConditionLocation).getNextNode(); - Set switchNeedWorkCodes = new HashSet<>(); - if (CollectionUtils.isEmpty(switchTaskList)) { - return new ArrayList<>(); - } - // get all downstream nodes of the branch that the switch node needs to execute - for (Long switchTaskCode : switchTaskList) { - getSwitchNeedWorkCodes(switchTaskCode, dag, switchNeedWorkCodes); - } - // conditionResultVoList.remove(resultConditionLocation); - for (SwitchResultVo info : conditionResultVoList) { - if (CollectionUtils.isEmpty(info.getNextNode())) { - continue; - } - for (Long nextNode : info.getNextNode()) { - setSwitchTaskNodeSkip(nextNode, dag, completeTaskList, skipTaskNodeList, - switchNeedWorkCodes); - } - } - return switchTaskList; - } + JSONUtils.parseObject(taskInstance.getTaskParams(), new TypeReference() { + }); - /** - * get all downstream nodes of the branch that the switch node needs to execute - * @param taskCode - * @param dag - * @param switchNeedWorkCodes - */ - public static void getSwitchNeedWorkCodes(Long taskCode, DAG dag, - Set switchNeedWorkCodes) { - switchNeedWorkCodes.add(taskCode); - Set subsequentNodes = dag.getSubsequentNodes(taskCode); - if (org.apache.commons.collections.CollectionUtils.isNotEmpty(subsequentNodes)) { - for (Long subCode : subsequentNodes) { - getSwitchNeedWorkCodes(subCode, dag, switchNeedWorkCodes); - } + SwitchParameters.SwitchResult switchResult = switchParameters.getSwitchResult(); + Long nextBranch = switchParameters.getNextBranch(); + if (switchResult == null) { + log.error("switchResult is null, please check the switch task configuration"); + return Collections.emptyList(); + } + if (nextBranch == null) { + log.error("switchParameters.getNextBranch() is null, please check the switch task configuration"); + return Collections.emptyList(); } - } - private static void setSwitchTaskNodeSkip(Long skipNodeCode, - DAG dag, - Map completeTaskList, - Map skipTaskNodeList, - Set switchNeedWorkCodes) { - // ignore 
when the node that needs to be skipped exists on the branch that the switch type node needs to execute - if (!dag.containsNode(skipNodeCode) || switchNeedWorkCodes.contains(skipNodeCode)) { - return; + Set allNextBranches = new HashSet<>(); + if (switchResult.getNextNode() != null) { + allNextBranches.add(switchResult.getNextNode()); } - skipTaskNodeList.putIfAbsent(skipNodeCode, dag.getNode(skipNodeCode)); - Collection postNodeList = dag.getSubsequentNodes(skipNodeCode); - for (Long post : postNodeList) { - TaskNode postNode = dag.getNode(post); - if (isTaskNodeNeedSkip(postNode, skipTaskNodeList)) { - setTaskNodeSkip(post, dag, completeTaskList, skipTaskNodeList); + if (CollectionUtils.isNotEmpty(switchResult.getDependTaskList())) { + for (SwitchResultVo switchResultVo : switchResult.getDependTaskList()) { + allNextBranches.add(switchResultVo.getNextNode()); } } + + allNextBranches.remove(nextBranch); + + for (Long branch : allNextBranches) { + setTaskNodeSkip(branch, dag, skipTaskNodeList); + } + return Lists.newArrayList(nextBranch); } + /** * set task node and the post nodes skip flag */ private static void setTaskNodeSkip(Long skipNodeCode, DAG dag, - Map completeTaskList, Map skipTaskNodeList) { if (!dag.containsNode(skipNodeCode)) { return; @@ -495,7 +454,7 @@ private static void setTaskNodeSkip(Long skipNodeCode, for (Long post : postNodeList) { TaskNode postNode = dag.getNode(post); if (isTaskNodeNeedSkip(postNode, skipTaskNodeList)) { - setTaskNodeSkip(post, dag, completeTaskList, skipTaskNodeList); + setTaskNodeSkip(post, dag, skipTaskNodeList); } } } @@ -591,31 +550,7 @@ public static ProcessDag getProcessDag(List taskNodeList, */ public static boolean haveConditionsAfterNode(Long parentNodeCode, DAG dag) { - return haveSubAfterNode(parentNodeCode, dag, TaskConstants.TASK_TYPE_CONDITIONS); - } - - /** - * is there have conditions after the parent node - */ - public static boolean haveConditionsAfterNode(Long parentNodeCode, List taskNodes) { - if 
(CollectionUtils.isEmpty(taskNodes)) { - return false; - } - for (TaskNode taskNode : taskNodes) { - List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), Long.class); - if (preTasksList.contains(parentNodeCode) && taskNode.isConditionsTask()) { - return true; - } - } - return false; - } - - /** - * is there have blocking node after the parent node - */ - public static boolean haveBlockingAfterNode(Long parentNodeCode, - DAG dag) { - return haveSubAfterNode(parentNodeCode, dag, TaskConstants.TASK_TYPE_BLOCKING); + return haveSubAfterNode(parentNodeCode, dag, ConditionsLogicTaskChannelFactory.NAME); } /** diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManagerTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManagerTest.java index 981b0a8050e3..4ba958e71dce 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManagerTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/alert/ListenerEventAlertManagerTest.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.service.alert; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; @@ -30,6 +31,7 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ListenerEventMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; import java.util.List; @@ -40,8 +42,6 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * ProcessAlertManager Test @@ -49,8 +49,6 @@ 
@ExtendWith(MockitoExtension.class) public class ListenerEventAlertManagerTest { - private static final Logger logger = LoggerFactory.getLogger(ListenerEventAlertManagerTest.class); - @InjectMocks ListenerEventAlertManager listenerEventAlertManager; @@ -60,6 +58,9 @@ public class ListenerEventAlertManagerTest { @Mock ListenerEventMapper listenerEventMapper; + @Mock + ProcessService processService; + @Test public void sendServerDownListenerEventTest() { String host = "127.0.0.1"; @@ -67,7 +68,7 @@ public void sendServerDownListenerEventTest() { List globalPluginInstanceList = new ArrayList<>(); AlertPluginInstance instance = new AlertPluginInstance(1, "instanceParams", "instanceName"); globalPluginInstanceList.add(instance); - Mockito.when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()) + when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()) .thenReturn(globalPluginInstanceList); Mockito.doNothing().when(listenerEventMapper).insertServerDownEvent(any(), any()); listenerEventAlertManager.publishServerDownListenerEvent(host, type); @@ -82,9 +83,9 @@ public void sendProcessDefinitionCreatedListenerEvent() { AlertPluginInstance instance = new AlertPluginInstance(1, "instanceParams", "instanceName"); List globalPluginInstanceList = new ArrayList<>(); globalPluginInstanceList.add(instance); - Mockito.when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()) + when(alertPluginInstanceMapper.queryAllGlobalAlertPluginInstanceList()) .thenReturn(globalPluginInstanceList); - Mockito.when(listenerEventMapper.insert(any())).thenReturn(1); + when(listenerEventMapper.insert(any())).thenReturn(1); listenerEventAlertManager.publishProcessDefinitionCreatedListenerEvent(user, processDefinition, taskDefinitionLogs, processTaskRelationLogs); } @@ -142,7 +143,8 @@ public void sendTaskEndListenerEvent() { public void sendTaskFailListenerEvent() { ProcessInstance processInstance = Mockito.mock(ProcessInstance.class); TaskInstance 
taskInstance = Mockito.mock(TaskInstance.class); - ProjectUser projectUser = Mockito.mock(ProjectUser.class); - listenerEventAlertManager.publishTaskFailListenerEvent(processInstance, taskInstance, projectUser); + when(processService.queryProjectWithUserByProcessInstanceId(processInstance.getId())) + .thenReturn(new ProjectUser()); + listenerEventAlertManager.publishTaskFailListenerEvent(processInstance, taskInstance); } } diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java index 0cde76bdfe88..f60320fc63e6 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/command/MessageServiceImplTest.java @@ -214,14 +214,4 @@ public void testCreateCommand() { Mockito.verify(commandMapper, Mockito.times(1)).insert(command); } - @Test - public void testFindCommandPageBySlot() { - int pageSize = 1; - int masterCount = 0; - int thisMasterSlot = 2; - List commandList = - commandService.findCommandPageBySlot(pageSize, masterCount, thisMasterSlot); - Assertions.assertEquals(0, commandList.size()); - } - } diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java index 509f9a4cf115..a8b0f44ae9ca 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java @@ -79,7 +79,6 @@ import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType; import org.apache.dolphinscheduler.plugin.task.api.model.Property; 
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; -import org.apache.dolphinscheduler.service.cron.CronUtilsTest; import org.apache.dolphinscheduler.service.exceptions.CronParseException; import org.apache.dolphinscheduler.service.exceptions.ServiceException; import org.apache.dolphinscheduler.service.expand.CuringParamsService; @@ -102,8 +101,6 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * process service test @@ -112,7 +109,6 @@ @MockitoSettings(strictness = Strictness.LENIENT) public class ProcessServiceTest { - private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class); @InjectMocks private ProcessServiceImpl processService; @Mock @@ -667,7 +663,6 @@ public void testSaveTaskDefine() { taskDefinition.setVersion(1); taskDefinition.setCreateTime(new Date()); taskDefinition.setUpdateTime(new Date()); - when(taskPluginManager.getParameters(any())).thenReturn(null); when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskDefinition.getCode(), taskDefinition.getVersion())).thenReturn(taskDefinition); when(taskDefinitionLogMapper.queryMaxVersionForDefinition(taskDefinition.getCode())).thenReturn(1); @@ -741,7 +736,7 @@ public void testChangeOutParam() { processInstance.setId(62); taskInstance.setVarPool("[{\"direct\":\"OUT\",\"prop\":\"test1\",\"type\":\"VARCHAR\",\"value\":\"\"}]"); taskInstance.setTaskParams("{\"type\":\"MYSQL\",\"datasource\":1,\"sql\":\"select id from tb_test limit 1\"," - + "\"udfs\":\"\",\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\"," + + "\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\"," + "\"groupId\":null,\"localParams\":[{\"prop\":\"test1\",\"direct\":\"OUT\",\"type\":\"VARCHAR\",\"value\":\"12\"}]," + 
"\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[],\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"]," + "\\\"failedNode\\\":[\\\"\\\"]}\",\"dependence\":\"{}\"}"); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceTest.java index 8f4790111cdc..4f3bbb0bc788 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/TriggerRelationServiceTest.java @@ -17,24 +17,26 @@ package org.apache.dolphinscheduler.service.process; +import static com.google.common.truth.Truth.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.common.enums.ApiTriggerType; import org.apache.dolphinscheduler.dao.entity.TriggerRelation; import org.apache.dolphinscheduler.dao.mapper.TriggerRelationMapper; -import org.apache.dolphinscheduler.service.cron.CronUtilsTest; import java.util.Date; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; -import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import com.google.common.collect.Lists; /** * Trigger Relation Service Test @@ -43,8 +45,6 @@ @MockitoSettings(strictness = Strictness.LENIENT) public class TriggerRelationServiceTest { - private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class); - @InjectMocks private TriggerRelationServiceImpl 
triggerRelationService; @Mock @@ -52,47 +52,37 @@ public class TriggerRelationServiceTest { @Test public void saveTriggerToDb() { - Mockito.doNothing().when(triggerRelationMapper).upsert(Mockito.any()); + doNothing().when(triggerRelationMapper).upsert(any()); triggerRelationService.saveTriggerToDb(ApiTriggerType.COMMAND, 1234567890L, 100); } @Test public void queryByTypeAndJobId() { - Mockito.doNothing().when(triggerRelationMapper).upsert(Mockito.any()); - Mockito.when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.PROCESS.getCode(), 100)) - .thenReturn(getTriggerTdoDb()); + doNothing().when(triggerRelationMapper).upsert(any()); + when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.PROCESS.getCode(), 100)) + .thenReturn(Lists.newArrayList(getTriggerTdoDb())); - TriggerRelation triggerRelation1 = triggerRelationService.queryByTypeAndJobId( - ApiTriggerType.PROCESS, 100); - Assertions.assertNotNull(triggerRelation1); - TriggerRelation triggerRelation2 = triggerRelationService.queryByTypeAndJobId( - ApiTriggerType.PROCESS, 200); - Assertions.assertNull(triggerRelation2); + assertThat(triggerRelationService.queryByTypeAndJobId(ApiTriggerType.PROCESS, 100)).hasSize(1); + assertThat(triggerRelationService.queryByTypeAndJobId(ApiTriggerType.PROCESS, 200)).isEmpty(); } @Test public void saveCommandTrigger() { - Mockito.doNothing().when(triggerRelationMapper).upsert(Mockito.any()); - Mockito.when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.PROCESS.getCode(), 100)) - .thenReturn(getTriggerTdoDb()); - int result = -1; - result = triggerRelationService.saveCommandTrigger(1234567890, 100); - Assertions.assertTrue(result > 0); - result = triggerRelationService.saveCommandTrigger(1234567890, 200); - Assertions.assertTrue(result == 0); + doNothing().when(triggerRelationMapper).upsert(any()); + when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.PROCESS.getCode(), 100)) + .thenReturn(Lists.newArrayList(getTriggerTdoDb())); + 
assertThat(triggerRelationService.saveCommandTrigger(1234567890, 100)).isAtLeast(1); + assertThat(triggerRelationService.saveCommandTrigger(1234567890, 200)).isEqualTo(0); } @Test public void saveProcessInstanceTrigger() { - Mockito.doNothing().when(triggerRelationMapper).upsert(Mockito.any()); - Mockito.when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.COMMAND.getCode(), 100)) - .thenReturn(getTriggerTdoDb()); - int result = -1; - result = triggerRelationService.saveProcessInstanceTrigger(100, 1234567890); - Assertions.assertTrue(result > 0); - result = triggerRelationService.saveProcessInstanceTrigger(200, 1234567890); - Assertions.assertTrue(result == 0); + doNothing().when(triggerRelationMapper).upsert(any()); + when(triggerRelationMapper.queryByTypeAndJobId(ApiTriggerType.COMMAND.getCode(), 100)) + .thenReturn(Lists.newArrayList(getTriggerTdoDb())); + assertThat(triggerRelationService.saveProcessInstanceTrigger(100, 1234567890)).isAtLeast(1); + assertThat(triggerRelationService.saveProcessInstanceTrigger(200, 1234567890)).isEqualTo(0); } private TriggerRelation getTriggerTdoDb() { diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueueTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueueTest.java new file mode 100644 index 000000000000..a064c928e76f --- /dev/null +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/MasterPriorityQueueTest.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service.queue; + +import org.apache.dolphinscheduler.common.model.Server; + +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class MasterPriorityQueueTest { + + @Test + public void getOrderedCollection() { + + MasterPriorityQueue queue = new MasterPriorityQueue(); + + // Test empty queue + Server[] emptyElements = queue.getOrderedElements(); + Assertions.assertArrayEquals(emptyElements, new Server[]{}); + + // Test queue with fabricated servers + queue.putAll(getServerList()); + Server[] orderElements = queue.getOrderedElements(); + Assertions.assertEquals(extractServerIds(orderElements), Arrays.asList(4, 2, 1, 3)); + + } + + @Test + public void refreshMasterList() { + MasterPriorityQueue queue = new MasterPriorityQueue(); + + // Test empty queue + queue.clear(); + Assertions.assertEquals(queue.getIndex("127.0.0.1:124"), -1); + + // Test queue with fabricated servers + queue.putAll(getServerList()); + + Assertions.assertEquals(queue.getIndex("127.0.0.1:124"), 0); + Assertions.assertEquals(queue.getIndex("127.0.0.1:122"), 1); + Assertions.assertEquals(queue.getIndex("127.0.0.1:121"), 2); + Assertions.assertEquals(queue.getIndex("127.0.0.1:123"), 3); + + } + + private List getServerList() { + + long baseTime = new Date().getTime(); + + Server server1 = new Server(); + server1.setId(1); + server1.setHost("127.0.0.1"); + server1.setPort(121); + 
server1.setCreateTime(new Date(baseTime - 1000)); + + Server server2 = new Server(); + server2.setId(2); + server2.setHost("127.0.0.1"); + server2.setPort(122); + server2.setCreateTime(new Date(baseTime + 1000)); + + Server server3 = new Server(); + server3.setId(3); + server3.setHost("127.0.0.1"); + server3.setPort(123); + server3.setCreateTime(new Date(baseTime - 2000)); + + Server server4 = new Server(); + server4.setId(4); + server4.setHost("127.0.0.1"); + server4.setPort(124); + server4.setCreateTime(new Date(baseTime + 2000)); + + return Arrays.asList(server1, server2, server3, server4); + } + + private List extractServerIds(Server[] servers) { + return Arrays.stream(servers).map(Server::getId).collect(Collectors.toList()); + } + +} diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/CommonUtilsTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/CommonUtilsTest.java deleted file mode 100644 index cab280a7020d..000000000000 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/CommonUtilsTest.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.service.utils; - -import org.apache.dolphinscheduler.common.utils.FileUtils; - -import java.net.InetAddress; -import java.net.UnknownHostException; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * configuration test - */ -@ExtendWith(MockitoExtension.class) -public class CommonUtilsTest { - - private static final Logger logger = LoggerFactory.getLogger(CommonUtilsTest.class); - - @Test - public void getSystemEnvPath() { - String envPath; - envPath = CommonUtils.getSystemEnvPath(); - Assertions.assertEquals("/etc/profile", envPath); - } - - @Test - public void getDownloadFilename() { - logger.info(FileUtils.getDownloadFilename("a.txt")); - Assertions.assertTrue(true); - } - - @Test - public void getUploadFilename() { - logger.info(FileUtils.getUploadFilename("1234", "a.txt")); - Assertions.assertTrue(true); - } - - @Test - public void test() { - InetAddress ip; - try { - ip = InetAddress.getLocalHost(); - logger.info(ip.getHostAddress()); - } catch (UnknownHostException e) { - e.printStackTrace(); - } - Assertions.assertTrue(true); - } - -} diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java index c19812303ca9..daaa6ac81de6 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/utils/DagHelperTest.java @@ -17,18 +17,19 @@ package org.apache.dolphinscheduler.service.utils; -import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_CONDITIONS; - import 
org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.enums.TaskExecutionStatus; import org.apache.dolphinscheduler.plugin.task.api.model.SwitchResultVo; +import org.apache.dolphinscheduler.plugin.task.api.parameters.ConditionsParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; +import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DependentLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; import org.apache.dolphinscheduler.service.model.TaskNode; import org.apache.dolphinscheduler.service.process.ProcessDag; @@ -43,10 +44,9 @@ import org.junit.jupiter.api.Test; import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.collect.Lists; +import com.google.common.truth.Truth; -/** - * dag helper test - */ public class DagHelperTest { @Test @@ -89,26 +89,105 @@ public void testHaveSubAfterNode() { boolean canSubmit = DagHelper.haveAllNodeAfterNode(parentNodeCode, dag); Assertions.assertTrue(canSubmit); - boolean haveBlocking = DagHelper.haveBlockingAfterNode(parentNodeCode, dag); - Assertions.assertTrue(haveBlocking); - boolean haveConditions = DagHelper.haveConditionsAfterNode(parentNodeCode, dag); Assertions.assertTrue(haveConditions); - boolean dependent = DagHelper.haveSubAfterNode(parentNodeCode, dag, TaskConstants.TASK_TYPE_DEPENDENT); + boolean dependent = DagHelper.haveSubAfterNode(parentNodeCode, dag, 
DependentLogicTaskChannelFactory.NAME); Assertions.assertFalse(dependent); } - /** - * test task node can submit - * - * @throws JsonProcessingException if error throws JsonProcessingException - */ @Test - public void testTaskNodeCanSubmit() throws IOException { + public void testTaskNodeCanSubmit() { + List taskNodeList = new ArrayList<>(); + TaskNode node1 = new TaskNode(); + node1.setId("1"); + node1.setName("1"); + node1.setCode(1); + node1.setType("SHELL"); + taskNodeList.add(node1); + + TaskNode node2 = new TaskNode(); + node2.setId("2"); + node2.setName("2"); + node2.setCode(2); + node2.setType("SHELL"); + List dep2 = new ArrayList<>(); + dep2.add("1"); + node2.setPreTasks(JSONUtils.toJsonString(dep2)); + taskNodeList.add(node2); + + TaskNode node4 = new TaskNode(); + node4.setId("4"); + node4.setName("4"); + node4.setCode(4); + node4.setType("SHELL"); + taskNodeList.add(node4); + + TaskNode node3 = new TaskNode(); + node3.setId("3"); + node3.setName("3"); + node3.setCode(3); + node3.setType("SHELL"); + List dep3 = new ArrayList<>(); + dep3.add("2"); + dep3.add("4"); + node3.setPreTasks(JSONUtils.toJsonString(dep3)); + taskNodeList.add(node3); + + TaskNode node5 = new TaskNode(); + node5.setId("5"); + node5.setName("5"); + node5.setCode(5); + node5.setType("SHELL"); + List dep5 = new ArrayList<>(); + dep5.add("3"); + dep5.add("8"); + node5.setPreTasks(JSONUtils.toJsonString(dep5)); + taskNodeList.add(node5); + + TaskNode node6 = new TaskNode(); + node6.setId("6"); + node6.setName("6"); + node6.setCode(6); + node6.setType("SHELL"); + List dep6 = new ArrayList<>(); + dep6.add("3"); + node6.setPreTasks(JSONUtils.toJsonString(dep6)); + taskNodeList.add(node6); + + TaskNode node7 = new TaskNode(); + node7.setId("7"); + node7.setName("7"); + node7.setCode(7); + node7.setType("SHELL"); + List dep7 = new ArrayList<>(); + dep7.add("5"); + node7.setPreTasks(JSONUtils.toJsonString(dep7)); + taskNodeList.add(node7); + + TaskNode node8 = new TaskNode(); + 
node8.setId("8"); + node8.setName("8"); + node8.setCode(8); + node8.setType("SHELL"); + List dep8 = new ArrayList<>(); + dep8.add("2"); + node8.setPreTasks(JSONUtils.toJsonString(dep8)); + taskNodeList.add(node8); + + List startNodes = new ArrayList<>(); + List recoveryNodes = new ArrayList<>(); + List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, + startNodes, recoveryNodes, TaskDependType.TASK_POST); + List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); + ProcessDag processDag = new ProcessDag(); + processDag.setEdges(taskNodeRelations); + processDag.setNodes(destTaskNodeList); + // 1->2->3->5->7 // 4->3->6 - DAG dag = generateDag(); + // 1->2->8->5->7 + DAG dag = DagHelper.buildDagGraph(processDag); TaskNode taskNode3 = dag.getNode(3L); Map completeTaskList = new HashMap<>(); Map skipNodeList = new HashMap<>(); @@ -116,7 +195,7 @@ public void testTaskNodeCanSubmit() throws IOException { Boolean canSubmit = false; // 2/4 are forbidden submit 3 - TaskNode node2 = dag.getNode(2L); + node2 = dag.getNode(2L); node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode nodex = dag.getNode(4L); nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); @@ -131,21 +210,107 @@ public void testTaskNodeCanSubmit() throws IOException { Assertions.assertEquals(canSubmit, false); // 2/3 forbidden submit 5 - TaskNode node3 = dag.getNode(3L); + node3 = dag.getNode(3L); node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); - TaskNode node8 = dag.getNode(8L); + node8 = dag.getNode(8L); node8.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); - TaskNode node5 = dag.getNode(5L); + node5 = dag.getNode(5L); canSubmit = DagHelper.allDependsForbiddenOrEnd(node5, dag, skipNodeList, completeTaskList); Assertions.assertEquals(canSubmit, true); } - /** - * test parse post node list - */ @Test - public void testParsePostNodeList() throws IOException { - DAG dag = generateDag(); + public void testParsePostNodeList() { + 
List taskNodeList = new ArrayList<>(); + TaskNode node1 = new TaskNode(); + node1.setId("1"); + node1.setName("1"); + node1.setCode(1); + node1.setType("SHELL"); + taskNodeList.add(node1); + + TaskNode node2 = new TaskNode(); + node2.setId("2"); + node2.setName("2"); + node2.setCode(2); + node2.setType("SHELL"); + List dep2 = new ArrayList<>(); + dep2.add("1"); + node2.setPreTasks(JSONUtils.toJsonString(dep2)); + taskNodeList.add(node2); + + TaskNode node4 = new TaskNode(); + node4.setId("4"); + node4.setName("4"); + node4.setCode(4); + node4.setType("SHELL"); + taskNodeList.add(node4); + + TaskNode node3 = new TaskNode(); + node3.setId("3"); + node3.setName("3"); + node3.setCode(3); + node3.setType("SHELL"); + List dep3 = new ArrayList<>(); + dep3.add("2"); + dep3.add("4"); + node3.setPreTasks(JSONUtils.toJsonString(dep3)); + taskNodeList.add(node3); + + TaskNode node5 = new TaskNode(); + node5.setId("5"); + node5.setName("5"); + node5.setCode(5); + node5.setType("SHELL"); + List dep5 = new ArrayList<>(); + dep5.add("3"); + dep5.add("8"); + node5.setPreTasks(JSONUtils.toJsonString(dep5)); + taskNodeList.add(node5); + + TaskNode node6 = new TaskNode(); + node6.setId("6"); + node6.setName("6"); + node6.setCode(6); + node6.setType("SHELL"); + List dep6 = new ArrayList<>(); + dep6.add("3"); + node6.setPreTasks(JSONUtils.toJsonString(dep6)); + taskNodeList.add(node6); + + TaskNode node7 = new TaskNode(); + node7.setId("7"); + node7.setName("7"); + node7.setCode(7); + node7.setType("SHELL"); + List dep7 = new ArrayList<>(); + dep7.add("5"); + node7.setPreTasks(JSONUtils.toJsonString(dep7)); + taskNodeList.add(node7); + + TaskNode node8 = new TaskNode(); + node8.setId("8"); + node8.setName("8"); + node8.setCode(8); + node8.setType("SHELL"); + List dep8 = new ArrayList<>(); + dep8.add("2"); + node8.setPreTasks(JSONUtils.toJsonString(dep8)); + taskNodeList.add(node8); + + List startNodes = new ArrayList<>(); + List recoveryNodes = new ArrayList<>(); + List destTaskNodeList 
= DagHelper.generateFlowNodeListByStartNode(taskNodeList, + startNodes, recoveryNodes, TaskDependType.TASK_POST); + List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); + ProcessDag processDag = new ProcessDag(); + processDag.setEdges(taskNodeRelations); + processDag.setNodes(destTaskNodeList); + + // 1->2->3->5->7 + // 4->3->6 + // 1->2->8->5->7 + DAG dag = DagHelper.buildDagGraph(processDag); Map completeTaskList = new HashMap<>(); Map skipNodeList = new HashMap<>(); @@ -205,11 +370,6 @@ public void testParsePostNodeList() throws IOException { Assertions.assertTrue(postNodes.contains(7L)); } - /** - * test forbidden post node - * - * @throws JsonProcessingException - */ @Test public void testForbiddenPostNode() throws IOException { DAG dag = generateDag(); @@ -243,11 +403,6 @@ public void testForbiddenPostNode() throws IOException { Assertions.assertTrue(postNodes.contains(3L)); } - /** - * test condition post node - * - * @throws JsonProcessingException - */ @Test public void testConditionPostNode() throws IOException { DAG dag = generateDag(); @@ -259,24 +414,22 @@ public void testConditionPostNode() throws IOException { completeTaskList.put(1L, new TaskInstance()); completeTaskList.put(2L, new TaskInstance()); completeTaskList.put(4L, new TaskInstance()); + + TaskInstance taskInstance3 = new TaskInstance(); + taskInstance3.setTaskType(ConditionsLogicTaskChannelFactory.NAME); + ConditionsParameters.ConditionResult conditionResult = ConditionsParameters.ConditionResult.builder() + .conditionSuccess(true) + .successNode(Lists.newArrayList(5L)) + .failedNode(Lists.newArrayList(6L)) + .build(); + ConditionsParameters conditionsParameters = new ConditionsParameters(); + conditionsParameters.setConditionResult(conditionResult); + taskInstance3.setTaskParams(JSONUtils.toJsonString(conditionsParameters)); + taskInstance3.setState(TaskExecutionStatus.SUCCESS); TaskNode node3 = dag.getNode(3L); - node3.setType(TASK_TYPE_CONDITIONS); - 
node3.setConditionResult("{\n" - + - " \"successNode\": [5\n" - + - " ],\n" - + - " \"failedNode\": [6\n" - + - " ]\n" - + - " }"); - completeTaskList.remove(3L); - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setState(TaskExecutionStatus.SUCCESS); + node3.setType(ConditionsLogicTaskChannelFactory.NAME); // complete 1/2/3/4 expect:8 - completeTaskList.put(3L, taskInstance); + completeTaskList.put(3L, taskInstance3); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assertions.assertEquals(1, postNodes.size()); Assertions.assertTrue(postNodes.contains(8L)); @@ -291,7 +444,6 @@ public void testConditionPostNode() throws IOException { // 3.complete 1/2/3/4/5/8 expect post:7 skip:6 skipNodeList.clear(); TaskInstance taskInstance1 = new TaskInstance(); - taskInstance.setState(TaskExecutionStatus.SUCCESS); completeTaskList.put(5L, taskInstance1); postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); Assertions.assertEquals(1, postNodes.size()); @@ -299,66 +451,120 @@ public void testConditionPostNode() throws IOException { Assertions.assertEquals(1, skipNodeList.size()); Assertions.assertTrue(skipNodeList.containsKey(6L)); - // dag: 1-2-3-5-7 4-3-6 - // 3-if , complete:1/2/3/4 - // 1.failure:3 expect post:6 skip:5/7 - skipNodeList.clear(); - completeTaskList.remove(3L); - taskInstance = new TaskInstance(); - - Map taskParamsMap = new HashMap<>(); - taskParamsMap.put(Constants.SWITCH_RESULT, ""); - taskInstance.setTaskParams(JSONUtils.toJsonString(taskParamsMap)); - taskInstance.setState(TaskExecutionStatus.FAILURE); - completeTaskList.put(3L, taskInstance); - postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); - Assertions.assertEquals(1, postNodes.size()); - Assertions.assertTrue(postNodes.contains(6L)); - Assertions.assertEquals(2, skipNodeList.size()); - Assertions.assertTrue(skipNodeList.containsKey(5L)); - Assertions.assertTrue(skipNodeList.containsKey(7L)); - - 
// dag: 1-2-3-5-7 4-3-6 - // 3-if , complete:1/2/3/4 - // 1.failure:3 expect post:6 skip:5/7 - dag = generateDag2(); - skipNodeList.clear(); - completeTaskList.clear(); - taskInstance.setSwitchDependency(getSwitchNode()); - completeTaskList.put(1L, taskInstance); - postNodes = DagHelper.parsePostNodes(1L, skipNodeList, dag, completeTaskList); - Assertions.assertEquals(1, postNodes.size()); } @Test - public void testSwitchPostNode() throws IOException { - DAG dag = generateDag2(); + public void testSwitchPostNode() { + List taskNodeList = new ArrayList<>(); + + TaskNode node = new TaskNode(); + node.setId("0"); + node.setName("0"); + node.setCode(0); + node.setType("SHELL"); + taskNodeList.add(node); + + TaskNode node1 = new TaskNode(); + node1.setId("1"); + node1.setName("1"); + node1.setCode(1); + node1.setType(SwitchLogicTaskChannelFactory.NAME); + SwitchParameters switchParameters = new SwitchParameters(); + node1.setParams(JSONUtils.toJsonString(switchParameters)); + taskNodeList.add(node1); + + TaskNode node2 = new TaskNode(); + node2.setId("2"); + node2.setName("2"); + node2.setCode(2); + node2.setType("SHELL"); + List dep2 = new ArrayList<>(); + dep2.add("1"); + node2.setPreTasks(JSONUtils.toJsonString(dep2)); + taskNodeList.add(node2); + + TaskNode node4 = new TaskNode(); + node4.setId("4"); + node4.setName("4"); + node4.setCode(4); + node4.setType("SHELL"); + List dep4 = new ArrayList<>(); + dep4.add("1"); + node4.setPreTasks(JSONUtils.toJsonString(dep4)); + taskNodeList.add(node4); + + TaskNode node5 = new TaskNode(); + node5.setId("5"); + node5.setName("5"); + node5.setCode(5); + node5.setType("SHELL"); + List dep5 = new ArrayList<>(); + dep5.add(1L); + node5.setPreTasks(JSONUtils.toJsonString(dep5)); + taskNodeList.add(node5); + + TaskNode node6 = new TaskNode(); + node5.setId("6"); + node5.setName("6"); + node5.setCode(6); + node5.setType("SHELL"); + List dep6 = new ArrayList<>(); + dep5.add(2L); + dep5.add(4L); + 
node5.setPreTasks(JSONUtils.toJsonString(dep6)); + taskNodeList.add(node6); + + List startNodes = new ArrayList<>(); + List recoveryNodes = new ArrayList<>(); + + // 0 + // 1->2->6 + // 1->4->6 + // 1->5 + List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, + startNodes, recoveryNodes, TaskDependType.TASK_POST); + List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); + ProcessDag processDag = new ProcessDag(); + processDag.setEdges(taskNodeRelations); + processDag.setNodes(destTaskNodeList); + + DAG dag = DagHelper.buildDagGraph(processDag); Map skipTaskNodeList = new HashMap<>(); Map completeTaskList = new HashMap<>(); - completeTaskList.put(0l, new TaskInstance()); + completeTaskList.put(0L, new TaskInstance()); TaskInstance taskInstance = new TaskInstance(); taskInstance.setState(TaskExecutionStatus.SUCCESS); - taskInstance.setTaskCode(1l); - Map taskParamsMap = new HashMap<>(); - taskParamsMap.put(Constants.SWITCH_RESULT, ""); - taskInstance.setTaskParams(JSONUtils.toJsonString(taskParamsMap)); - taskInstance.setSwitchDependency(getSwitchNode()); + taskInstance.setTaskCode(1L); + taskInstance.setTaskType(SwitchLogicTaskChannelFactory.NAME); + switchParameters = SwitchParameters.builder() + .nextBranch(5L) + .switchResult(SwitchParameters.SwitchResult.builder() + .dependTaskList(Lists.newArrayList( + new SwitchResultVo("", 2L), + new SwitchResultVo("", 4L))) + .nextNode(5L) + .build()) + .build(); + taskInstance.setTaskParams(JSONUtils.toJsonString(switchParameters)); completeTaskList.put(1l, taskInstance); - DagHelper.skipTaskNode4Switch(dag.getNode(1l), skipTaskNodeList, completeTaskList, dag); + List nextBranch = DagHelper.skipTaskNode4Switch(skipTaskNodeList, taskInstance, dag); Assertions.assertNotNull(skipTaskNodeList.get(2L)); - Assertions.assertEquals(1, skipTaskNodeList.size()); + Assertions.assertNotNull(skipTaskNodeList.get(4L)); + Assertions.assertEquals(2, skipTaskNodeList.size()); + 
Truth.assertThat(nextBranch).containsExactly(5L); } + /** * process: * 1->2->3->5->7 * 4->3->6 * 1->2->8->5->7 * DAG graph: - * 4 -> -> 6 - * \ / + * 4 -> -> 6 + * \ / * 1 -> 2 -> 3 -> 5 -> 7 - * \ / - * -> 8 -> + * \ / + * -> 8 -> * * @return dag * @throws JsonProcessingException if error throws JsonProcessingException @@ -452,108 +658,6 @@ private DAG generateDag() throws IOException { return DagHelper.buildDagGraph(processDag); } - /** - * DAG graph: - * -> 2-> - * / \ - * / \ - * 0->1(switch)->5 6 - * \ / - * \ / - * -> 4-> - * - * @return dag - * @throws JsonProcessingException if error throws JsonProcessingException - */ - private DAG generateDag2() throws IOException { - List taskNodeList = new ArrayList<>(); - - TaskNode node = new TaskNode(); - node.setId("0"); - node.setName("0"); - node.setCode(0); - node.setType("SHELL"); - taskNodeList.add(node); - - TaskNode node1 = new TaskNode(); - node1.setId("1"); - node1.setName("1"); - node1.setCode(1); - node1.setType("switch"); - node1.setDependence(JSONUtils.toJsonString(getSwitchNode())); - taskNodeList.add(node1); - - TaskNode node2 = new TaskNode(); - node2.setId("2"); - node2.setName("2"); - node2.setCode(2); - node2.setType("SHELL"); - List dep2 = new ArrayList<>(); - dep2.add("1"); - node2.setPreTasks(JSONUtils.toJsonString(dep2)); - taskNodeList.add(node2); - - TaskNode node4 = new TaskNode(); - node4.setId("4"); - node4.setName("4"); - node4.setCode(4); - node4.setType("SHELL"); - List dep4 = new ArrayList<>(); - dep4.add("1"); - node4.setPreTasks(JSONUtils.toJsonString(dep4)); - taskNodeList.add(node4); - - TaskNode node5 = new TaskNode(); - node5.setId("5"); - node5.setName("5"); - node5.setCode(5); - node5.setType("SHELL"); - List dep5 = new ArrayList<>(); - dep5.add(1L); - node5.setPreTasks(JSONUtils.toJsonString(dep5)); - taskNodeList.add(node5); - - TaskNode node6 = new TaskNode(); - node5.setId("6"); - node5.setName("6"); - node5.setCode(6); - node5.setType("SHELL"); - List dep6 = new 
ArrayList<>(); - dep5.add(2L); - dep5.add(4L); - node5.setPreTasks(JSONUtils.toJsonString(dep6)); - taskNodeList.add(node6); - - List startNodes = new ArrayList<>(); - List recoveryNodes = new ArrayList<>(); - List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, - startNodes, recoveryNodes, TaskDependType.TASK_POST); - List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(destTaskNodeList); - return DagHelper.buildDagGraph(processDag); - } - - private SwitchParameters getSwitchNode() { - SwitchParameters conditionsParameters = new SwitchParameters(); - SwitchResultVo switchResultVo1 = new SwitchResultVo(); - switchResultVo1.setCondition(" 2 == 1"); - switchResultVo1.setNextNode(2L); - SwitchResultVo switchResultVo2 = new SwitchResultVo(); - switchResultVo2.setCondition(" 2 == 2"); - switchResultVo2.setNextNode(4L); - List list = new ArrayList<>(); - list.add(switchResultVo1); - list.add(switchResultVo2); - conditionsParameters.setDependTaskList(list); - conditionsParameters.setNextNode(5L); - conditionsParameters.setRelation("AND"); - conditionsParameters.setResultConditionLocation(1); - // in: AND(AND(1 is SUCCESS)) - return conditionsParameters; - } - @Test public void testBuildDagGraph() { String shellJson = diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java index e7ebbeee0a2a..882b170e1190 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java @@ -28,42 +28,44 @@ public enum DbType { - MYSQL(0, "mysql"), - POSTGRESQL(1, "postgresql"), - HIVE(2, "hive"), - SPARK(3, "spark"), - CLICKHOUSE(4, "clickhouse"), - ORACLE(5, "oracle"), - 
SQLSERVER(6, "sqlserver"), - DB2(7, "db2"), - PRESTO(8, "presto"), - H2(9, "h2"), - REDSHIFT(10, "redshift"), - ATHENA(11, "athena"), - TRINO(12, "trino"), - STARROCKS(13, "starrocks"), - AZURESQL(14, "azuresql"), - DAMENG(15, "dameng"), - OCEANBASE(16, "oceanbase"), - SSH(17, "ssh"), - KYUUBI(18, "kyuubi"), - DATABEND(19, "databend"), - SNOWFLAKE(20, "snowflake"), - VERTICA(21, "vertica"), - HANA(22, "hana"), - DORIS(23, "doris"), - ZEPPELIN(24, "zeppelin"), - SAGEMAKER(25, "sagemaker"), + MYSQL(0, "mysql", "mysql"), + POSTGRESQL(1, "postgresql", "postgresql"), + HIVE(2, "hive", "hive"), + SPARK(3, "spark", "spark"), + CLICKHOUSE(4, "clickhouse", "clickhouse"), + ORACLE(5, "oracle", "oracle"), + SQLSERVER(6, "sqlserver", "sqlserver"), + DB2(7, "db2", "db2"), + PRESTO(8, "presto", "presto"), + H2(9, "h2", "h2"), + REDSHIFT(10, "redshift", "redshift"), + ATHENA(11, "athena", "athena"), + TRINO(12, "trino", "trino"), + STARROCKS(13, "starrocks", "starrocks"), + AZURESQL(14, "azuresql", "azuresql"), + DAMENG(15, "dameng", "dameng"), + OCEANBASE(16, "oceanbase", "oceanbase"), + SSH(17, "ssh", "ssh"), + KYUUBI(18, "kyuubi", "kyuubi"), + DATABEND(19, "databend", "databend"), + SNOWFLAKE(20, "snowflake", "snowflake"), + VERTICA(21, "vertica", "vertica"), + HANA(22, "hana", "hana"), + DORIS(23, "doris", "doris"), + ZEPPELIN(24, "zeppelin", "zeppelin"), + SAGEMAKER(25, "sagemaker", "sagemaker"), - K8S(26, "k8s"); + K8S(26, "k8s", "k8s"); private static final Map DB_TYPE_MAP = Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, Functions.identity())); @EnumValue private final int code; + private final String name; private final String descp; - DbType(int code, String descp) { + DbType(int code, String name, String descp) { this.code = code; + this.name = name; this.descp = descp; } @@ -83,6 +85,10 @@ public int getCode() { return code; } + public String getName() { + return name; + } + public String getDescp() { return descp; } diff --git 
a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java index 16c49a1ecc5c..8be58ebb15c6 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java @@ -17,34 +17,37 @@ package org.apache.dolphinscheduler.spi.enums; +import lombok.Getter; + import com.baomidou.mybatisplus.annotation.EnumValue; /** * resource type */ +@Getter public enum ResourceType { /** - * 0 file, 1 udf + * 0 file */ FILE(0, "file"), - UDF(1, "udf"), ALL(2, "all"); - ResourceType(int code, String descp) { + ResourceType(int code, String desc) { this.code = code; - this.descp = descp; + this.desc = desc; } @EnumValue private final int code; - private final String descp; - - public int getCode() { - return code; - } - - public String getDescp() { - return descp; + private final String desc; + + public static ResourceType getResourceType(int code) { + for (ResourceType resourceType : ResourceType.values()) { + if (resourceType.getCode() == code) { + return resourceType; + } + } + return null; } } diff --git a/dolphinscheduler-standalone-server/src/main/assembly/dolphinscheduler-standalone-server.xml b/dolphinscheduler-standalone-server/src/main/assembly/dolphinscheduler-standalone-server.xml index db312b729803..480301718ed1 100644 --- a/dolphinscheduler-standalone-server/src/main/assembly/dolphinscheduler-standalone-server.xml +++ b/dolphinscheduler-standalone-server/src/main/assembly/dolphinscheduler-standalone-server.xml @@ -86,6 +86,13 @@ conf + + ${basedir}/../dolphinscheduler-authentication/dolphinscheduler-aws-authentication/src/main/resources + + **/*.yaml + + conf + ${basedir}/../dolphinscheduler-api/src/main/resources diff --git a/dolphinscheduler-standalone-server/src/main/bin/jvm_args_env.sh 
b/dolphinscheduler-standalone-server/src/main/bin/jvm_args_env.sh index 42f8b1c656c7..95b283f91e03 100644 --- a/dolphinscheduler-standalone-server/src/main/bin/jvm_args_env.sh +++ b/dolphinscheduler-standalone-server/src/main/bin/jvm_args_env.sh @@ -24,6 +24,7 @@ -XX:+PrintGCDetails -Xloggc:gc.log +-XX:-OmitStackTraceInFastThrow -XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=dump.hprof diff --git a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java index 14808ab62939..a39d3c9a2200 100644 --- a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java +++ b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java @@ -24,8 +24,8 @@ import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; -@SpringBootApplication @Slf4j +@SpringBootApplication public class StandaloneServer { public static void main(String[] args) throws Exception { diff --git a/dolphinscheduler-standalone-server/src/main/resources/application.yaml b/dolphinscheduler-standalone-server/src/main/resources/application.yaml index 8e58b8956804..2da687d285ea 100644 --- a/dolphinscheduler-standalone-server/src/main/resources/application.yaml +++ b/dolphinscheduler-standalone-server/src/main/resources/application.yaml @@ -23,17 +23,6 @@ spring: date-format: "yyyy-MM-dd HH:mm:ss" banner: charset: UTF-8 - cache: - # default enable cache, you can disable by `type: none` - type: none - cache-names: - - tenant - - user - - processDefinition - - processTaskRelation - - taskDefinition - caffeine: - spec: maximumSize=100,expireAfterWrite=300s,recordStats sql: init: schema-locations: classpath:sql/dolphinscheduler_h2.sql @@ -75,6 +64,7 @@ spring: mvc: pathmatch: matching-strategy: ANT_PATH_MATCHER + 
cloud.discovery.client.composite-indicator.enabled: false mybatis-plus: mapper-locations: classpath:org/apache/dolphinscheduler/dao/mapper/*Mapper.xml @@ -95,12 +85,12 @@ registry: namespace: dolphinscheduler connect-string: localhost:2181 retry-policy: - base-sleep-time: 60ms - max-sleep: 300ms + base-sleep-time: 1s + max-sleep: 3s max-retries: 5 - session-timeout: 30s - connection-timeout: 9s - block-until-connected: 600ms + session-timeout: 60s + connection-timeout: 15s + block-until-connected: 15s digest: ~ security: @@ -171,8 +161,6 @@ casdoor: master: listen-port: 5678 - # master fetch command num - fetch-command-num: 10 # master prepare execute thread number to limit handle commands in parallel pre-exec-threads: 10 # master execute thread number to limit process instances in parallel @@ -190,10 +178,10 @@ master: state-wheel-interval: 5s server-load-protection: enabled: true - # Master max cpu usage, when the master's cpu usage is smaller then this value, master server can execute workflow. - max-cpu-usage-percentage-thresholds: 0.9 - # Master max JVM memory usage , when the master's jvm memory usage is smaller then this value, master server can execute workflow. - max-jvm-memory-usage-percentage-thresholds: 0.9 + # Master max system cpu usage, when the master's system cpu usage is smaller then this value, master server can execute workflow. + max-system-cpu-usage-percentage-thresholds: 1 + # Master max jvm cpu usage, when the master's jvm cpu usage is smaller then this value, master server can execute workflow. + max-jvm-cpu-usage-percentage-thresholds: 0.9 # Master max System memory usage , when the master's system memory usage is smaller then this value, master server can execute workflow. max-system-memory-usage-percentage-thresholds: 0.9 # Master max disk usage , when the master's disk usage is smaller then this value, master server can execute workflow. 
@@ -203,6 +191,13 @@ master: # kill yarn/k8s application when failover taskInstance, default true kill-application-when-task-failover: true worker-group-refresh-interval: 10s + command-fetch-strategy: + type: ID_SLOT_BASED + config: + # The incremental id step + id-step: 1 + # master fetch command num + fetch-size: 10 worker: # worker listener port @@ -215,10 +210,10 @@ worker: host-weight: 100 server-load-protection: enabled: true - # Worker max cpu usage, when the worker's cpu usage is smaller then this value, worker server can be dispatched tasks. - max-cpu-usage-percentage-thresholds: 0.9 - # Worker max JVM memory usage , when the worker's jvm memory usage is smaller then this value, worker server can be dispatched tasks. - max-jvm-memory-usage-percentage-thresholds: 0.9 + # Worker max system cpu usage, when the worker's system cpu usage is smaller then this value, worker server can be dispatched tasks. + max-system-cpu-usage-percentage-thresholds: 1 + # Worker max jvm cpu usage, when the worker's jvm cpu usage is smaller then this value, worker server can be dispatched tasks. + max-jvm-cpu-usage-percentage-thresholds: 0.9 # Worker max System memory usage , when the worker's system memory usage is smaller then this value, worker server can be dispatched tasks. max-system-memory-usage-percentage-thresholds: 0.9 # Worker max disk usage , when the worker's disk usage is smaller then this value, worker server can be dispatched tasks. @@ -238,7 +233,8 @@ alert: # Define value is (0 = infinite), and alert server would be waiting alert result. 
wait-timeout: 0 max-heartbeat-interval: 60s - query_alert_threshold: 100 + # The maximum number of alerts that can be processed in parallel + sender-parallelism: 5 api: audit-enable: false @@ -340,4 +336,4 @@ spring: driver-class-name: com.mysql.cj.jdbc.Driver url: jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 username: root - password: root@123 + password: root diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperator.java index d470545babd4..81b93ea207fc 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperator.java @@ -19,17 +19,12 @@ import static org.apache.dolphinscheduler.common.constants.Constants.EMPTY_STRING; import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import 
org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.lang3.StringUtils; @@ -37,136 +32,69 @@ import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.File; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.sql.Date; -import java.util.ArrayList; import java.util.Collections; -import java.util.LinkedList; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.Data; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import com.azure.core.http.rest.PagedIterable; import com.azure.storage.blob.BlobClient; import com.azure.storage.blob.BlobContainerClient; import com.azure.storage.blob.BlobServiceClient; import com.azure.storage.blob.BlobServiceClientBuilder; import com.azure.storage.blob.models.BlobContainerItem; -import com.azure.storage.blob.models.BlobItem; import com.azure.storage.blob.specialized.BlockBlobClient; -@Data @Slf4j -public class AbsStorageOperator implements Closeable, StorageOperate { +public class AbsStorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { - private BlobContainerClient blobContainerClient; + private final BlobContainerClient blobContainerClient; - private BlobServiceClient blobServiceClient; + private final BlobServiceClient blobServiceClient; - private String connectionString; - - private String storageAccountName; - - private String containerName; - - public AbsStorageOperator() { - - } - - public void init() { - containerName = readContainerName(); - connectionString = readConnectionString(); - storageAccountName = readAccountName(); - blobServiceClient = buildBlobServiceClient(); - blobContainerClient = 
buildBlobContainerClient(); - checkContainerNameExists(); - } - - protected BlobServiceClient buildBlobServiceClient() { - return new BlobServiceClientBuilder() - .endpoint("https://" + storageAccountName + ".blob.core.windows.net/") - .connectionString(connectionString) + public AbsStorageOperator(AbsStorageProperties absStorageProperties) { + super(absStorageProperties.getResourceUploadPath()); + blobServiceClient = new BlobServiceClientBuilder() + .endpoint("https://" + absStorageProperties.getStorageAccountName() + ".blob.core.windows.net/") + .connectionString(absStorageProperties.getConnectionString()) .buildClient(); - } - - protected BlobContainerClient buildBlobContainerClient() { - return blobServiceClient.getBlobContainerClient(containerName); - } - - protected String readConnectionString() { - return PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_CONNECTION_STRING); - } - - protected String readContainerName() { - return PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_CONTAINER_NAME); - } - - protected String readAccountName() { - return PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_ACCOUNT_NAME); - } - - @Override - public void createTenantDirIfNotExists(String tenantCode) throws Exception { - mkdir(tenantCode, getAbsResDir(tenantCode)); - mkdir(tenantCode, getAbsUdfDir(tenantCode)); - } - - public String getAbsResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getAbsTenantDir(tenantCode)); - } - - public String getAbsUdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getAbsTenantDir(tenantCode)); - } - - public String getAbsTenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getGcsDataBasePath(), tenantCode); - } - - public String getGcsDataBasePath() { - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return EMPTY_STRING; - } else { - return RESOURCE_UPLOAD_PATH.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); - } - } - - @Override - public String 
getResDir(String tenantCode) { - return getAbsResDir(tenantCode) + FOLDER_SEPARATOR; + blobContainerClient = blobServiceClient.getBlobContainerClient(absStorageProperties.getContainerName()); + checkContainerNameExists(absStorageProperties.getContainerName()); } @Override - public String getUdfDir(String tenantCode) { - return getAbsUdfDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public String getResourceFullName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); + public String getStorageBaseDirectory() { + // All directory should end with File.separator + if (getStorageBaseDirectory().startsWith("/")) { + log.warn("{} -> {} should not start with / in abs", Constants.RESOURCE_UPLOAD_PATH, + getStorageBaseDirectory()); + return getStorageBaseDirectory().substring(1); } - return String.format(FORMAT_S_S, getAbsResDir(tenantCode), fileName); + return getStorageBaseDirectory(); } + @SneakyThrows @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); + public void createStorageDir(String directory) { + String objectName = directory + FOLDER_SEPARATOR; + if (isObjectExists(objectName)) { + throw new FileAlreadyExistsException("directory: " + objectName + " already exists"); } - return getDir(resourceType, tenantCode) + fileName; + BlobClient blobClient = blobContainerClient.getBlobClient(objectName); + blobClient.upload(new ByteArrayInputStream(EMPTY_STRING.getBytes()), 0); } + @SneakyThrows @Override - public void download(String srcFilePath, String dstFilePath, boolean overwrite) throws IOException { + public void download(String srcFilePath, String dstFilePath, boolean overwrite) { File dstFile = new File(dstFilePath); if (dstFile.isDirectory()) { Files.delete(dstFile.toPath()); @@ -179,7 +107,7 @@ public 
void download(String srcFilePath, String dstFilePath, boolean overwrite) } @Override - public boolean exists(String fullName) throws IOException { + public boolean exists(String fullName) { return isObjectExists(fullName); } @@ -187,36 +115,14 @@ protected boolean isObjectExists(String objectName) { return blobContainerClient.getBlobClient(objectName).exists(); } + @SneakyThrows @Override - public boolean delete(String filePath, boolean recursive) throws IOException { - try { - if (isObjectExists(filePath)) { - blobContainerClient.getBlobClient(filePath).delete(); - } - return true; - } catch (Exception e) { - log.error("delete the object error,the resource path is {}", filePath); - return false; - } + public void delete(String filePath, boolean recursive) { + blobContainerClient.getBlobClient(filePath).deleteIfExists(); } @Override - public boolean delete(String fullName, List childrenPathList, boolean recursive) throws IOException { - // append the resource fullName to the list for deletion. 
- childrenPathList.add(fullName); - - boolean result = true; - for (String filePath : childrenPathList) { - if (!delete(filePath, recursive)) { - result = false; - } - } - - return result; - } - - @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { BlobClient srcBlobClient = blobContainerClient.getBlobClient(srcPath); BlockBlobClient dstBlobClient = blobContainerClient.getBlobClient(dstPath).getBlockBlobClient(); @@ -225,29 +131,23 @@ public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolea if (deleteSource) { srcBlobClient.delete(); } - return true; } + @SneakyThrows @Override - public boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - try { - BlobClient blobClient = blobContainerClient.getBlobClient(dstPath); - blobClient.uploadFromFile(srcFile, overwrite); - - Path srcPath = Paths.get(srcFile); - if (deleteSource) { - Files.delete(srcPath); - } - return true; - } catch (Exception e) { - log.error("upload failed,the container is {},the filePath is {}", containerName, dstPath); - return false; + public void upload(String srcFile, String dstPath, boolean deleteSource, boolean overwrite) { + BlobClient blobClient = blobContainerClient.getBlobClient(dstPath); + blobClient.uploadFromFile(srcFile, overwrite); + + Path srcPath = Paths.get(srcFile); + if (deleteSource) { + Files.delete(srcPath); } } + @SneakyThrows @Override - public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { + public List fetchFileContent(String filePath, int skipLineNums, int limit) { if (StringUtils.isBlank(filePath)) { log.error("file path:{} is blank", filePath); return Collections.emptyList(); @@ -263,199 +163,27 @@ public List vimFile(String tenantCode, String filePath, int 
skipLineNums } } - @Override - public void deleteTenant(String tenantCode) throws Exception { - deleteTenantCode(tenantCode); - } - - protected void deleteTenantCode(String tenantCode) { - deleteDirectory(getResDir(tenantCode)); - deleteDirectory(getUdfDir(tenantCode)); - } - - @Override - public String getDir(ResourceType resourceType, String tenantCode) { - switch (resourceType) { - case UDF: - return getUdfDir(tenantCode); - case FILE: - return getResDir(tenantCode); - case ALL: - return getGcsDataBasePath(); - default: - return EMPTY_STRING; - } - - } - - protected void deleteDirectory(String directoryName) { - if (isObjectExists(directoryName)) { - blobContainerClient.getBlobClient(directoryName).delete(); - } - } - - @Override - public boolean mkdir(String tenantCode, String path) throws IOException { - String objectName = path + FOLDER_SEPARATOR; - if (!isObjectExists(objectName)) { - BlobClient blobClient = blobContainerClient.getBlobClient(objectName); - blobClient.upload(new ByteArrayInputStream(EMPTY_STRING.getBytes()), 0); - } - return true; - } - @Override public void close() throws IOException { } @Override - public ResUploadType returnStorageType() { - return ResUploadType.ABS; + public List listStorageEntity(String resourceAbsolutePath) { + return null; } @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { - List storageEntityList = new ArrayList<>(); - LinkedList foldersToFetch = new LinkedList<>(); - - StorageEntity initialEntity = null; - try { - initialEntity = getFileStatus(path, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", path, e); - return storageEntityList; - } - foldersToFetch.add(initialEntity); - - while (!foldersToFetch.isEmpty()) { - String pathToExplore = foldersToFetch.pop().getFullName(); - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - 
for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); - } - } - storageEntityList.addAll(tempList); - } catch (Exception e) { - log.error("error while listing files stat:wus recursively, path: {}", pathToExplore, e); - } - } - - return storageEntityList; - } - - @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - List storageEntityList = new ArrayList<>(); - - PagedIterable blobItems; - blobItems = blobContainerClient.listBlobsByHierarchy(path); - if (blobItems == null) { - return storageEntityList; - } - - for (BlobItem blobItem : blobItems) { - if (path.equals(blobItem.getName())) { - continue; - } - if (blobItem.isPrefix()) { - String suffix = StringUtils.difference(path, blobItem.getName()); - String fileName = StringUtils.difference(defaultPath, blobItem.getName()); - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(blobItem.getName()); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - entity.setCreateTime(null); - entity.setUpdateTime(null); - entity.setPfullName(path); - - storageEntityList.add(entity); - } else { - String[] aliasArr = blobItem.getName().split("/"); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, blobItem.getName()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(blobItem.getName()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(blobItem.getProperties().getContentLength()); - entity.setCreateTime(Date.from(blobItem.getProperties().getCreationTime().toInstant())); - entity.setUpdateTime(Date.from(blobItem.getProperties().getLastModified().toInstant())); - entity.setPfullName(path); - - 
storageEntityList.add(entity); - } - } - - return storageEntityList; + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + return null; } @Override - public StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - if (path.endsWith(FOLDER_SEPARATOR)) { - // the path is a directory that may or may not exist - String alias = findDirAlias(path); - String fileName = StringUtils.difference(defaultPath, path); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(path); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - - return entity; - } else { - if (isObjectExists(path)) { - BlobClient blobClient = blobContainerClient.getBlobClient(path); - - String[] aliasArr = blobClient.getBlobName().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, blobClient.getBlobName()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(blobClient.getBlobName()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(blobClient.getProperties().getBlobSize()); - entity.setCreateTime(Date.from(blobClient.getProperties().getCreationTime().toInstant())); - entity.setUpdateTime(Date.from(blobClient.getProperties().getLastModified().toInstant())); - - return entity; - } else { - throw new FileNotFoundException("Object is not found in ABS container: " + containerName); - } - } - } - - private String findDirAlias(String dirPath) { - if (!dirPath.endsWith(FOLDER_SEPARATOR)) { - return dirPath; - } - - Path path = Paths.get(dirPath); - return path.getName(path.getNameCount() - 1) + FOLDER_SEPARATOR; + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + return 
null; } - public void checkContainerNameExists() { + public void checkContainerNameExists(String containerName) { if (StringUtils.isBlank(containerName)) { throw new IllegalArgumentException(containerName + " is blank"); } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorFactory.java index 1909ece6f176..6428ec0980a2 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorFactory.java @@ -17,20 +17,30 @@ package org.apache.dolphinscheduler.plugin.storage.abs; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class AbsStorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class AbsStorageOperatorFactory implements StorageOperatorFactory { @Override - public StorageOperate createStorageOperate() { - AbsStorageOperator absStorageOperator = new AbsStorageOperator(); - absStorageOperator.init(); - return absStorageOperator; + public StorageOperator createStorageOperate() { + final 
AbsStorageProperties absStorageProperties = getAbsStorageProperties(); + return new AbsStorageOperator(absStorageProperties); + } + + private AbsStorageProperties getAbsStorageProperties() { + return AbsStorageProperties.builder() + .containerName(PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_CONTAINER_NAME)) + .connectionString(PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_CONNECTION_STRING)) + .storageAccountName(PropertyUtils.getString(Constants.AZURE_BLOB_STORAGE_ACCOUNT_NAME)) + .resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .build(); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageProperties.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageProperties.java new file mode 100644 index 000000000000..e33e165a9b7a --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/main/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageProperties.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.storage.abs; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class AbsStorageProperties { + + private String containerName; + private String connectionString; + private String storageAccountName; + private String resourceUploadPath; + +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorTest.java deleted file mode 100644 index daec0c36b227..000000000000 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/java/org/apache/dolphinscheduler/plugin/storage/abs/AbsStorageOperatorTest.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.storage.abs; - -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -import com.azure.storage.blob.BlobClient; -import com.azure.storage.blob.BlobContainerClient; -import com.azure.storage.blob.BlobServiceClient; -import com.azure.storage.blob.specialized.BlockBlobClient; - -@ExtendWith(MockitoExtension.class) -public class AbsStorageOperatorTest { - - private static final String CONNECTION_STRING_MOCK = "CONNECTION_STRING_MOCK"; - - private static final String ACCOUNT_NAME_MOCK = "ACCOUNT_NAME_MOCK"; - - private static final String CONTAINER_NAME_MOCK = "CONTAINER_NAME_MOCK"; - - private static final String TENANT_CODE_MOCK = "TENANT_CODE_MOCK"; - - private static final String DIR_MOCK = "DIR_MOCK"; - - private static final String FILE_NAME_MOCK = "FILE_NAME_MOCK"; - - private static final String FILE_PATH_MOCK = "FILE_PATH_MOCK"; - - private static final String FULL_NAME = "/tmp/dir1/"; - - private static final String DEFAULT_PATH = "/tmp/"; - - @Mock - private BlobContainerClient blobContainerClient; - - @Mock - private BlobServiceClient blobServiceClient; - - @Mock - private 
BlockBlobClient blockBlobClient; - - @Mock - private BlobClient blobClient; - - private AbsStorageOperator absStorageOperator; - - @BeforeEach - public void setUp() throws Exception { - absStorageOperator = Mockito.spy(AbsStorageOperator.class); - Mockito.doReturn(CONNECTION_STRING_MOCK).when(absStorageOperator).readConnectionString(); - Mockito.doReturn(CONTAINER_NAME_MOCK).when(absStorageOperator).readContainerName(); - Mockito.doReturn(ACCOUNT_NAME_MOCK).when(absStorageOperator).readAccountName(); - Mockito.doReturn(blobContainerClient).when(absStorageOperator).buildBlobContainerClient(); - Mockito.doReturn(blobServiceClient).when(absStorageOperator).buildBlobServiceClient(); - Mockito.doNothing().when(absStorageOperator).checkContainerNameExists(); - - absStorageOperator.init(); - } - - @Test - public void testInit() throws Exception { - verify(absStorageOperator, times(1)).buildBlobServiceClient(); - verify(absStorageOperator, times(1)).buildBlobContainerClient(); - Assertions.assertEquals(CONNECTION_STRING_MOCK, absStorageOperator.getConnectionString()); - Assertions.assertEquals(CONTAINER_NAME_MOCK, absStorageOperator.getContainerName()); - Assertions.assertEquals(ACCOUNT_NAME_MOCK, absStorageOperator.getStorageAccountName()); - } - - @Test - public void createTenantResAndUdfDir() throws Exception { - doReturn(DIR_MOCK).when(absStorageOperator).getAbsResDir(TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(absStorageOperator).getAbsUdfDir(TENANT_CODE_MOCK); - doReturn(true).when(absStorageOperator).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - absStorageOperator.createTenantDirIfNotExists(TENANT_CODE_MOCK); - verify(absStorageOperator, times(2)).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - } - - @Test - public void getResDir() { - final String expectedResourceDir = String.format("dolphinscheduler/%s/resources/", TENANT_CODE_MOCK); - final String dir = absStorageOperator.getResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedResourceDir, dir); - } - - @Test - public void 
getUdfDir() { - final String expectedUdfDir = String.format("dolphinscheduler/%s/udfs/", TENANT_CODE_MOCK); - final String dir = absStorageOperator.getUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedUdfDir, dir); - } - - @Test - public void mkdirWhenDirExists() { - boolean isSuccess = false; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - Mockito.doReturn(true).when(absStorageOperator).isObjectExists(key); - isSuccess = absStorageOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void getResourceFullName() { - final String expectedResourceFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFileName = absStorageOperator.getResourceFullName(TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedResourceFileName, resourceFileName); - } - - @Test - public void getFileName() { - final String expectedFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String fileName = absStorageOperator.getFileName(ResourceType.FILE, TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedFileName, fileName); - } - - @Test - public void exists() { - boolean doesExist = false; - doReturn(true).when(absStorageOperator).isObjectExists(FILE_NAME_MOCK); - try { - doesExist = absStorageOperator.exists(FILE_NAME_MOCK); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(doesExist); - } - - @Test - public void delete() { - boolean isDeleted = false; - doReturn(true).when(absStorageOperator).isObjectExists(FILE_NAME_MOCK); - Mockito.doReturn(blobClient).when(blobContainerClient).getBlobClient(Mockito.anyString()); - try { - isDeleted = absStorageOperator.delete(FILE_NAME_MOCK, true); - } catch (IOException e) { 
- Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isDeleted); - verify(blobClient, times(1)).delete(); - } - - @Test - public void copy() { - boolean isSuccess = false; - Mockito.doReturn(blobClient).when(blobContainerClient).getBlobClient(Mockito.anyString()); - Mockito.doReturn(blockBlobClient).when(blobClient).getBlockBlobClient(); - try { - isSuccess = absStorageOperator.copy(FILE_PATH_MOCK, FILE_PATH_MOCK, false, false); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void deleteTenant() { - doNothing().when(absStorageOperator).deleteTenantCode(anyString()); - try { - absStorageOperator.deleteTenant(TENANT_CODE_MOCK); - } catch (Exception e) { - Assertions.fail("unexpected exception caught in unit test"); - } - - verify(absStorageOperator, times(1)).deleteTenantCode(anyString()); - } - - @Test - public void getGcsResDir() { - final String expectedGcsResDir = String.format("dolphinscheduler/%s/resources", TENANT_CODE_MOCK); - final String gcsResDir = absStorageOperator.getAbsResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsResDir, gcsResDir); - } - - @Test - public void getGcsUdfDir() { - final String expectedGcsUdfDir = String.format("dolphinscheduler/%s/udfs", TENANT_CODE_MOCK); - final String gcsUdfDir = absStorageOperator.getAbsUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsUdfDir, gcsUdfDir); - } - - @Test - public void getGcsTenantDir() { - final String expectedGcsTenantDir = String.format(FORMAT_S_S, DIR_MOCK, TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(absStorageOperator).getGcsDataBasePath(); - final String gcsTenantDir = absStorageOperator.getAbsTenantDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsTenantDir, gcsTenantDir); - } - - @Test - public void deleteDir() { - Mockito.doReturn(blobClient).when(blobContainerClient).getBlobClient(Mockito.anyString()); - 
doReturn(true).when(absStorageOperator).isObjectExists(Mockito.any()); - absStorageOperator.deleteDirectory(DIR_MOCK); - verify(blobClient, times(1)).delete(); - } - - @Test - public void testGetFileStatus() throws Exception { - StorageEntity entity = - absStorageOperator.getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(FULL_NAME, entity.getFullName()); - Assertions.assertEquals("dir1/", entity.getFileName()); - } - - @Test - public void testListFilesStatus() throws Exception { - Mockito.doReturn(null).when(blobContainerClient).listBlobsByHierarchy(Mockito.any()); - List result = - absStorageOperator.listFilesStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - verify(blobContainerClient, times(1)).listBlobsByHierarchy(Mockito.any()); - } - - @Test - public void testListFilesStatusRecursively() throws Exception { - StorageEntity entity = new StorageEntity(); - entity.setFullName(FULL_NAME); - - doReturn(entity).when(absStorageOperator).getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - doReturn(Collections.EMPTY_LIST).when(absStorageOperator).listFilesStatus(anyString(), anyString(), anyString(), - Mockito.any(ResourceType.class)); - - List result = - absStorageOperator.listFilesStatusRecursively(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } -} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/resources/logback.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-abs/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/AbstractStorageOperator.java 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/AbstractStorageOperator.java new file mode 100644 index 000000000000..924c581248fc --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/AbstractStorageOperator.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.storage.api; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import org.apache.commons.lang3.StringUtils; + +import java.io.File; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; +import com.google.common.io.Files; + +public abstract class AbstractStorageOperator implements StorageOperator { + + private static final Logger log = LoggerFactory.getLogger(AbstractStorageOperator.class); + protected final String resourceBaseAbsolutePath; + + public AbstractStorageOperator(String resourceBaseAbsolutePath) { + Preconditions.checkNotNull(resourceBaseAbsolutePath, "Resource upload path should not be null"); + this.resourceBaseAbsolutePath = resourceBaseAbsolutePath; + } + + @Override + public ResourceMetadata getResourceMetaData(String resourceAbsolutePath) { + String storageBaseDirectory = getStorageBaseDirectory(); + String resourceSegment = StringUtils.substringAfter(resourceAbsolutePath, storageBaseDirectory); + String[] segments = StringUtils.split(resourceSegment, File.separator, 3); + if (segments.length == 0) { + throw new IllegalArgumentException("Invalid resource path: " + resourceAbsolutePath); + } + return ResourceMetadata.builder() + .resourceAbsolutePath(resourceAbsolutePath) + .resourceBaseDirectory(storageBaseDirectory) + .isDirectory(Files.getFileExtension(resourceAbsolutePath).isEmpty()) + .tenant(segments[0]) + .resourceType(ResourceType.FILE) + .resourceRelativePath(segments.length == 2 ? 
"/" : segments[2]) + .resourceParentAbsolutePath(StringUtils.substringBeforeLast(resourceAbsolutePath, File.separator)) + .build(); + } + + @Override + public String getStorageBaseDirectory() { + // All directory should end with File.separator + return PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/tmp/dolphinscheduler"); + } + + @Override + public String getStorageBaseDirectory(String tenantCode) { + if (StringUtils.isEmpty(tenantCode)) { + throw new IllegalArgumentException("Tenant code should not be empty"); + } + // All directory should end with File.separator + return FileUtils.concatFilePath(getStorageBaseDirectory(), tenantCode); + } + + @Override + public String getStorageBaseDirectory(String tenantCode, ResourceType resourceType) { + String tenantBaseDirectory = getStorageBaseDirectory(tenantCode); + if (resourceType == null) { + throw new IllegalArgumentException("Resource type should not be null"); + } + String resourceBaseDirectory; + switch (resourceType) { + case FILE: + resourceBaseDirectory = FileUtils.concatFilePath(tenantBaseDirectory, FILE_FOLDER_NAME); + break; + case ALL: + resourceBaseDirectory = tenantBaseDirectory; + break; + default: + throw new IllegalArgumentException("Resource type: " + resourceType + " not supported"); + } + // All directory should end with File.separator + return resourceBaseDirectory; + } + + @Override + public String getStorageFileAbsolutePath(String tenantCode, String fileName) { + return FileUtils.concatFilePath(getStorageBaseDirectory(tenantCode, ResourceType.FILE), fileName); + } + + protected void exceptionIfPathEmpty(String resourceAbsolutePath) { + if (StringUtils.isEmpty(resourceAbsolutePath)) { + throw new IllegalArgumentException("Resource path should not be empty"); + } + } + +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/ResourceMetadata.java 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/ResourceMetadata.java new file mode 100644 index 000000000000..6a2ee3dc6349 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/ResourceMetadata.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.storage.api; + +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ResourceMetadata { + + private String resourceAbsolutePath; + + private String resourceBaseDirectory; + private String tenant; + private ResourceType resourceType; + private String resourceRelativePath; + private String resourceParentAbsolutePath; + private boolean isDirectory; + +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageConfiguration.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageConfiguration.java index d34e6d01de5e..9b30ddb4505a 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageConfiguration.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageConfiguration.java @@ -32,13 +32,13 @@ public class StorageConfiguration { @Bean - public StorageOperate storageOperate() { + public StorageOperator storageOperate() { Optional storageTypeOptional = StorageType.getStorageType(PropertyUtils.getUpperCaseString(RESOURCE_STORAGE_TYPE)); - Optional storageOperate = storageTypeOptional.map(storageType -> { - ServiceLoader storageOperateFactories = - ServiceLoader.load(StorageOperateFactory.class); - for (StorageOperateFactory storageOperateFactory : storageOperateFactories) { + Optional storageOperate = storageTypeOptional.map(storageType -> { + ServiceLoader storageOperateFactories = + ServiceLoader.load(StorageOperatorFactory.class); + for (StorageOperatorFactory storageOperateFactory : 
storageOperateFactories) { if (storageOperateFactory.getStorageOperate() == storageType) { return storageOperateFactory.createStorageOperate(); } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageEntity.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageEntity.java index e3639b8afed2..cae9d862a016 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageEntity.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageEntity.java @@ -24,7 +24,10 @@ import java.util.Date; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; +import lombok.NoArgsConstructor; // StorageEneity is an entity representing a resource in the third-part storage service. // It is only stored in t_ds_relation_resources_task if the resource is used by a task. @@ -32,32 +35,16 @@ // in table t_ds_relation_resources_task. @Data +@NoArgsConstructor +@AllArgsConstructor +@Builder public class StorageEntity { - /** - * exist only if it is stored in t_ds_relation_resources_task. 
- * - */ - private int id; - /** - * fullname is in a format of basepath + tenantCode + res/udf + filename - */ private String fullName; - /** - * filename is in a format of possible parent folders + alias - */ + private String fileName; - /** - * the name of the file - */ - private String alias; - /** - * parent folder time - */ private String pfullName; private boolean isDirectory; - private int userId; - private String userName; private ResourceType type; private long size; private Date createTime; diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperate.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperate.java deleted file mode 100644 index 945e361a09c8..000000000000 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperate.java +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.storage.api; - -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; - -import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import java.io.IOException; -import java.util.List; - -public interface StorageOperate { - - String RESOURCE_UPLOAD_PATH = PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler"); - - /** - * if the resource of tenant 's exist, the resource of folder will be created - * @param tenantCode - * @throws Exception - */ - void createTenantDirIfNotExists(String tenantCode) throws Exception; - - /** - * get the resource directory of tenant - * @param tenantCode - * @return - */ - String getResDir(String tenantCode); - - /** - * return the udf directory of tenant - * @param tenantCode - * @return - */ - String getUdfDir(String tenantCode); - - /** - * create the directory that the path of tenant wanted to create - * @param tenantCode - * @param path - * @return - * @throws IOException - */ - boolean mkdir(String tenantCode, String path) throws IOException; - - /** - * get the path of the resource file (fullName) - * @param tenantCode - * @param fileName - * @return - */ - String getResourceFullName(String tenantCode, String fileName); - - /** - * get the path of the resource file excluding the base path (fileName) - */ - default String getResourceFileName(String tenantCode, String fullName) { - String resDir = getResDir(tenantCode); - String filenameReplaceResDir = fullName.replaceFirst(resDir, ""); - if (!filenameReplaceResDir.equals(fullName)) { - return filenameReplaceResDir; - } - - // Replace resource dir not effective in case of run workflow with different tenant from resource file's. 
- // this is backup solution to get related path, by split with RESOURCE_TYPE_FILE - return filenameReplaceResDir.contains(RESOURCE_TYPE_FILE) - ? filenameReplaceResDir.split(String.format("%s/", RESOURCE_TYPE_FILE))[1] - : filenameReplaceResDir; - } - - /** - * get the path of the file - * @param resourceType - * @param tenantCode - * @param fileName - * @return - */ - String getFileName(ResourceType resourceType, String tenantCode, String fileName); - - /** - * predicate if the resource of tenant exists - * @param fullName - * @return - * @throws IOException - */ - boolean exists(String fullName) throws IOException; - - /** - * delete the resource of filePath - * todo if the filePath is the type of directory ,the files in the filePath need to be deleted at all - * @param filePath - * @param recursive - * @return - * @throws IOException - */ - boolean delete(String filePath, boolean recursive) throws IOException; - - boolean delete(String filePath, List childrenPathArray, boolean recursive) throws IOException; - - /** - * copy the file from srcPath to dstPath - * @param srcPath - * @param dstPath - * @param deleteSource if need to delete the file of srcPath - * @param overwrite - * @return - * @throws IOException - */ - boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException; - - /** - * get the root path of the tenant with resourceType - * @param resourceType - * @param tenantCode - * @return - */ - String getDir(ResourceType resourceType, String tenantCode); - - /** - * upload the local srcFile to dstPath - * @param tenantCode - * @param srcFile - * @param dstPath - * @param deleteSource - * @param overwrite - * @return - * @throws IOException - */ - boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException; - - /** - * download the srcPath to local - * - * @param srcFilePath the full path of the srcPath - * @param dstFile - * @param overwrite - 
* @throws IOException - */ - void download(String srcFilePath, String dstFile, boolean overwrite) throws IOException; - - /** - * vim the context of filePath - * @param tenantCode - * @param filePath - * @param skipLineNums - * @param limit - * @return - * @throws IOException - */ - List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException; - - /** - * delete the files and directory of the tenant - * - * @param tenantCode - * @throws Exception - */ - void deleteTenant(String tenantCode) throws Exception; - - /** - * return the storageType - * - * @return - */ - ResUploadType returnStorageType(); - - /** - * return files and folders in the current directory and subdirectories - * */ - List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type); - - /** - * return files and folders in the current directory - * */ - List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception; - - /** - * return a file status - * */ - StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception; -} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperator.java new file mode 100644 index 000000000000..fb27bba21795 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperator.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.storage.api; + +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import java.nio.file.FileAlreadyExistsException; +import java.util.List; + +public interface StorageOperator { + + String FILE_FOLDER_NAME = "resources"; + String UDF_FOLDER_NAME = "udfs"; + + ResourceMetadata getResourceMetaData(String resourceAbsolutePath); + + /** + * Get the absolute path of base directory. + * + * @return the base directory. e.g. file:///tmp/dolphinscheduler/, /tmp/dolphinscheduler/ + */ + String getStorageBaseDirectory(); + + /** + * Get the absolute path of directory which will be used by the given tenant. the tenant directory is under the base directory. + * + * @param tenantCode the tenant code, cannot be empty + * @return the tenant directory. e.g. file:///tmp/dolphinscheduler/default/ + */ + String getStorageBaseDirectory(String tenantCode); + + /** + * Get the absolute path of directory which will be used by the given tenant and resource type. the resource directory is under the tenant directory. + *

If the resource type is FILE, will be 'file:///tmp/dolphinscheduler/default/resources/'. + *

If the resource type is UDF, will be 'file:///tmp/dolphinscheduler/default/udfs/'. + *

If the resource type is ALL, will be 'file:///tmp/dolphinscheduler/default/'. + * + * @param tenantCode the tenant code, cannot be empty + * @param resourceType the resource type, cannot be null + * @return the resource directory. e.g. file:///tmp/dolphinscheduler/default/resources/ + */ + String getStorageBaseDirectory(String tenantCode, ResourceType resourceType); + + /** + * Get the absolute path of the file in the storage. The file will be under the file resource directory. + * + * @param tenantCode the tenant code, cannot be empty + * @param fileName the file name, cannot be empty + * @return the file absolute path. e.g. file:///tmp/dolphinscheduler/default/resources/test.sh + */ + String getStorageFileAbsolutePath(String tenantCode, String fileName); + + /** + * Create a directory; if the directory already exists, an exception will be thrown (dependent on the storage implementation). + *

If the directory does not exist, it will be created. + *

If the parent directory does not exist, it will be created. + *

If the directory already exists, will throw {@link FileAlreadyExistsException}. + * + * @param directoryAbsolutePath the directory absolute path + */ + void createStorageDir(String directoryAbsolutePath); + + /** + * Check if the resource exists. + * + * @param resourceAbsolutePath the resource absolute path + * @return true if the resource exists, otherwise false + */ + boolean exists(String resourceAbsolutePath); + + /** + * Delete the resource; if the resourceAbsolutePath does not exist, will do nothing. + * + * @param resourceAbsolutePath the resource absolute path + * @param recursive whether to delete all the sub file/directory under the given resource + */ + void delete(String resourceAbsolutePath, boolean recursive); + + /** + * Copy the resource from the source path to the destination path. + * + * @param srcAbsolutePath the source path + * @param dstAbsolutePath the destination path + * @param deleteSource whether to delete the source path after copying + * @param overwrite whether to overwrite the destination path if it exists + */ + void copy(String srcAbsolutePath, String dstAbsolutePath, boolean deleteSource, boolean overwrite); + + /** + * Upload the local file from the source path to the destination path. + * + * @param srcLocalFileAbsolutePath the source local file + * @param dstAbsolutePath the destination path + * @param deleteSource whether to delete the source path after uploading + * @param overwrite whether to overwrite the destination path if it exists + */ + void upload(String srcLocalFileAbsolutePath, String dstAbsolutePath, boolean deleteSource, boolean overwrite); + + /** + * Download the resource from the source path to the destination path. 
+ * + * @param srcFileAbsolutePath the source path + * @param dstAbsoluteFile the destination file + * @param overwrite whether to overwrite the destination file if it exists + */ + void download(String srcFileAbsolutePath, String dstAbsoluteFile, boolean overwrite); + + /** + * Fetch the content of the file. + * + * @param fileAbsolutePath the file path + * @param skipLineNums the number of lines to skip + * @param limit the number of lines to read + * @return the content of the file + */ + List fetchFileContent(String fileAbsolutePath, int skipLineNums, int limit); + + /** + * Return the {@link StorageEntity} under the given path. + *

If the path is a file, return the file status. + *

If the path is a directory, return the file/directory under the directory. + *

If the path is not exist, will return empty. + * + * @param resourceAbsolutePath the resource absolute path, cannot be empty + */ + List listStorageEntity(String resourceAbsolutePath); + + /** + * Return the {@link StorageEntity} which is file under the given path + * + * @param resourceAbsolutePath the resource absolute path, cannot be empty + */ + List listFileStorageEntityRecursively(String resourceAbsolutePath); + + /** + * Return the {@link StorageEntity} under the current directory + * + * @param resourceAbsolutePath the resource absolute path, cannot be empty + */ + StorageEntity getStorageEntity(String resourceAbsolutePath); + +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperateFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperatorFactory.java similarity index 91% rename from dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperateFactory.java rename to dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperatorFactory.java index b3a60888c97f..1e6e1f5a5299 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperateFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-api/src/main/java/org/apache/dolphinscheduler/plugin/storage/api/StorageOperatorFactory.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.plugin.storage.api; -public interface StorageOperateFactory { +public interface StorageOperatorFactory { - StorageOperate createStorageOperate(); + StorageOperator createStorageOperate(); StorageType getStorageOperate(); } diff --git 
a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperator.java index e4176dc58eac..00aa746e73eb 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperator.java @@ -18,18 +18,13 @@ package org.apache.dolphinscheduler.plugin.storage.gcs; import static org.apache.dolphinscheduler.common.constants.Constants.EMPTY_STRING; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.lang3.StringUtils; @@ -37,22 +32,24 @@ import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.File; -import 
java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.Date; import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.Data; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import com.google.api.gax.paging.Page; @@ -64,82 +61,53 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; -@Data @Slf4j -public class GcsStorageOperator implements Closeable, StorageOperate { +public class GcsStorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { - private Storage gcsStorage; + private final Storage gcsStorage; - private String bucketName; + private final String bucketName; - private String credential; - - public GcsStorageOperator() { - - } - - public void init() { - try { - credential = readCredentials(); - bucketName = readBucketName(); - gcsStorage = buildGcsStorage(credential); - - checkBucketNameExists(bucketName); - } catch (IOException e) { - log.error("GCS Storage operator init failed", e); - } - } - - protected Storage buildGcsStorage(String credential) throws IOException { - return StorageOptions.newBuilder() + @SneakyThrows + public GcsStorageOperator(GcsStorageProperties gcsStorageProperties) { + super(gcsStorageProperties.getResourceUploadPath()); + bucketName = gcsStorageProperties.getBucketName(); + gcsStorage = StorageOptions.newBuilder() .setCredentials(ServiceAccountCredentials.fromStream( - Files.newInputStream(Paths.get(credential)))) + Files.newInputStream(Paths.get(gcsStorageProperties.getCredential())))) .build() .getService(); - } - protected String 
readCredentials() { - return PropertyUtils.getString(Constants.GOOGLE_CLOUD_STORAGE_CREDENTIAL); - } - - protected String readBucketName() { - return PropertyUtils.getString(Constants.GOOGLE_CLOUD_STORAGE_BUCKET_NAME); + checkBucketNameExists(bucketName); } @Override - public void createTenantDirIfNotExists(String tenantCode) throws Exception { - mkdir(tenantCode, getGcsResDir(tenantCode)); - mkdir(tenantCode, getGcsUdfDir(tenantCode)); - } - - @Override - public String getResDir(String tenantCode) { - return getGcsResDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public String getUdfDir(String tenantCode) { - return getGcsUdfDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public String getResourceFullName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); + public String getStorageBaseDirectory() { + // All directory should end with File.separator + if (resourceBaseAbsolutePath.startsWith("/")) { + log.warn("{} -> {} should not start with / in Gcs", Constants.RESOURCE_UPLOAD_PATH, + resourceBaseAbsolutePath); + return resourceBaseAbsolutePath.substring(1); } - return String.format(FORMAT_S_S, getGcsResDir(tenantCode), fileName); + return getStorageBaseDirectory(); } + @SneakyThrows @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); + public void createStorageDir(String directoryAbsolutePath) { + directoryAbsolutePath = transformAbsolutePathToGcsKey(directoryAbsolutePath); + if (exists(directoryAbsolutePath)) { + throw new FileAlreadyExistsException("directory: " + directoryAbsolutePath + " already exists"); } - return getDir(resourceType, tenantCode) + fileName; + BlobInfo blobInfo = BlobInfo.newBuilder(BlobId.of(bucketName, directoryAbsolutePath)).build(); + gcsStorage.create(blobInfo, 
EMPTY_STRING.getBytes(StandardCharsets.UTF_8)); } + @SneakyThrows @Override - public void download(String srcFilePath, String dstFilePath, boolean overwrite) throws IOException { + public void download(String srcFilePath, String dstFilePath, boolean overwrite) { + srcFilePath = transformAbsolutePathToGcsKey(srcFilePath); + File dstFile = new File(dstFilePath); if (dstFile.isDirectory()) { Files.delete(dstFile.toPath()); @@ -152,40 +120,26 @@ public void download(String srcFilePath, String dstFilePath, boolean overwrite) } @Override - public boolean exists(String fullName) throws IOException { - return isObjectExists(fullName); + public boolean exists(String fullName) { + fullName = transformAbsolutePathToGcsKey(fullName); + Blob blob = gcsStorage.get(BlobId.of(bucketName, fullName)); + return blob != null && blob.exists(); } + @SneakyThrows @Override - public boolean delete(String filePath, boolean recursive) throws IOException { - try { - if (isObjectExists(filePath)) { - gcsStorage.delete(BlobId.of(bucketName, filePath)); - } - return true; - } catch (Exception e) { - log.error("delete the object error,the resource path is {}", filePath); - return false; + public void delete(String filePath, boolean recursive) { + filePath = transformAbsolutePathToGcsKey(filePath); + if (exists(filePath)) { + gcsStorage.delete(BlobId.of(bucketName, filePath)); } } @Override - public boolean delete(String fullName, List childrenPathList, boolean recursive) throws IOException { - // append the resource fullName to the list for deletion. 
- childrenPathList.add(fullName); - - boolean result = true; - for (String filePath : childrenPathList) { - if (!delete(filePath, recursive)) { - result = false; - } - } - - return result; - } + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { + srcPath = transformGcsKeyToAbsolutePath(srcPath); + dstPath = transformGcsKeyToAbsolutePath(dstPath); - @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { BlobId source = BlobId.of(bucketName, srcPath); BlobId target = BlobId.of(bucketName, dstPath); @@ -198,31 +152,30 @@ public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolea if (deleteSource) { gcsStorage.delete(source); } - return true; } + @SneakyThrows @Override - public boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - try { - BlobInfo blobInfo = BlobInfo.newBuilder( - BlobId.of(bucketName, dstPath)).build(); + public void upload(String srcFile, String dstPath, boolean deleteSource, boolean overwrite) { + dstPath = transformAbsolutePathToGcsKey(dstPath); + if (exists(dstPath) && !overwrite) { + throw new FileAlreadyExistsException("file: " + dstPath + " already exists"); + } + BlobInfo blobInfo = BlobInfo.newBuilder( + BlobId.of(bucketName, dstPath)).build(); - Path srcPath = Paths.get(srcFile); - gcsStorage.create(blobInfo, Files.readAllBytes(srcPath)); + Path srcPath = Paths.get(srcFile); + gcsStorage.create(blobInfo, Files.readAllBytes(srcPath)); - if (deleteSource) { - Files.delete(srcPath); - } - return true; - } catch (Exception e) { - log.error("upload failed,the bucketName is {},the filePath is {}", bucketName, dstPath); - return false; + if (deleteSource) { + Files.delete(srcPath); } } + @SneakyThrows @Override - public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { + public List 
fetchFileContent(String filePath, int skipLineNums, int limit) { + filePath = transformAbsolutePathToGcsKey(filePath); if (StringUtils.isBlank(filePath)) { log.error("file path:{} is blank", filePath); return Collections.emptyList(); @@ -237,232 +190,58 @@ public List vimFile(String tenantCode, String filePath, int skipLineNums } } + @SneakyThrows @Override - public void deleteTenant(String tenantCode) throws Exception { - deleteTenantCode(tenantCode); - } - - protected void deleteTenantCode(String tenantCode) { - deleteDirectory(getResDir(tenantCode)); - deleteDirectory(getUdfDir(tenantCode)); - } - - @Override - public String getDir(ResourceType resourceType, String tenantCode) { - switch (resourceType) { - case UDF: - return getUdfDir(tenantCode); - case FILE: - return getResDir(tenantCode); - case ALL: - return getGcsDataBasePath(); - default: - return EMPTY_STRING; - } - - } - - protected void deleteDirectory(String directoryName) { - if (isObjectExists(directoryName)) { - gcsStorage.delete(BlobId.of(bucketName, directoryName)); - } - } - - public String getGcsResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getGcsTenantDir(tenantCode)); - } - - public String getGcsUdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getGcsTenantDir(tenantCode)); - } - - public String getGcsTenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getGcsDataBasePath(), tenantCode); - } - - public String getGcsDataBasePath() { - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return EMPTY_STRING; - } else { - return RESOURCE_UPLOAD_PATH.replaceFirst(FOLDER_SEPARATOR, EMPTY_STRING); + public void close() throws IOException { + if (gcsStorage != null) { + gcsStorage.close(); } } @Override - public boolean mkdir(String tenantCode, String path) throws IOException { - String objectName = path + FOLDER_SEPARATOR; - if (!isObjectExists(objectName)) { - BlobInfo blobInfo = BlobInfo.newBuilder( - BlobId.of(bucketName, 
objectName)).build(); + public List listStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath = transformAbsolutePathToGcsKey(resourceAbsolutePath); - gcsStorage.create(blobInfo, EMPTY_STRING.getBytes(StandardCharsets.UTF_8)); - } - return true; - } - - @Override - public void close() throws IOException { - try { - if (gcsStorage != null) { - gcsStorage.close(); - } - } catch (Exception e) { - throw new IOException(e); - } + Page blobs = gcsStorage.list(bucketName, Storage.BlobListOption.prefix(resourceAbsolutePath)); + List storageEntities = new ArrayList<>(); + blobs.iterateAll().forEach(blob -> storageEntities.add(transformBlobToStorageEntity(blob))); + return storageEntities; } @Override - public ResUploadType returnStorageType() { - return ResUploadType.GCS; - } + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + resourceAbsolutePath = transformAbsolutePathToGcsKey(resourceAbsolutePath); - @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { + Set visited = new HashSet<>(); List storageEntityList = new ArrayList<>(); - LinkedList foldersToFetch = new LinkedList<>(); - - StorageEntity initialEntity = null; - try { - initialEntity = getFileStatus(path, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", path, e); - return storageEntityList; - } - foldersToFetch.add(initialEntity); + LinkedList foldersToFetch = new LinkedList<>(); + foldersToFetch.addLast(resourceAbsolutePath); while (!foldersToFetch.isEmpty()) { - String pathToExplore = foldersToFetch.pop().getFullName(); - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); + String pathToExplore = foldersToFetch.pop(); + visited.add(pathToExplore); + List tempList = 
listStorageEntity(pathToExplore); + for (StorageEntity temp : tempList) { + if (temp.isDirectory()) { + if (visited.contains(temp.getFullName())) { + continue; } + foldersToFetch.add(temp.getFullName()); } - storageEntityList.addAll(tempList); - } catch (Exception e) { - log.error("error while listing files stat:wus recursively, path: {}", pathToExplore, e); - } - } - - return storageEntityList; - } - - @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - List storageEntityList = new ArrayList<>(); - - Page blobs; - try { - blobs = - gcsStorage.list( - bucketName, - Storage.BlobListOption.prefix(path), - Storage.BlobListOption.currentDirectory()); - } catch (Exception e) { - throw new RuntimeException("Get GCS file list exception. ", e); - } - - if (blobs == null) { - return storageEntityList; - } - - for (Blob blob : blobs.iterateAll()) { - if (path.equals(blob.getName())) { - continue; - } - if (blob.isDirectory()) { - String suffix = StringUtils.difference(path, blob.getName()); - String fileName = StringUtils.difference(defaultPath, blob.getName()); - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(blob.getName()); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - entity.setCreateTime(null); - entity.setUpdateTime(null); - entity.setPfullName(path); - - storageEntityList.add(entity); - } else { - String[] aliasArr = blob.getName().split("/"); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, blob.getName()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(blob.getName()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(blob.getSize()); - 
entity.setCreateTime(Date.from(blob.getCreateTimeOffsetDateTime().toInstant())); - entity.setUpdateTime(Date.from(blob.getUpdateTimeOffsetDateTime().toInstant())); - entity.setPfullName(path); - - storageEntityList.add(entity); } + storageEntityList.addAll(tempList); } - return storageEntityList; } @Override - public StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - if (path.endsWith(FOLDER_SEPARATOR)) { - // the path is a directory that may or may not exist - String alias = findDirAlias(path); - String fileName = StringUtils.difference(defaultPath, path); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(path); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - - return entity; - } else { - if (isObjectExists(path)) { - Blob blob = gcsStorage.get(BlobId.of(bucketName, path)); - - String[] aliasArr = blob.getName().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, blob.getName()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(blob.getName()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(blob.getSize()); - entity.setCreateTime(Date.from(blob.getCreateTimeOffsetDateTime().toInstant())); - entity.setUpdateTime(Date.from(blob.getUpdateTimeOffsetDateTime().toInstant())); - - return entity; - } else { - throw new FileNotFoundException("Object is not found in GCS Bucket: " + bucketName); - } - } - } - - protected boolean isObjectExists(String objectName) { - Blob blob = gcsStorage.get(BlobId.of(bucketName, objectName)); - return blob != null && blob.exists(); + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath 
= transformAbsolutePathToGcsKey(resourceAbsolutePath); + Blob blob = gcsStorage.get(BlobId.of(bucketName, resourceAbsolutePath)); + return transformBlobToStorageEntity(blob); } - public void checkBucketNameExists(String bucketName) { + private void checkBucketNameExists(String bucketName) { if (StringUtils.isBlank(bucketName)) { throw new IllegalArgumentException(Constants.GOOGLE_CLOUD_STORAGE_BUCKET_NAME + " is blank"); } @@ -483,12 +262,35 @@ public void checkBucketNameExists(String bucketName) { } } - private String findDirAlias(String dirPath) { - if (!dirPath.endsWith(FOLDER_SEPARATOR)) { - return dirPath; + private StorageEntity transformBlobToStorageEntity(Blob blob) { + String absolutePath = transformGcsKeyToAbsolutePath(blob.getName()); + + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + + StorageEntity entity = new StorageEntity(); + entity.setFileName(new File(absolutePath).getName()); + entity.setFullName(absolutePath); + entity.setDirectory(resourceMetaData.isDirectory()); + entity.setType(resourceMetaData.getResourceType()); + entity.setSize(blob.getSize()); + entity.setCreateTime(Date.from(blob.getCreateTimeOffsetDateTime().toInstant())); + entity.setUpdateTime(Date.from(blob.getUpdateTimeOffsetDateTime().toInstant())); + return entity; + } + + private String transformAbsolutePathToGcsKey(String absolutePath) { + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + if (resourceMetaData.isDirectory()) { + return FileUtils.concatFilePath(absolutePath, "/"); } + return absolutePath; + } - Path path = Paths.get(dirPath); - return path.getName(path.getNameCount() - 1) + FOLDER_SEPARATOR; + private String transformGcsKeyToAbsolutePath(String gcsKey) { + if (gcsKey.endsWith("/")) { + return gcsKey.substring(0, gcsKey.length() - 1); + } + return gcsKey; } + } diff --git 
a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorFactory.java index eedd41c01f2b..2bc1a4bfcb81 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorFactory.java @@ -17,20 +17,29 @@ package org.apache.dolphinscheduler.plugin.storage.gcs; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class GcsStorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class GcsStorageOperatorFactory implements StorageOperatorFactory { @Override - public StorageOperate createStorageOperate() { - GcsStorageOperator gcsStorageOperator = new GcsStorageOperator(); - gcsStorageOperator.init(); - return gcsStorageOperator; + public StorageOperator createStorageOperate() { + final GcsStorageProperties gcsStorageProperties = getGcsStorageProperties(); + return new GcsStorageOperator(gcsStorageProperties); + } + + public GcsStorageProperties getGcsStorageProperties() { + return GcsStorageProperties.builder() + 
.resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .credential(PropertyUtils.getString(Constants.GOOGLE_CLOUD_STORAGE_CREDENTIAL)) + .bucketName(PropertyUtils.getString(Constants.GOOGLE_CLOUD_STORAGE_BUCKET_NAME)) + .build(); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageProperties.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageProperties.java new file mode 100644 index 000000000000..5fd1b4b74726 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/main/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageProperties.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.storage.gcs; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class GcsStorageProperties { + + private String bucketName; + + private String credential; + + private String resourceUploadPath; +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorTest.java deleted file mode 100644 index eecde956d7fa..000000000000 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/java/org/apache/dolphinscheduler/plugin/storage/gcs/GcsStorageOperatorTest.java +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.storage.gcs; - -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -import com.google.cloud.storage.BlobId; -import com.google.cloud.storage.BlobInfo; -import com.google.cloud.storage.Storage; - -@ExtendWith(MockitoExtension.class) -public class GcsStorageOperatorTest { - - private static final String CREDENTIAL_MOCK = "CREDENTIAL_MOCK"; - - private static final String BUCKET_NAME_MOCK = "BUCKET_NAME_MOCK"; - - private static final String TENANT_CODE_MOCK = "TENANT_CODE_MOCK"; - - private static final String DIR_MOCK = "DIR_MOCK"; - - private static final String FILE_NAME_MOCK = "FILE_NAME_MOCK"; - - private static final String FILE_PATH_MOCK = "FILE_PATH_MOCK"; - - private static final String FULL_NAME = "/tmp/dir1/"; - - private static final String DEFAULT_PATH = "/tmp/"; - - @Mock - private Storage gcsStorage; - - private GcsStorageOperator gcsStorageOperator; - - @BeforeEach - public void setUp() throws Exception { - gcsStorageOperator = Mockito.spy(GcsStorageOperator.class); - Mockito.doReturn(CREDENTIAL_MOCK).when(gcsStorageOperator).readCredentials(); - 
Mockito.doReturn(BUCKET_NAME_MOCK).when(gcsStorageOperator).readBucketName(); - Mockito.doReturn(gcsStorage).when(gcsStorageOperator).buildGcsStorage(Mockito.anyString()); - Mockito.doNothing().when(gcsStorageOperator).checkBucketNameExists(Mockito.anyString()); - - gcsStorageOperator.init(); - } - - @Test - public void testInit() throws Exception { - verify(gcsStorageOperator, times(1)).buildGcsStorage(CREDENTIAL_MOCK); - Assertions.assertEquals(CREDENTIAL_MOCK, gcsStorageOperator.getCredential()); - Assertions.assertEquals(BUCKET_NAME_MOCK, gcsStorageOperator.getBucketName()); - } - - @Test - public void testClose() throws Exception { - doNothing().when(gcsStorage).close(); - gcsStorageOperator.close(); - verify(gcsStorage, times(1)).close(); - } - - @Test - public void createTenantResAndUdfDir() throws Exception { - doReturn(DIR_MOCK).when(gcsStorageOperator).getGcsResDir(TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(gcsStorageOperator).getGcsUdfDir(TENANT_CODE_MOCK); - doReturn(true).when(gcsStorageOperator).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - gcsStorageOperator.createTenantDirIfNotExists(TENANT_CODE_MOCK); - verify(gcsStorageOperator, times(2)).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - } - - @Test - public void getResDir() { - final String expectedResourceDir = String.format("dolphinscheduler/%s/resources/", TENANT_CODE_MOCK); - final String dir = gcsStorageOperator.getResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedResourceDir, dir); - } - - @Test - public void getUdfDir() { - final String expectedUdfDir = String.format("dolphinscheduler/%s/udfs/", TENANT_CODE_MOCK); - final String dir = gcsStorageOperator.getUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedUdfDir, dir); - } - - @Test - public void mkdirWhenDirExists() { - boolean isSuccess = false; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - Mockito.doReturn(true).when(gcsStorageOperator).isObjectExists(key); - isSuccess = gcsStorageOperator.mkdir(TENANT_CODE_MOCK, 
DIR_MOCK); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void mkdirWhenDirNotExists() { - boolean isSuccess = true; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(false).when(gcsStorageOperator).isObjectExists(key); - isSuccess = gcsStorageOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(gcsStorage, times(1)).create(Mockito.any(BlobInfo.class), Mockito.any(byte[].class)); - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void getResourceFullName() { - final String expectedResourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFullName = gcsStorageOperator.getResourceFullName(TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedResourceFullName, resourceFullName); - } - - @Test - public void getResourceFileName() { - final String expectedResourceFileName = FILE_NAME_MOCK; - final String resourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFileName = gcsStorageOperator.getResourceFileName(TENANT_CODE_MOCK, resourceFullName); - Assertions.assertEquals(expectedResourceFileName, resourceFileName); - } - - @Test - public void getFileName() { - final String expectedFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String fileName = gcsStorageOperator.getFileName(ResourceType.FILE, TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedFileName, fileName); - } - - @Test - public void exists() { - boolean doesExist = false; - doReturn(true).when(gcsStorageOperator).isObjectExists(FILE_NAME_MOCK); - try { - doesExist = gcsStorageOperator.exists(FILE_NAME_MOCK); - } catch (IOException e) { - 
Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(doesExist); - } - - @Test - public void delete() { - boolean isDeleted = false; - doReturn(true).when(gcsStorage).delete(Mockito.any(BlobId.class)); - doReturn(true).when(gcsStorageOperator).isObjectExists(FILE_NAME_MOCK); - try { - isDeleted = gcsStorageOperator.delete(FILE_NAME_MOCK, true); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isDeleted); - verify(gcsStorage, times(1)).delete(Mockito.any(BlobId.class)); - } - - @Test - public void copy() { - boolean isSuccess = false; - doReturn(null).when(gcsStorage).copy(Mockito.any()); - try { - isSuccess = gcsStorageOperator.copy(FILE_PATH_MOCK, FILE_PATH_MOCK, false, false); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isSuccess); - verify(gcsStorage, times(1)).copy(Mockito.any()); - } - - @Test - public void deleteTenant() { - doNothing().when(gcsStorageOperator).deleteTenantCode(anyString()); - try { - gcsStorageOperator.deleteTenant(TENANT_CODE_MOCK); - } catch (Exception e) { - Assertions.fail("unexpected exception caught in unit test"); - } - - verify(gcsStorageOperator, times(1)).deleteTenantCode(anyString()); - } - - @Test - public void getGcsResDir() { - final String expectedGcsResDir = String.format("dolphinscheduler/%s/resources", TENANT_CODE_MOCK); - final String gcsResDir = gcsStorageOperator.getGcsResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsResDir, gcsResDir); - } - - @Test - public void getGcsUdfDir() { - final String expectedGcsUdfDir = String.format("dolphinscheduler/%s/udfs", TENANT_CODE_MOCK); - final String gcsUdfDir = gcsStorageOperator.getGcsUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsUdfDir, gcsUdfDir); - } - - @Test - public void getGcsTenantDir() { - final String expectedGcsTenantDir = String.format(FORMAT_S_S, DIR_MOCK, 
TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(gcsStorageOperator).getGcsDataBasePath(); - final String gcsTenantDir = gcsStorageOperator.getGcsTenantDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedGcsTenantDir, gcsTenantDir); - } - - @Test - public void deleteDir() { - doReturn(true).when(gcsStorageOperator).isObjectExists(Mockito.any()); - gcsStorageOperator.deleteDirectory(DIR_MOCK); - verify(gcsStorage, times(1)).delete(Mockito.any(BlobId.class)); - } - - @Test - public void testGetFileStatus() throws Exception { - StorageEntity entity = - gcsStorageOperator.getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(FULL_NAME, entity.getFullName()); - Assertions.assertEquals("dir1/", entity.getFileName()); - } - - @Test - public void testListFilesStatus() throws Exception { - Mockito.doReturn(null).when(gcsStorage).list(Mockito.any(), Mockito.any(Storage.BlobListOption.class), - Mockito.any(Storage.BlobListOption.class)); - List result = - gcsStorageOperator.listFilesStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - verify(gcsStorage, times(1)).list(Mockito.any(), Mockito.any(Storage.BlobListOption.class), - Mockito.any(Storage.BlobListOption.class)); - } - - @Test - public void testListFilesStatusRecursively() throws Exception { - StorageEntity entity = new StorageEntity(); - entity.setFullName(FULL_NAME); - - doReturn(entity).when(gcsStorageOperator).getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - doReturn(Collections.EMPTY_LIST).when(gcsStorageOperator).listFilesStatus(anyString(), anyString(), anyString(), - Mockito.any(ResourceType.class)); - - List result = - gcsStorageOperator.listFilesStatusRecursively(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } -} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/resources/logback.xml 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-gcs/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/pom.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/pom.xml index 2a0758494830..04e06df244c2 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/pom.xml +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/pom.xml @@ -228,5 +228,11 @@ + + + org.testcontainers + testcontainers + test + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java index 4479c93a3d19..e9a95eee11ee 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java @@ -17,340 +17,145 @@ package org.apache.dolphinscheduler.plugin.storage.hdfs; -import static org.apache.dolphinscheduler.common.constants.Constants.EMPTY_STRING; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; - import org.apache.dolphinscheduler.common.constants.Constants; -import 
org.apache.dolphinscheduler.common.enums.ResUploadType; -import org.apache.dolphinscheduler.common.exception.BaseException; -import org.apache.dolphinscheduler.common.utils.HttpUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.KerberosHttpClient; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; -import org.apache.commons.io.IOUtils; +import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.security.UserGroupInformation; import java.io.BufferedReader; import java.io.Closeable; import java.io.File; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.LinkedList; 
import java.util.List; -import java.util.Map; import java.util.stream.Collectors; -import java.util.stream.Stream; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import com.fasterxml.jackson.databind.node.ObjectNode; - @Slf4j -public class HdfsStorageOperator implements Closeable, StorageOperate { - - protected static HdfsStorageProperties hdfsProperties = new HdfsStorageProperties(); - private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY"; +public class HdfsStorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { - private volatile boolean yarnEnabled = false; + private final HdfsStorageProperties hdfsProperties; private Configuration configuration; private FileSystem fs; - public HdfsStorageOperator() { - this(new HdfsStorageProperties()); - } - public HdfsStorageOperator(HdfsStorageProperties hdfsStorageProperties) { + super(hdfsStorageProperties.getResourceUploadPath()); // Overwrite config from passing hdfsStorageProperties hdfsProperties = hdfsStorageProperties; init(); initHdfsPath(); } - /** - * init dolphinscheduler root path in hdfs - */ - + @SneakyThrows private void initHdfsPath() { - Path path = new Path(RESOURCE_UPLOAD_PATH); - try { - if (!fs.exists(path)) { - fs.mkdirs(path); - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - } - - /** - * init hadoop configuration - */ - public void init() throws NullPointerException { - try { - configuration = new HdfsConfiguration(); - - String hdfsUser = hdfsProperties.getUser(); - if (CommonUtils.loadKerberosConf(configuration)) { - hdfsUser = ""; - } - - String defaultFS = getDefaultFS(); - // first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file - // the default is the local file system - if (StringUtils.isNotBlank(defaultFS)) { - Map fsRelatedProps = PropertyUtils.getByPrefix("fs."); - configuration.set(Constants.HDFS_DEFAULT_FS, defaultFS); - fsRelatedProps.forEach((key, value) -> 
configuration.set(key, value)); - } else { - log.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULT_FS); - throw new NullPointerException( - String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULT_FS)); - } - - if (!defaultFS.startsWith("file")) { - log.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULT_FS, - defaultFS); - } - - if (StringUtils.isNotEmpty(hdfsUser)) { - UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); - ugi.doAs((PrivilegedExceptionAction) () -> { - fs = FileSystem.get(configuration); - return true; - }); + Path path = new Path(resourceBaseAbsolutePath); + if (!fs.exists(path)) { + if (!fs.mkdirs(path)) { + log.info("Create hdfs path: {} failed", path); } else { - log.warn("resource.hdfs.root.user is not set value!"); - fs = FileSystem.get(configuration); + log.error("Create hdfs path: {} success", path); } - - } catch (Exception e) { - log.error(e.getMessage(), e); } } - /** - * @return Configuration - */ - public Configuration getConfiguration() { - return configuration; - } - - /** - * @return DefaultFS - */ - public String getDefaultFS() { - String defaultFS = hdfsProperties.getDefaultFS(); - if (StringUtils.isBlank(defaultFS)) { - defaultFS = getConfiguration().get(Constants.HDFS_DEFAULT_FS); - } - return defaultFS; - } - - /** - * get application url - * if rmHaIds contains xx, it signs not use resourcemanager - * otherwise: - * if rmHaIds is empty, single resourcemanager enabled - * if rmHaIds not empty: resourcemanager HA enabled - * - * @param applicationId application id - * @return url of application - */ - public String getApplicationUrl(String applicationId) throws BaseException { - - yarnEnabled = true; - String appUrl = StringUtils.isEmpty(hdfsProperties.getYarnResourceRmIds()) - ? 
hdfsProperties.getYarnAppStatusAddress() - : getAppAddress(hdfsProperties.getYarnAppStatusAddress(), hdfsProperties.getYarnResourceRmIds()); - if (StringUtils.isBlank(appUrl)) { - throw new BaseException("yarn application url generation failed"); - } - log.debug("yarn application url:{}, applicationId:{}", appUrl, applicationId); - return String.format(appUrl, hdfsProperties.getHadoopResourceManagerHttpAddressPort(), applicationId); - } - - public String getJobHistoryUrl(String applicationId) { - // eg:application_1587475402360_712719 -> job_1587475402360_712719 - String jobId = applicationId.replace("application", "job"); - return String.format(hdfsProperties.getYarnJobHistoryStatusAddress(), jobId); - } + @SneakyThrows + private void init() { + configuration = new HdfsConfiguration(); - /** - * cat file on hdfs - * - * @param hdfsFilePath hdfs file path - * @return byte[] byte array - * @throws IOException errors - */ - public byte[] catFile(String hdfsFilePath) throws IOException { - - if (StringUtils.isBlank(hdfsFilePath)) { - log.error("hdfs file path:{} is blank", hdfsFilePath); - return new byte[0]; + if (MapUtils.isNotEmpty(hdfsProperties.getConfigurationProperties())) { + hdfsProperties.getConfigurationProperties().forEach((key, value) -> { + configuration.set(key, value); + log.info("Set HDFS prop: {} -> {}", key, value); + }); } - try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) { - return IOUtils.toByteArray(fsDataInputStream); + String defaultFS = hdfsProperties.getDefaultFS(); + if (StringUtils.isNotEmpty(defaultFS)) { + configuration.set(Constants.HDFS_DEFAULT_FS, hdfsProperties.getDefaultFS()); } - } - /** - * cat file on hdfs - * - * @param hdfsFilePath hdfs file path - * @param skipLineNums skip line numbers - * @param limit read how many lines - * @return content of file - * @throws IOException errors - */ - public List catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { - - if 
(StringUtils.isBlank(hdfsFilePath)) { - log.error("hdfs file path:{} is blank", hdfsFilePath); - return Collections.emptyList(); + if (CommonUtils.getKerberosStartupState()) { + CommonUtils.loadKerberosConf(configuration); + fs = FileSystem.get(configuration); + log.info("Initialize HdfsStorageOperator with kerberos"); + return; } - - try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) { - BufferedReader br = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); - Stream stream = br.lines().skip(skipLineNums).limit(limit); - return stream.collect(Collectors.toList()); + if (StringUtils.isNotEmpty(hdfsProperties.getUser())) { + UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsProperties.getUser()); + ugi.doAs((PrivilegedExceptionAction) () -> { + fs = FileSystem.get(configuration); + return true; + }); + UserGroupInformation.setLoginUser(ugi); + log.info("Initialize HdfsStorageOperator with remote user: {}", hdfsProperties.getUser()); + return; } - } - - @Override - public List vimFile(String bucketName, String hdfsFilePath, int skipLineNums, - int limit) throws IOException { - return catFile(hdfsFilePath, skipLineNums, limit); - } - - @Override - public void createTenantDirIfNotExists(String tenantCode) throws IOException { - mkdir(tenantCode, getHdfsResDir(tenantCode)); - mkdir(tenantCode, getHdfsUdfDir(tenantCode)); - } - - @Override - public String getResDir(String tenantCode) { - return getHdfsResDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public String getUdfDir(String tenantCode) { - return getHdfsUdfDir(tenantCode) + FOLDER_SEPARATOR; - } - - /** - * make the given file and all non-existent parents into - * directories. Has the semantics of Unix 'mkdir -p'. - * Existence of the directory hierarchy is not an error. 
- * - * @param hdfsPath path to create - * @return mkdir result - * @throws IOException errors - */ - @Override - public boolean mkdir(String tenantCode, String hdfsPath) throws IOException { - return fs.mkdirs(new Path(addFolderSeparatorIfNotExisted(hdfsPath))); - } + fs = FileSystem.get(configuration); + log.info("Initialize HdfsStorageOperator with default user"); - @Override - public String getResourceFullName(String tenantCode, String fullName) { - return getHdfsResourceFileName(tenantCode, fullName); } @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - return getHdfsFileName(resourceType, tenantCode, fileName); + public String getStorageBaseDirectory() { + String defaultFS = hdfsProperties.getDefaultFS(); + return FileUtils.concatFilePath(defaultFS, resourceBaseAbsolutePath); } + @SneakyThrows @Override - public void download(String srcHdfsFilePath, String dstFile, boolean overwrite) throws IOException { - copyHdfsToLocal(srcHdfsFilePath, dstFile, false, overwrite); + public List fetchFileContent(String hdfsFilePath, int skipLineNums, int limit) { + try ( + FSDataInputStream in = fs.open(new Path(hdfsFilePath)); + InputStreamReader inputStreamReader = new InputStreamReader(in, StandardCharsets.UTF_8); + BufferedReader br = new BufferedReader(inputStreamReader)) { + return br.lines() + .skip(skipLineNums) + .limit(limit) + .collect(Collectors.toList()); + } } - /** - * copy files between FileSystems - * - * @param srcPath source hdfs path - * @param dstPath destination hdfs path - * @param deleteSource whether to delete the src - * @param overwrite whether to overwrite an existing file - * @return if success or not - * @throws IOException errors - */ @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { - return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); - } - - /** - * the src 
file is on the local disk. Add it to FS at - * the given dst name. - * - * @param srcFile local file - * @param dstHdfsPath destination hdfs path - * @param deleteSource whether to delete the src - * @param overwrite whether to overwrite an existing file - * @return if success or not - * @throws IOException errors - */ - public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, - boolean overwrite) throws IOException { - Path srcPath = new Path(srcFile); - Path dstPath = new Path(dstHdfsPath); - - fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); - - return true; + @SneakyThrows + public void createStorageDir(String directoryAbsolutePath) { + Path path = new Path(directoryAbsolutePath); + if (fs.exists(path)) { + throw new FileAlreadyExistsException("Directory already exists: " + directoryAbsolutePath); + } + fs.mkdirs(new Path(directoryAbsolutePath)); } + @SneakyThrows @Override - public boolean upload(String buckName, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - return copyLocalToHdfs(srcFile, dstPath, deleteSource, overwrite); - } - - /** - * copy hdfs file to local - * - * @param srcHdfsFilePath source hdfs file path - * - * @param dstFile destination file - * - * @param deleteSource delete source - * - * @param overwrite overwrite - * - * @return result of copy hdfs file to local - * - * @throws IOException errors - */ - public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, - boolean overwrite) throws IOException { - + public void download(String srcHdfsFilePath, String dstFile, boolean overwrite) { Path srcPath = new Path(srcHdfsFilePath); File dstPath = new File(dstFile); @@ -365,336 +170,83 @@ public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean d } if (!dstPath.getParentFile().exists() && !dstPath.getParentFile().mkdirs()) { - return false; + throw new IOException("Failed to create parent 
directory for destination file"); } - return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf()); + FileUtil.copy(fs, srcPath, dstPath, false, fs.getConf()); } - /** - * delete a file - * - * @param hdfsFilePath the path to delete. - * @param recursive if path is a directory and set to - * true, the directory is deleted else throws an exception. In - * case of a file the recursive can be set to either true or false. - * @return true if delete is successful else false. - * @throws IOException errors - */ + @SneakyThrows @Override - public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { - return fs.delete(new Path(hdfsFilePath), recursive); + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { + FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); } - /** - * delete a list of files - * - * @param filePath the path to delete, usually it is a directory. - * @param recursive if path is a directory and set to - * true, the directory is deleted else throws an exception. In - * case of a file the recursive can be set to either true or false. - * @return true if delete is successful else false. 
- * @throws IOException errors - */ - + @SneakyThrows @Override - public boolean delete(String filePath, List childrenPathArray, boolean recursive) throws IOException { - if (filePath.endsWith("/")) { - return fs.delete(new Path(filePath), true); - } - return fs.delete(new Path(filePath), recursive); + public void upload(String srcAbsoluteFilePath, + String dstAbsoluteFilePath, + boolean deleteSource, + boolean overwrite) { + Path srcPath = new Path(srcAbsoluteFilePath); + Path dstPath = new Path(dstAbsoluteFilePath); + fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); } - /** - * check if exists - * - * @param hdfsFilePath source file path - * @return result of exists or not - * @throws IOException errors - */ + @SneakyThrows @Override - public boolean exists(String hdfsFilePath) throws IOException { - return fs.exists(new Path(hdfsFilePath)); + public void delete(String resourceAbsolutePath, boolean recursive) { + exceptionIfPathEmpty(resourceAbsolutePath); + fs.delete(new Path(resourceAbsolutePath), recursive); } - /** - * Gets a list of files in the directory - * - * @param path file fullName path - * @return {@link FileStatus} file status - * @throws IOException errors - */ + @SneakyThrows @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws IOException { - // TODO: Does listStatus truncate resultList if its size goes above certain threshold (like a 1000 in S3) - // TODO: add hdfs prefix getFile - List storageEntityList = new ArrayList<>(); - try { - Path filePath = new Path(path); - if (!fs.exists(filePath)) { - return storageEntityList; - } - FileStatus[] fileStatuses = fs.listStatus(filePath); - - // transform FileStatusArray into the StorageEntity List - for (FileStatus fileStatus : fileStatuses) { - if (fileStatus.isDirectory()) { - // the path is a directory - String fullName = fileStatus.getPath().toString(); - fullName = addFolderSeparatorIfNotExisted(fullName); - - 
String suffix = StringUtils.difference(path, fullName); - String fileName = StringUtils.difference(defaultPath, fullName); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(fullName); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(fileStatus.getLen()); - entity.setCreateTime(new Date(fileStatus.getModificationTime())); - entity.setUpdateTime(new Date(fileStatus.getModificationTime())); - entity.setPfullName(path); - - storageEntityList.add(entity); - } else { - // the path is a file - String fullName = fileStatus.getPath().toString(); - String[] aliasArr = fullName.split("/"); - String alias = aliasArr[aliasArr.length - 1]; - - String fileName = StringUtils.difference(defaultPath, fullName); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(fullName); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(fileStatus.getLen()); - entity.setCreateTime(new Date(fileStatus.getModificationTime())); - entity.setUpdateTime(new Date(fileStatus.getModificationTime())); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - } - } catch (FileNotFoundException e) { - throw new FileNotFoundException("The path does not exist."); - } catch (IOException e) { - throw new IOException("Get file list exception.", e); - } - - return storageEntityList; + public boolean exists(String resourceAbsolutePath) { + exceptionIfPathEmpty(resourceAbsolutePath); + return fs.exists(new Path(resourceAbsolutePath)); } + @SneakyThrows @Override - public StorageEntity getFileStatus(String path, String prefix, String tenantCode, - ResourceType type) throws IOException { - try { - FileStatus fileStatus = fs.getFileStatus(new Path(path)); - String alias = ""; - String fileName = ""; - String fullName = 
fileStatus.getPath().toString(); - if (fileStatus.isDirectory()) { - fullName = addFolderSeparatorIfNotExisted(fullName); - alias = findDirAlias(fullName); - fileName = StringUtils.difference(prefix, fullName); - } else { - String[] aliasArr = fileStatus.getPath().toString().split("/"); - alias = aliasArr[aliasArr.length - 1]; - fileName = StringUtils.difference(prefix, fileStatus.getPath().toString()); - } - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(fullName); - entity.setDirectory(fileStatus.isDirectory()); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(fileStatus.getLen()); - entity.setCreateTime(new Date(fileStatus.getModificationTime())); - entity.setUpdateTime(new Date(fileStatus.getModificationTime())); - entity.setPfullName(path); - - return entity; - } catch (FileNotFoundException e) { - throw new FileNotFoundException("The path does not exist."); - } catch (IOException e) { - throw new IOException("Get file exception.", e); - } - } - - /** - * Renames Path src to Path dst. Can take place on local fs - * or remote DFS. - * - * @param src path to be renamed - * @param dst new path after rename - * @return true if rename is successful - * @throws IOException on failure - */ - public boolean rename(String src, String dst) throws IOException { - return fs.rename(new Path(src), new Path(dst)); - } - - /** - * hadoop resourcemanager enabled or not - * - * @return result - */ - public boolean isYarnEnabled() { - return yarnEnabled; - } - - /** - * get data hdfs path - * - * @return data hdfs path - */ - public static String getHdfsDataBasePath() { - String defaultFS = hdfsProperties.getDefaultFS(); - defaultFS = defaultFS.endsWith("/") ? 
StringUtils.chop(defaultFS) : defaultFS; - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return defaultFS + ""; - } else { - return defaultFS + RESOURCE_UPLOAD_PATH; - } - } - - /** - * hdfs resource dir - * - * @param tenantCode tenant code - * @param resourceType resource type - * @return hdfs resource dir - */ - public static String getHdfsDir(ResourceType resourceType, String tenantCode) { - switch (resourceType) { - case UDF: - return getHdfsUdfDir(tenantCode); - case FILE: - return getHdfsResDir(tenantCode); - case ALL: - return getHdfsDataBasePath(); - default: - return EMPTY_STRING; + public List listStorageEntity(String resourceAbsolutePath) { + exceptionIfPathEmpty(resourceAbsolutePath); + Path path = new Path(resourceAbsolutePath); + if (!fs.exists(path)) { + return Collections.emptyList(); } + return Arrays.stream(fs.listStatus(new Path(resourceAbsolutePath))) + .map(this::transformFileStatusToResourceMetadata) + .collect(Collectors.toList()); } + @SneakyThrows @Override - public String getDir(ResourceType resourceType, String tenantCode) { - return getHdfsDir(resourceType, tenantCode); - } - - /** - * hdfs resource dir - * - * @param tenantCode tenant code - * @return hdfs resource dir - */ - public static String getHdfsResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getHdfsTenantDir(tenantCode)); - } - - /** - * hdfs udf dir - * - * @param tenantCode tenant code - * @return get udf dir on hdfs - */ - public static String getHdfsUdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getHdfsTenantDir(tenantCode)); - } + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + exceptionIfPathEmpty(resourceAbsolutePath); - /** - * get hdfs file name - * - * @param resourceType resource type - * @param tenantCode tenant code - * @param fileName file name - * @return hdfs file name - */ - public static String getHdfsFileName(ResourceType resourceType, String tenantCode, 
String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return String.format(FORMAT_S_S, getHdfsDir(resourceType, tenantCode), fileName); - } + List result = new ArrayList<>(); - /** - * get absolute path and name for resource file on hdfs - * - * @param tenantCode tenant code - * @param fileName file name - * @return get absolute path and name for file on hdfs - */ - public static String getHdfsResourceFileName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return String.format(FORMAT_S_S, getHdfsResDir(tenantCode), fileName); - } + LinkedList foldersToFetch = new LinkedList<>(); + foldersToFetch.addLast(resourceAbsolutePath); - /** - * get absolute path and name for udf file on hdfs - * - * @param tenantCode tenant code - * @param fileName file name - * @return get absolute path and name for udf file on hdfs - */ - public static String getHdfsUdfFileName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); + while (!foldersToFetch.isEmpty()) { + String absolutePath = foldersToFetch.pollFirst(); + RemoteIterator remoteIterator = fs.listFiles(new Path(absolutePath), true); + while (remoteIterator.hasNext()) { + LocatedFileStatus locatedFileStatus = remoteIterator.next(); + result.add(transformFileStatusToResourceMetadata(locatedFileStatus)); + } } - return String.format(FORMAT_S_S, getHdfsUdfDir(tenantCode), fileName); - } - - /** - * @param tenantCode tenant code - * @return file directory of tenants on hdfs - */ - public static String getHdfsTenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getHdfsDataBasePath(), tenantCode); + return result; } - /** - * getAppAddress - * - * @param appAddress app address - * @param rmHa resource manager ha - * @return app address - */ - public static 
String getAppAddress(String appAddress, String rmHa) { - - String[] split1 = appAddress.split(Constants.DOUBLE_SLASH); - - if (split1.length != 2) { - return null; - } - - String start = split1[0] + Constants.DOUBLE_SLASH; - String[] split2 = split1[1].split(Constants.COLON); - - if (split2.length != 2) { - return null; - } - - String end = Constants.COLON + split2[1]; - - // get active ResourceManager - String activeRM = YarnHAAdminUtils.getActiveRMName(start, rmHa); - - if (StringUtils.isEmpty(activeRM)) { - return null; - } - - return start + activeRM + end; + @SneakyThrows + @Override + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + exceptionIfPathEmpty(resourceAbsolutePath); + FileStatus fileStatus = fs.getFileStatus(new Path(resourceAbsolutePath)); + return transformFileStatusToResourceMetadata(fileStatus); } @Override @@ -709,144 +261,21 @@ public void close() throws IOException { } } - /** - * yarn ha admin utils - */ - private static final class YarnHAAdminUtils { + private StorageEntity transformFileStatusToResourceMetadata(FileStatus fileStatus) { + Path fileStatusPath = fileStatus.getPath(); + String fileAbsolutePath = fileStatusPath.toString(); + ResourceMetadata resourceMetaData = getResourceMetaData(fileAbsolutePath); - /** - * get active resourcemanager node - * - * @param protocol http protocol - * @param rmIds yarn ha ids - * @return yarn active node - */ - public static String getActiveRMName(String protocol, String rmIds) { - - String[] rmIdArr = rmIds.split(Constants.COMMA); - - String yarnUrl = - protocol + "%s:" + hdfsProperties.getHadoopResourceManagerHttpAddressPort() + "/ws/v1/cluster/info"; - - try { - - /** - * send http get request to rm - */ - - for (String rmId : rmIdArr) { - String state = getRMState(String.format(yarnUrl, rmId)); - if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmId; - } - } - - } catch (Exception e) { - log.error("yarn ha application url generation failed, message:{}", 
e.getMessage()); - } - return null; - } - - /** - * get ResourceManager state - */ - public static String getRMState(String url) { - - String retStr = Boolean.TRUE - .equals(hdfsProperties.isHadoopSecurityAuthStartupState()) - ? KerberosHttpClient.get(url) - : HttpUtils.get(url); - - if (StringUtils.isEmpty(retStr)) { - return null; - } - // to json - ObjectNode jsonObject = JSONUtils.parseObject(retStr); - - // get ResourceManager state - if (!jsonObject.has("clusterInfo")) { - return null; - } - return jsonObject.get("clusterInfo").path("haState").asText(); - } - - } - - @Override - public void deleteTenant(String tenantCode) throws Exception { - String tenantPath = getHdfsDataBasePath() + FOLDER_SEPARATOR + tenantCode; - - if (exists(tenantPath)) { - delete(tenantPath, true); - - } - } - - @Override - public ResUploadType returnStorageType() { - return ResUploadType.HDFS; + return StorageEntity.builder() + .fileName(fileStatusPath.getName()) + .fullName(fileAbsolutePath) + .pfullName(resourceMetaData.getResourceParentAbsolutePath()) + .type(resourceMetaData.getResourceType()) + .isDirectory(fileStatus.isDirectory()) + .size(fileStatus.getLen()) + .createTime(new Date(fileStatus.getModificationTime())) + .updateTime(new Date(fileStatus.getModificationTime())) + .build(); } - @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { - List storageEntityList = new ArrayList<>(); - - LinkedList foldersToFetch = new LinkedList<>(); - - do { - String pathToExplore = ""; - if (foldersToFetch.size() == 0) { - pathToExplore = path; - } else { - pathToExplore = foldersToFetch.pop().getFullName(); - } - - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - - for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); - } - } - - storageEntityList.addAll(tempList); - } catch (FileNotFoundException e) { - log.error("Resource path: {}", 
pathToExplore, e); - // return the resources fetched before error occurs. - return storageEntityList; - } catch (IOException e) { - log.error("Resource path: {}", pathToExplore, e); - // return the resources fetched before error occurs. - return storageEntityList; - } - - } while (foldersToFetch.size() != 0); - - return storageEntityList; - - } - - /** - * find alias for directories, NOT for files - * a directory is a path ending with "/" - */ - private String findDirAlias(String myStr) { - if (!myStr.endsWith("/")) { - // Make sure system won't crush down if someone accidentally misuse the function. - return myStr; - } - int lastIndex = myStr.lastIndexOf("/"); - String subbedString = myStr.substring(0, lastIndex); - int secondLastIndex = subbedString.lastIndexOf("/"); - StringBuilder stringBuilder = new StringBuilder(); - stringBuilder.append(myStr, secondLastIndex + 1, lastIndex + 1); - - return stringBuilder.toString(); - } - - private String addFolderSeparatorIfNotExisted(String fullName) { - return fullName.endsWith(FOLDER_SEPARATOR) ? 
fullName : fullName + FOLDER_SEPARATOR; - } } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorFactory.java index d2a6ef02624d..ed2aa60615f9 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorFactory.java @@ -17,20 +17,36 @@ package org.apache.dolphinscheduler.plugin.storage.hdfs; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import static org.apache.dolphinscheduler.common.constants.Constants.FS_DEFAULT_FS; +import static org.apache.dolphinscheduler.common.constants.Constants.HDFS_ROOT_USER; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; +import java.util.Map; + import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class HdfsStorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class HdfsStorageOperatorFactory implements StorageOperatorFactory { @Override - public StorageOperate createStorageOperate() { - HdfsStorageOperator hdfsOperator = new HdfsStorageOperator(); - hdfsOperator.init(); - return hdfsOperator; + public StorageOperator 
createStorageOperate() { + final HdfsStorageProperties hdfsStorageProperties = getHdfsStorageProperties(); + return new HdfsStorageOperator(hdfsStorageProperties); + } + + private HdfsStorageProperties getHdfsStorageProperties() { + Map configurationProperties = PropertyUtils.getByPrefix("fs."); + return HdfsStorageProperties.builder() + .user(PropertyUtils.getString(HDFS_ROOT_USER)) + .defaultFS(PropertyUtils.getString(FS_DEFAULT_FS)) + .configurationProperties(configurationProperties) + .resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .build(); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageProperties.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageProperties.java index a40529f584a4..48a2f1d17927 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageProperties.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageProperties.java @@ -17,66 +17,25 @@ package org.apache.dolphinscheduler.plugin.storage.hdfs; -import static org.apache.dolphinscheduler.common.constants.Constants.FS_DEFAULT_FS; -import static org.apache.dolphinscheduler.common.constants.Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT; -import static org.apache.dolphinscheduler.common.constants.Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE; -import static org.apache.dolphinscheduler.common.constants.Constants.HDFS_ROOT_USER; -import static org.apache.dolphinscheduler.common.constants.Constants.KERBEROS_EXPIRE_TIME; -import static org.apache.dolphinscheduler.common.constants.Constants.YARN_APPLICATION_STATUS_ADDRESS; -import static 
org.apache.dolphinscheduler.common.constants.Constants.YARN_JOB_HISTORY_STATUS_ADDRESS; -import static org.apache.dolphinscheduler.common.constants.Constants.YARN_RESOURCEMANAGER_HA_RM_IDS; - -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import java.util.Map; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; - -import org.springframework.context.annotation.Configuration; +import lombok.NoArgsConstructor; @Data -@Configuration +@Builder +@NoArgsConstructor +@AllArgsConstructor public class HdfsStorageProperties { - /** - * HDFS storage user - */ - private String user = PropertyUtils.getString(HDFS_ROOT_USER); - - /** - * HDFS default fs - */ - private String defaultFS = PropertyUtils.getString(FS_DEFAULT_FS); - - /** - * YARN resource manager HA RM ids - */ - private String yarnResourceRmIds = PropertyUtils.getString(YARN_RESOURCEMANAGER_HA_RM_IDS); - - /** - * YARN application status address - */ - private String yarnAppStatusAddress = PropertyUtils.getString(YARN_APPLICATION_STATUS_ADDRESS); + private String user; - /** - * YARN job history status address - */ - private String yarnJobHistoryStatusAddress = PropertyUtils.getString(YARN_JOB_HISTORY_STATUS_ADDRESS); + private String defaultFS; - /** - * Hadoop resouece manager http address port - */ - private String hadoopResourceManagerHttpAddressPort = - PropertyUtils.getString(HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT); + private Map configurationProperties; - /** - * Hadoop security authentication startup state - */ - private boolean hadoopSecurityAuthStartupState = - PropertyUtils.getBoolean(HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); + private String resourceUploadPath; - /** - * Kerberos expire time - */ - public static int getKerberosExpireTime() { - return PropertyUtils.getInt(KERBEROS_EXPIRE_TIME, 2); - } } diff --git 
a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperator.java index 173760b63203..31c8da50744b 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperator.java @@ -22,18 +22,8 @@ @Slf4j public class LocalStorageOperator extends HdfsStorageOperator { - public LocalStorageOperator() { - super(new HdfsStorageProperties()); - } - public LocalStorageOperator(HdfsStorageProperties hdfsStorageProperties) { super(hdfsStorageProperties); } - @Override - public String getResourceFileName(String tenantCode, String fullName) { - // prefix schema `file:/` should be remove in local file mode - String fullNameRemoveSchema = fullName.replaceFirst(hdfsProperties.getDefaultFS(), ""); - return super.getResourceFileName(tenantCode, fullNameRemoveSchema); - } } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorFactory.java index 5f44eca87ba0..f0bb1543aa52 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorFactory.java @@ -17,24 +17,32 @@ package 
org.apache.dolphinscheduler.plugin.storage.hdfs; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class LocalStorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class LocalStorageOperatorFactory implements StorageOperatorFactory { - private static final String LOCAL_DEFAULT_FS = "file:/"; + public static final String LOCAL_DEFAULT_FS = "file:/"; @Override - public StorageOperate createStorageOperate() { - HdfsStorageProperties hdfsStorageProperties = new HdfsStorageProperties(); - hdfsStorageProperties.setDefaultFS(LOCAL_DEFAULT_FS); + public StorageOperator createStorageOperate() { + final HdfsStorageProperties hdfsStorageProperties = getHdfsStorageProperties(); return new LocalStorageOperator(hdfsStorageProperties); } + private HdfsStorageProperties getHdfsStorageProperties() { + return HdfsStorageProperties.builder() + .defaultFS(LOCAL_DEFAULT_FS) + .resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .build(); + } + @Override public StorageType getStorageOperate() { return StorageType.LOCAL; diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorTest.java index 
63931cba787c..ee7daa2418df 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorTest.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperatorTest.java @@ -17,62 +17,183 @@ package org.apache.dolphinscheduler.plugin.storage.hdfs; -import org.apache.dolphinscheduler.common.utils.HttpUtils; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; import org.apache.dolphinscheduler.spi.enums.ResourceType; -import org.junit.jupiter.api.Assertions; +import java.io.File; +import java.nio.file.FileAlreadyExistsException; +import java.time.Duration; +import java.util.List; +import java.util.stream.Stream; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.MockedStatic; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * hadoop utils test - */ -@ExtendWith(MockitoExtension.class) -public class HdfsStorageOperatorTest { +import org.testcontainers.containers.ComposeContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; + +class HdfsStorageOperatorTest { + + private static HdfsStorageOperator storageOperator; + + 
private static ComposeContainer hdfsContainer; + + @BeforeAll + public static void setUp() throws InterruptedException { + String hdfsDockerComposeFilePath = + HdfsStorageOperatorTest.class.getResource("/hadoop-docker-compose/docker-compose.yaml").getFile(); + hdfsContainer = new ComposeContainer(new File(hdfsDockerComposeFilePath)) + .withPull(true) + .withTailChildContainers(true) + .withLocalCompose(true) + .waitingFor("namenode", Wait.forHealthcheck().withStartupTimeout(Duration.ofSeconds(60))) + .waitingFor("datanode", Wait.forHealthcheck().withStartupTimeout(Duration.ofSeconds(60))); + + Startables.deepStart(Stream.of(hdfsContainer)).join(); + + HdfsStorageProperties hdfsStorageProperties = HdfsStorageProperties.builder() + .resourceUploadPath("/tmp/dolphinscheduler") + .user("hadoop") + .defaultFS("hdfs://localhost") + // The default replication factor is 3, which is too large for the test environment. + // So we set it to 1. + .configurationProperties(ImmutableMap.of("dfs.replication", "1")) + .build(); + storageOperator = new HdfsStorageOperator(hdfsStorageProperties); + } + + @BeforeEach + public void initializeStorageFile() { + storageOperator.delete("hdfs://localhost/tmp/dolphinscheduler/test-default", true); + storageOperator.createStorageDir("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/empty"); + storageOperator.createStorageDir("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql"); + // todo: upload file and add file case + } + + @Test + public void testGetResourceMetaData() { + ResourceMetadata resourceMetaData = + storageOperator.getResourceMetaData( + "hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sqlDirectory/demo.sql"); + assertEquals("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sqlDirectory/demo.sql", + resourceMetaData.getResourceAbsolutePath()); + assertEquals("hdfs://localhost/tmp/dolphinscheduler", resourceMetaData.getResourceBaseDirectory()); + assertEquals("test-default", 
resourceMetaData.getTenant()); + assertEquals(ResourceType.FILE, resourceMetaData.getResourceType()); + assertEquals("sqlDirectory/demo.sql", resourceMetaData.getResourceRelativePath()); + assertEquals("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sqlDirectory", + resourceMetaData.getResourceParentAbsolutePath()); + assertFalse(resourceMetaData.isDirectory()); + } + + @Test + public void testGetStorageBaseDirectory() { + assertEquals("hdfs://localhost/tmp/dolphinscheduler", storageOperator.getStorageBaseDirectory()); + } + + @Test + public void testGetStorageBaseDirectory_withTenantCode() { + assertEquals("hdfs://localhost/tmp/dolphinscheduler/default", + storageOperator.getStorageBaseDirectory("default")); + } + + @Test + public void testGetStorageBaseDirectory_withTenantCode_withFile() { + assertEquals("hdfs://localhost/tmp/dolphinscheduler/default/resources", + storageOperator.getStorageBaseDirectory("default", ResourceType.FILE)); + } - private static final Logger logger = LoggerFactory.getLogger(HdfsStorageOperatorTest.class); + @Test + public void testGetStorageBaseDirectory_withTenantCode_withAll() { + assertEquals("hdfs://localhost/tmp/dolphinscheduler/default", + storageOperator.getStorageBaseDirectory("default", ResourceType.ALL)); + } @Test - public void getHdfsTenantDir() { - HdfsStorageOperator hdfsStorageOperator = new HdfsStorageOperator(); - logger.info(hdfsStorageOperator.getHdfsTenantDir("1234")); - Assertions.assertTrue(true); + public void testGetStorageFileAbsolutePath() { + assertEquals("hdfs://localhost/tmp/dolphinscheduler/default/resources/a.sql", + storageOperator.getStorageFileAbsolutePath("default", "a.sql")); } @Test - public void getHdfsUdfFileName() { - HdfsStorageOperator hdfsStorageOperator = new HdfsStorageOperator(); - logger.info(hdfsStorageOperator.getHdfsUdfFileName("admin", "file_name")); - Assertions.assertTrue(true); + public void testCreateStorageDir_notExist() { + 
storageOperator.createStorageDir("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/newDirectory"); + storageOperator.exists("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/newDirectory"); } @Test - public void getHdfsResourceFileName() { - HdfsStorageOperator hdfsStorageOperator = new HdfsStorageOperator(); - logger.info(hdfsStorageOperator.getHdfsResourceFileName("admin", "file_name")); - Assertions.assertTrue(true); + public void testCreateStorageDir_exist() { + assertThrows(FileAlreadyExistsException.class, + () -> storageOperator + .createStorageDir("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/empty")); } @Test - public void getHdfsFileName() { - HdfsStorageOperator hdfsStorageOperator = new HdfsStorageOperator(); - logger.info(hdfsStorageOperator.getHdfsFileName(ResourceType.FILE, "admin", "file_name")); - Assertions.assertTrue(true); + public void testExist_DirectoryExist() { + assertThat(storageOperator.exists("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql")) + .isTrue(); } @Test - public void getAppAddress() { - HdfsStorageOperator hdfsStorageOperator = new HdfsStorageOperator(); - try (MockedStatic mockedHttpUtils = Mockito.mockStatic(HttpUtils.class)) { - mockedHttpUtils.when(() -> HttpUtils.get("http://ds1:8088/ws/v1/cluster/info")) - .thenReturn("{\"clusterInfo\":{\"state\":\"STARTED\",\"haState\":\"ACTIVE\"}}"); - logger.info(hdfsStorageOperator.getAppAddress("http://ds1:8088/ws/v1/cluster/apps/%s", "ds1,ds2")); - Assertions.assertTrue(true); + public void testExist_DirectoryNotExist() { + assertThat( + storageOperator.exists("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/notExist")) + .isFalse(); + } + + @Test + public void testDelete_directoryExist() { + storageOperator.delete("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql", true); + assertThat(storageOperator.exists("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql")) + .isFalse(); + } + + 
@Test + public void testDelete_directoryNotExist() { + storageOperator.delete("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/non", true); + assertThat(storageOperator.exists("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/non")) + .isFalse(); + } + + @Test + public void testListStorageEntity_directory() { + List storageEntities = + storageOperator.listStorageEntity("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/"); + assertThat(storageEntities).hasSize(2); + } + + @Test + public void testGetStorageEntity_directory() { + StorageEntity storageEntity = + storageOperator.getStorageEntity("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql"); + assertThat(storageEntity.getFullName()) + .isEqualTo("hdfs://localhost/tmp/dolphinscheduler/test-default/resources/sql"); + assertThat(storageEntity.isDirectory()).isTrue(); + assertThat(storageEntity.getPfullName()) + .isEqualTo("hdfs://localhost/tmp/dolphinscheduler/test-default/resources"); + assertThat(storageEntity.getType()).isEqualTo(ResourceType.FILE); + assertThat(storageEntity.getFileName()).isEqualTo("sql"); + } + + @SneakyThrows + @AfterAll + public static void tearDown() { + if (storageOperator != null) { + storageOperator.close(); + } + if (hdfsContainer != null) { + hdfsContainer.stop(); } } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorTest.java new file mode 100644 index 000000000000..8c20d92566e1 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/java/org/apache/dolphinscheduler/plugin/storage/hdfs/LocalStorageOperatorTest.java @@ -0,0 +1,314 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.storage.hdfs; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; +import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.List; + +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class LocalStorageOperatorTest { + + private StorageOperator storageOperator; + + private static final String resourceBaseDir = + Paths.get(LocalStorageOperatorTest.class.getResource("/").getFile(), "localStorage").toString(); + private static final String tenantCode = "default"; + private static final String baseDir = + Paths.get(resourceBaseDir, tenantCode, Constants.RESOURCE_TYPE_FILE).toString(); + + @SneakyThrows + 
@BeforeEach + public void setup() { + Files.createDirectories(Paths.get(resourceBaseDir)); + System.clearProperty(Constants.RESOURCE_UPLOAD_PATH); + System.setProperty(Constants.RESOURCE_UPLOAD_PATH, resourceBaseDir); + + LocalStorageOperatorFactory localStorageOperatorFactory = new LocalStorageOperatorFactory(); + storageOperator = localStorageOperatorFactory.createStorageOperate(); + // create file and directory + Files.createDirectories(Paths.get(baseDir, "sqlDirectory")); + Files.createDirectories(Paths.get(baseDir, "emptyDirectory")); + Files.createFile(Paths.get(baseDir, "sqlDirectory", "demo.sql")); + Files.write(Paths.get(baseDir, "sqlDirectory", "demo.sql"), "select * from demo".getBytes()); + + } + + @Test + public void testGetResourceMetaData_directory() { + String resourceFileAbsolutePath = "file:" + baseDir; + + ResourceMetadata resourceMetaData = storageOperator.getResourceMetaData(resourceFileAbsolutePath); + assertThat(resourceMetaData.getResourceAbsolutePath()).isEqualTo("file:" + baseDir); + assertThat(resourceMetaData.getResourceBaseDirectory()).isEqualTo("file:" + resourceBaseDir); + assertThat(resourceMetaData.getTenant()).isEqualTo("default"); + assertThat(resourceMetaData.getResourceType()).isEqualTo(ResourceType.FILE); + assertThat(resourceMetaData.getResourceRelativePath()).isEqualTo("/"); + } + + @Test + public void testGetResourceMetaData_file() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql"); + + ResourceMetadata resourceMetaData = storageOperator.getResourceMetaData(resourceFileAbsolutePath); + assertThat(resourceMetaData.getResourceAbsolutePath()).isEqualTo(resourceFileAbsolutePath); + assertThat(resourceMetaData.getResourceBaseDirectory()).isEqualTo("file:" + resourceBaseDir); + assertThat(resourceMetaData.getTenant()).isEqualTo("default"); + assertThat(resourceMetaData.getResourceType()).isEqualTo(ResourceType.FILE); + 
assertThat(resourceMetaData.getResourceRelativePath()).isEqualTo("sqlDirectory/demo.sql"); + } + + @Test + public void testGetResourceMetaData_invalidatedPath() { + String resourceFileAbsolutePath = Paths.get(baseDir, "sqlDirectory", "demo.sql").toString(); + + IllegalArgumentException illegalArgumentException = assertThrows(IllegalArgumentException.class, + () -> storageOperator.getResourceMetaData(resourceFileAbsolutePath)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("Invalid resource path: " + resourceFileAbsolutePath); + } + + @Test + public void testGetStorageBaseDirectory() { + String storageBaseDirectory = storageOperator.getStorageBaseDirectory(); + assertThat(storageBaseDirectory).isEqualTo("file:" + resourceBaseDir); + } + + @Test + public void testGetStorageBaseDirectory_withTenant() { + String storageBaseDirectory = storageOperator.getStorageBaseDirectory("default"); + assertThat(storageBaseDirectory).isEqualTo("file:" + Paths.get(resourceBaseDir, tenantCode)); + } + + @Test + public void testGetStorageBaseDirectory_withTenant_withResourceTypeFile() { + String storageBaseDirectory = storageOperator.getStorageBaseDirectory("default", ResourceType.FILE); + assertThat(storageBaseDirectory) + .isEqualTo("file:" + Paths.get(resourceBaseDir, tenantCode, Constants.RESOURCE_TYPE_FILE)); + } + + @Test + public void testGetStorageBaseDirectory_withTenant_withResourceTypeAll() { + String storageBaseDirectory = storageOperator.getStorageBaseDirectory("default", ResourceType.ALL); + assertThat(storageBaseDirectory).isEqualTo("file:" + Paths.get(resourceBaseDir, tenantCode)); + } + + @Test + public void testGetStorageBaseDirectory_withEmptyTenant_withResourceType() { + IllegalArgumentException illegalArgumentException = assertThrows(IllegalArgumentException.class, + () -> storageOperator.getStorageBaseDirectory("", ResourceType.ALL)); + assertThat(illegalArgumentException.getMessage()).isEqualTo("Tenant code should not be empty"); + } + + @Test 
+ public void testGetStorageBaseDirectory_withTenant_withEmptyResourceType() { + IllegalArgumentException illegalArgumentException = assertThrows(IllegalArgumentException.class, + () -> storageOperator.getStorageBaseDirectory("default", null)); + assertThat(illegalArgumentException.getMessage()).isEqualTo("Resource type should not be null"); + } + + @Test + public void testGetStorageFileAbsolutePath() { + String fileAbsolutePath = storageOperator.getStorageFileAbsolutePath("default", "test.sh"); + assertThat(fileAbsolutePath).isEqualTo( + "file:" + Paths.get(resourceBaseDir, tenantCode, Constants.RESOURCE_TYPE_FILE, "test.sh")); + } + + @SneakyThrows + @Test + public void testCreateStorageDir_notExists() { + String testDirFileAbsolutePath = + "file:" + Paths.get(resourceBaseDir, "root", Constants.RESOURCE_TYPE_FILE, "testDir"); + try { + storageOperator.createStorageDir(testDirFileAbsolutePath); + StorageEntity storageEntity = storageOperator.getStorageEntity(testDirFileAbsolutePath); + assertThat(storageEntity.getFullName()).isEqualTo(testDirFileAbsolutePath); + assertThat(storageEntity.getFileName()).isEqualTo("testDir"); + assertThat(storageEntity.getPfullName()) + .isEqualTo("file:" + Paths.get(resourceBaseDir, "root", Constants.RESOURCE_TYPE_FILE)); + assertThat(storageEntity.isDirectory()).isTrue(); + assertThat(storageEntity.getType()).isEqualTo(ResourceType.FILE); + } finally { + storageOperator.delete(testDirFileAbsolutePath, true); + } + } + + @SneakyThrows + @Test + public void testCreateStorageDir_exists() { + String testDirFileAbsolutePath = + "file:" + Paths.get(resourceBaseDir, "default", Constants.RESOURCE_TYPE_FILE, "sqlDirectory"); + assertThrows(FileAlreadyExistsException.class, () -> storageOperator.createStorageDir(testDirFileAbsolutePath)); + } + + @Test + public void testExists_fileExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql"); + 
assertThat(storageOperator.exists(resourceFileAbsolutePath)).isTrue(); + } + + @Test + public void testExists_fileNotExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sh"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testExists_directoryExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isTrue(); + } + + @Test + public void testExists_directoryNotExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "shellDirectory"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testDelete_directoryExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isTrue(); + + storageOperator.delete(resourceFileAbsolutePath, true); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testDelete_directoryNotExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "shellDirectory"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + + storageOperator.delete(resourceFileAbsolutePath, true); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testDelete_fileExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql"); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isTrue(); + + storageOperator.delete(resourceFileAbsolutePath, true); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testDelete_fileNotExist() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sh"); + 
assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + + storageOperator.delete(resourceFileAbsolutePath, true); + assertThat(storageOperator.exists(resourceFileAbsolutePath)).isFalse(); + } + + @Test + public void testFetchFileContent() { + // todo: add large file test case + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql"); + List<String> content = storageOperator.fetchFileContent(resourceFileAbsolutePath, 0, 10); + assertThat(content).containsExactly("select * from demo"); + + } + + @Test + public void testListStorageEntity_directoryNotEmpty() { + String resourceFileAbsolutePath = "file:" + baseDir; + List<StorageEntity> storageEntities = storageOperator.listStorageEntity(resourceFileAbsolutePath); + assertThat(storageEntities.size()).isEqualTo(2); + + StorageEntity storageEntity1 = storageEntities.get(0); + assertThat(storageEntity1.getFullName()).isEqualTo("file:" + baseDir + "/emptyDirectory"); + assertThat(storageEntity1.getFileName()).isEqualTo("emptyDirectory"); + assertThat(storageEntity1.getPfullName()).isEqualTo("file:" + baseDir); + assertThat(storageEntity1.isDirectory()).isTrue(); + assertThat(storageEntity1.getType()).isEqualTo(ResourceType.FILE); + + StorageEntity storageEntity2 = storageEntities.get(1); + assertThat(storageEntity2.getFullName()).isEqualTo("file:" + baseDir + "/sqlDirectory"); + assertThat(storageEntity2.getFileName()).isEqualTo("sqlDirectory"); + assertThat(storageEntity2.getPfullName()).isEqualTo("file:" + baseDir); + assertThat(storageEntity2.isDirectory()).isTrue(); + assertThat(storageEntity2.getType()).isEqualTo(ResourceType.FILE); + } + + @Test + public void testListStorageEntity_directoryEmpty() { + String resourceFileAbsolutePath = "file:" + baseDir + "/emptyDirectory"; + List<StorageEntity> storageEntities = storageOperator.listStorageEntity(resourceFileAbsolutePath); + assertThat(storageEntities.size()).isEqualTo(0); + } + + @Test + public void testListStorageEntity_directoryNotExist() { + String 
resourceFileAbsolutePath = "file:" + baseDir + "/notExistDirectory"; + assertThat(storageOperator.listStorageEntity(resourceFileAbsolutePath)).isEmpty(); + } + + @Test + public void testListStorageEntity_file() { + String resourceFileAbsolutePath = "file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql"); + List<StorageEntity> storageEntities = storageOperator.listStorageEntity(resourceFileAbsolutePath); + assertThat(storageEntities.size()).isEqualTo(1); + + StorageEntity storageEntity = storageEntities.get(0); + assertThat(storageEntity.getFullName()).isEqualTo("file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql")); + assertThat(storageEntity.getFileName()).isEqualTo("demo.sql"); + assertThat(storageEntity.getPfullName()).isEqualTo("file:" + Paths.get(baseDir, "sqlDirectory")); + assertThat(storageEntity.isDirectory()).isFalse(); + assertThat(storageEntity.getType()).isEqualTo(ResourceType.FILE); + + } + + @Test + public void testListStorageEntityRecursively_directory() { + String resourceFileAbsolutePath = "file:" + baseDir; + List<StorageEntity> storageEntities = + storageOperator.listFileStorageEntityRecursively(resourceFileAbsolutePath); + assertThat(storageEntities.size()).isEqualTo(1); + + StorageEntity storageEntity2 = storageEntities.get(0); + assertThat(storageEntity2.getFullName()).isEqualTo("file:" + Paths.get(baseDir, "sqlDirectory", "demo.sql")); + assertThat(storageEntity2.getFileName()).isEqualTo("demo.sql"); + assertThat(storageEntity2.getPfullName()).isEqualTo("file:" + Paths.get(baseDir, "sqlDirectory")); + assertThat(storageEntity2.isDirectory()).isFalse(); + assertThat(storageEntity2.getType()).isEqualTo(ResourceType.FILE); + } + + @SneakyThrows + @AfterEach + public void after() { + FileUtils.deleteFile(resourceBaseDir); + } + +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/docker-compose.yaml 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/docker-compose.yaml new file mode 100644 index 000000000000..60ede8faf841 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/docker-compose.yaml @@ -0,0 +1,110 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+version: "3" + +services: + namenode: + image: sbloodys/hadoop:3.3.6 + hostname: namenode + command: [ "hdfs", "namenode" ] + ports: + - 9870:9870 + - 8020:8020 + env_file: + - ./hadoop.env + environment: + ENSURE_NAMENODE_DIR: "/tmp/hadoop-root/dfs/name" + logging: + driver: "json-file" + options: + max-size: "200m" + max-file: "1" + tty: true + stdin_open: true + restart: always + healthcheck: + test: [ "CMD", "curl", "http://namenode:9870" ] + interval: 5s + timeout: 5s + retries: 120 + datanode: + image: sbloodys/hadoop:3.3.6 + hostname: datanode + command: [ "hdfs", "datanode" ] + env_file: + - ./hadoop.env + logging: + driver: "json-file" + options: + max-size: "200m" + max-file: "1" + ports: + - 9864:9864 + tty: true + stdin_open: true + restart: always + healthcheck: + test: [ "CMD", "curl", "http://datanode:9864" ] + interval: 5s + timeout: 5s + retries: 120 + depends_on: + namenode: + condition: service_healthy + resourcemanager: + image: sbloodys/hadoop:3.3.6 + hostname: resourcemanager + command: [ "yarn", "resourcemanager" ] + ports: + - 8088:8088 + env_file: + - ./hadoop.env + logging: + driver: "json-file" + options: + max-size: "200m" + max-file: "1" + tty: true + stdin_open: true + restart: always + healthcheck: + test: [ "CMD", "curl", "http://resourcemanager:8088" ] + interval: 5s + timeout: 5s + retries: 120 + nodemanager: + image: sbloodys/hadoop:3.3.6 + hostname: nodemanager + command: [ "yarn", "nodemanager" ] + env_file: + - ./hadoop.env + logging: + driver: "json-file" + options: + max-size: "200m" + max-file: "1" + tty: true + stdin_open: true + restart: always + depends_on: + resourcemanager: + condition: service_healthy + healthcheck: + test: [ "CMD", "curl", "http://nodemanager:8042" ] + interval: 5s + timeout: 5s + retries: 120 diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/hadoop.env 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/hadoop.env new file mode 100644 index 000000000000..701cf3471f11 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/hadoop-docker-compose/hadoop.env @@ -0,0 +1,45 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +HADOOP_HOME=/opt/hadoop +CORE-SITE.XML_fs.default.name=hdfs://namenode +CORE-SITE.XML_fs.defaultFS=hdfs://namenode +HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:8020 +HDFS-SITE.XML_dfs.replication=1 +CORE-SITE.XML_hadoop.http.staticuser.user=hadoop +MAPRED-SITE.XML_mapreduce.framework.name=yarn +MAPRED-SITE.XML_yarn.app.mapreduce.am.env=HADOOP_MAPRED_HOME=$HADOOP_HOME +MAPRED-SITE.XML_mapreduce.map.env=HADOOP_MAPRED_HOME=$HADOOP_HOME +MAPRED-SITE.XML_mapreduce.reduce.env=HADOOP_MAPRED_HOME=$HADOOP_HOME +YARN-SITE.XML_yarn.resourcemanager.hostname=resourcemanager +YARN-SITE.XML_yarn.nodemanager.pmem-check-enabled=false +YARN-SITE.XML_yarn.nodemanager.delete.debug-delay-sec=600 +YARN-SITE.XML_yarn.nodemanager.vmem-check-enabled=false +YARN-SITE.XML_yarn.nodemanager.aux-services=mapreduce_shuffle +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.maximum-applications=10000 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.maximum-am-resource-percent=0.1 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.resource-calculator=org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.queues=default +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.capacity=100 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.user-limit-factor=1 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.maximum-capacity=100 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.state=RUNNING +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.acl_submit_applications=* +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.root.default.acl_administer_queue=* +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.node-locality-delay=40 +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.queue-mappings= +CAPACITY-SCHEDULER.XML_yarn.scheduler.capacity.queue-mappings-override.enable=false \ No newline at end of file diff --git 
a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/logback.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/hello.sh b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/hello.sh new file mode 100644 index 000000000000..9ee358318988 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/hello.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +echo hello \ No newline at end of file diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/sql/demo.sql b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/sql/demo.sql new file mode 100644 index 000000000000..83ebbac8d0c8 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/test/resources/storage/sql/demo.sql @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +select * from t; \ No newline at end of file diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperator.java index b644210048ef..6f67ccb36320 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperator.java @@ -17,19 +17,12 @@ package org.apache.dolphinscheduler.plugin.storage.obs; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; - import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.lang3.StringUtils; @@ -37,153 +30,83 @@ import java.io.ByteArrayInputStream; import java.io.Closeable; 
import java.io.File; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.Data; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import com.obs.services.ObsClient; import com.obs.services.exception.ObsException; -import com.obs.services.internal.ServiceException; -import com.obs.services.model.DeleteObjectsRequest; -import com.obs.services.model.GetObjectRequest; import com.obs.services.model.ListObjectsRequest; import com.obs.services.model.ObjectListing; import com.obs.services.model.ObjectMetadata; import com.obs.services.model.ObsObject; import com.obs.services.model.PutObjectRequest; -@Data @Slf4j -public class ObsStorageOperator implements Closeable, StorageOperate { - - private String accessKeyId; - - private String accessKeySecret; - - private String bucketName; +public class ObsStorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { - private String endPoint; + private final String bucketName; - private ObsClient obsClient; + private final ObsClient obsClient; - public ObsStorageOperator() { - } - - public void init() { - this.accessKeyId = readObsAccessKeyID(); - this.accessKeySecret = readObsAccessKeySecret(); - this.endPoint = readObsEndPoint(); - this.bucketName = readObsBucketName(); - this.obsClient = buildObsClient(); + public ObsStorageOperator(ObsStorageProperties obsStorageProperties) { + super(obsStorageProperties.getResourceUploadPath()); + this.bucketName = obsStorageProperties.getBucketName(); + this.obsClient = new 
ObsClient( + obsStorageProperties.getAccessKeyId(), + obsStorageProperties.getAccessKeySecret(), + obsStorageProperties.getEndPoint()); ensureBucketSuccessfullyCreated(bucketName); } - protected String readObsAccessKeyID() { - return PropertyUtils.getString(TaskConstants.HUAWEI_CLOUD_ACCESS_KEY_ID); - } - - protected String readObsAccessKeySecret() { - return PropertyUtils.getString(TaskConstants.HUAWEI_CLOUD_ACCESS_KEY_SECRET); - } - - protected String readObsBucketName() { - return PropertyUtils.getString(Constants.HUAWEI_CLOUD_OBS_BUCKET_NAME); - } - - protected String readObsEndPoint() { - return PropertyUtils.getString(Constants.HUAWEI_CLOUD_OBS_END_POINT); - } - @Override public void close() throws IOException { obsClient.close(); } @Override - public void createTenantDirIfNotExists(String tenantCode) throws Exception { - mkdir(tenantCode, getObsResDir(tenantCode)); - mkdir(tenantCode, getObsUdfDir(tenantCode)); - } - - @Override - public String getResDir(String tenantCode) { - return getObsResDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public String getUdfDir(String tenantCode) { - return getObsUdfDir(tenantCode) + FOLDER_SEPARATOR; + public String getStorageBaseDirectory() { + // All directory should end with File.separator + if (resourceBaseAbsolutePath.startsWith("/")) { + log.warn("{} -> {} should not start with / in obs", Constants.RESOURCE_UPLOAD_PATH, + resourceBaseAbsolutePath); + return resourceBaseAbsolutePath.substring(1); + } + return resourceBaseAbsolutePath; } + @SneakyThrows @Override - public boolean mkdir(String tenantCode, String path) throws IOException { - final String key = path + FOLDER_SEPARATOR; - if (!obsClient.doesObjectExist(bucketName, key)) { - createObsPrefix(bucketName, key); + public void createStorageDir(String directoryAbsolutePath) { + directoryAbsolutePath = transformAbsolutePathToObsKey(directoryAbsolutePath); + if (obsClient.doesObjectExist(bucketName, directoryAbsolutePath)) { + throw new 
FileAlreadyExistsException("directory: " + directoryAbsolutePath + " already exists"); } - return true; - } - - protected void createObsPrefix(final String bucketName, final String key) { ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(0L); InputStream emptyContent = new ByteArrayInputStream(new byte[0]); - PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, emptyContent); + PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, directoryAbsolutePath, emptyContent); obsClient.putObject(putObjectRequest); } + @SneakyThrows @Override - public String getResourceFullName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return String.format(FORMAT_S_S, getObsResDir(tenantCode), fileName); - } + public void download(String srcFilePath, String dstFilePath, boolean overwrite) { + srcFilePath = transformAbsolutePathToObsKey(srcFilePath); - @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return getDir(resourceType, tenantCode) + fileName; - } - - @Override - public boolean delete(String fullName, List childrenPathList, boolean recursive) throws IOException { - // append the resource fullName to the list for deletion. 
- childrenPathList.add(fullName); - - DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName); - for (String deleteKys : childrenPathList) { - deleteObjectsRequest.addKeyAndVersion(deleteKys); - } - - try { - obsClient.deleteObjects(deleteObjectsRequest); - } catch (Exception e) { - log.error("delete objects error", e); - return false; - } - - return true; - } - - @Override - public void download(String srcFilePath, String dstFilePath, boolean overwrite) throws IOException { File dstFile = new File(dstFilePath); if (dstFile.isDirectory()) { Files.delete(dstFile.toPath()); @@ -199,277 +122,115 @@ public void download(String srcFilePath, String dstFilePath, boolean overwrite) while ((readLen = obsInputStream.read(readBuf)) > 0) { fos.write(readBuf, 0, readLen); } - } catch (ObsException e) { - throw new IOException(e); - } catch (FileNotFoundException e) { - log.error("cannot find the destination file {}", dstFilePath); - throw e; } } @Override - public boolean exists(String fileName) throws IOException { + public boolean exists(String fileName) { + fileName = transformAbsolutePathToObsKey(fileName); return obsClient.doesObjectExist(bucketName, fileName); } @Override - public boolean delete(String filePath, boolean recursive) throws IOException { - try { - obsClient.deleteObject(bucketName, filePath); - return true; - } catch (ObsException e) { - log.error("fail to delete the object, the resource path is {}", filePath, e); - return false; - } + public void delete(String filePath, boolean recursive) { + filePath = transformAbsolutePathToObsKey(filePath); + obsClient.deleteObject(bucketName, filePath); } @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { + srcPath = transformAbsolutePathToObsKey(srcPath); + dstPath = transformAbsolutePathToObsKey(dstPath); 
obsClient.copyObject(bucketName, srcPath, bucketName, dstPath); if (deleteSource) { obsClient.deleteObject(bucketName, srcPath); } - return true; - } - - @Override - public String getDir(ResourceType resourceType, String tenantCode) { - switch (resourceType) { - case UDF: - return getUdfDir(tenantCode); - case FILE: - return getResDir(tenantCode); - case ALL: - return getObsDataBasePath(); - default: - return ""; - } } + @SneakyThrows @Override - public boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - try { - obsClient.putObject(bucketName, dstPath, new File(srcFile)); - if (deleteSource) { - Files.delete(Paths.get(srcFile)); + public void upload(String srcFile, String dstPath, boolean deleteSource, boolean overwrite) { + dstPath = transformAbsolutePathToObsKey(dstPath); + if (obsClient.doesObjectExist(bucketName, dstPath)) { + if (!overwrite) { + throw new ObsException("file: " + dstPath + " already exists"); + } else { + obsClient.deleteObject(bucketName, dstPath); } - return true; - } catch (ObsException e) { - log.error("upload failed, the bucketName is {}, the filePath is {}", bucketName, dstPath, e); - return false; - } } - - @Override - public List<String> vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { - if (StringUtils.isBlank(filePath)) { - log.error("file path:{} is empty", filePath); - return Collections.emptyList(); - } - ObsObject obsObject = obsClient.getObject(bucketName, filePath); - try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(obsObject.getObjectContent()))) { - Stream<String> stream = bufferedReader.lines().skip(skipLineNums).limit(limit); - return stream.collect(Collectors.toList()); + obsClient.putObject(bucketName, dstPath, new File(srcFile)); + if (deleteSource) { + Files.delete(Paths.get(srcFile)); } - } - @Override - public ResUploadType returnStorageType() { - return ResUploadType.OBS; } + 
@SneakyThrows @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { - List storageEntityList = new ArrayList<>(); - LinkedList foldersToFetch = new LinkedList<>(); - - StorageEntity initialEntity = null; - try { - initialEntity = getFileStatus(path, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", path, e); - return storageEntityList; - } - foldersToFetch.add(initialEntity); - - while (!foldersToFetch.isEmpty()) { - String pathToExplore = foldersToFetch.pop().getFullName(); - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); - } - } - storageEntityList.addAll(tempList); - } catch (Exception e) { - log.error("error while listing files stat:wus recursively, path: {}", pathToExplore, e); - } + public List fetchFileContent(String filePath, int skipLineNums, int limit) { + filePath = transformAbsolutePathToObsKey(filePath); + ObsObject obsObject = obsClient.getObject(bucketName, filePath); + try ( + InputStreamReader inputStreamReader = new InputStreamReader(obsObject.getObjectContent()); + BufferedReader bufferedReader = new BufferedReader(inputStreamReader)) { + return bufferedReader + .lines() + .skip(skipLineNums) + .limit(limit) + .collect(Collectors.toList()); } - - return storageEntityList; } @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - List storageEntityList = new ArrayList<>(); + public List listStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath = transformObsKeyToAbsolutePath(resourceAbsolutePath); ListObjectsRequest request = new ListObjectsRequest(); request.setBucketName(bucketName); - request.setPrefix(path); - request.setDelimiter(FOLDER_SEPARATOR); - ObjectListing result 
= null; - try { - result = obsClient.listObjects(request); - } catch (Exception e) { - throw new ServiceException("Get ObsClient file list exception", e); - } - - while (result != null) { - String nextMarker = result.getNextMarker(); - List objects = result.getObjects(); - - for (ObsObject object : objects) { - if (!object.getObjectKey().endsWith(FOLDER_SEPARATOR)) { - // the path is a file - String[] aliasArr = object.getObjectKey().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, object.getObjectKey()); - - StorageEntity entity = new StorageEntity(); - ObjectMetadata metadata = object.getMetadata(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(object.getObjectKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(metadata.getContentLength()); - entity.setCreateTime(metadata.getLastModified()); - entity.setUpdateTime(metadata.getLastModified()); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - } + request.setPrefix(resourceAbsolutePath); + request.setDelimiter("/"); - for (String commonPrefix : result.getCommonPrefixes()) { - // the paths in commonPrefix are directories - String suffix = StringUtils.difference(path, commonPrefix); - String fileName = StringUtils.difference(defaultPath, commonPrefix); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(commonPrefix); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - entity.setCreateTime(null); - entity.setUpdateTime(null); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - - if (!StringUtils.isNotBlank(nextMarker)) { - break; - } + ObjectListing result = obsClient.listObjects(request); - request.setMarker(nextMarker); - try { - result = obsClient.listObjects(request); - } 
catch (Exception e) { - throw new ServiceException("Get ObsClient file list exception", e); - } - } - return storageEntityList; + return result.getObjects() + .stream() + .map(this::transformObsObjectToStorageEntity) + .collect(Collectors.toList()); } @Override - public StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + resourceAbsolutePath = transformObsKeyToAbsolutePath(resourceAbsolutePath); - if (path.endsWith(FOLDER_SEPARATOR)) { - // the path is a directory that may or may not exist in ObsClient - String alias = findDirAlias(path); - String fileName = StringUtils.difference(defaultPath, path); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(path); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - - return entity; + Set visited = new HashSet<>(); + List storageEntityList = new ArrayList<>(); + LinkedList foldersToFetch = new LinkedList<>(); + foldersToFetch.addLast(resourceAbsolutePath); - } else { - GetObjectRequest request = new GetObjectRequest(); - request.setBucketName(bucketName); - request.setObjectKey(path); - ObsObject object; - try { - object = obsClient.getObject(request); - } catch (Exception e) { - throw new ServiceException("Get ObsClient file list exception", e); + while (!foldersToFetch.isEmpty()) { + String pathToExplore = foldersToFetch.pop(); + visited.add(pathToExplore); + List tempList = listStorageEntity(pathToExplore); + for (StorageEntity temp : tempList) { + if (temp.isDirectory()) { + if (visited.contains(temp.getFullName())) { + continue; + } + foldersToFetch.add(temp.getFullName()); + } } - - String[] aliasArr = object.getObjectKey().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = 
StringUtils.difference(defaultPath, object.getObjectKey()); - - StorageEntity entity = new StorageEntity(); - ObjectMetadata metadata = object.getMetadata(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(object.getObjectKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(metadata.getContentLength()); - entity.setCreateTime(metadata.getLastModified()); - entity.setUpdateTime(metadata.getLastModified()); - - return entity; + storageEntityList.addAll(tempList); } + return storageEntityList; } @Override - public void deleteTenant(String tenantCode) throws Exception { - deleteTenantCode(tenantCode); - } - - public String getObsResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getObsTenantDir(tenantCode)); - } + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath = transformObsKeyToAbsolutePath(resourceAbsolutePath); - public String getObsUdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getObsTenantDir(tenantCode)); - } - - public String getObsTenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getObsDataBasePath(), tenantCode); - } - - public String getObsDataBasePath() { - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return ""; - } else { - return RESOURCE_UPLOAD_PATH.replaceFirst(FOLDER_SEPARATOR, ""); - } - } - - protected void deleteTenantCode(String tenantCode) { - deleteDir(getResDir(tenantCode)); - deleteDir(getUdfDir(tenantCode)); + ObsObject object = obsClient.getObject(bucketName, resourceAbsolutePath); + return transformObsObjectToStorageEntity(object); } public void ensureBucketSuccessfullyCreated(String bucketName) { @@ -486,22 +247,37 @@ public void ensureBucketSuccessfullyCreated(String bucketName) { log.info("bucketName: {} has been found", bucketName); } - protected void deleteDir(String directoryName) { - if 
(obsClient.doesObjectExist(bucketName, directoryName)) { - obsClient.deleteObject(bucketName, directoryName); + protected StorageEntity transformObsObjectToStorageEntity(ObsObject object) { + ObjectMetadata metadata = object.getMetadata(); + String fileAbsolutePath = transformObsKeyToAbsolutePath(object.getObjectKey()); + ResourceMetadata resourceMetaData = getResourceMetaData(fileAbsolutePath); + String fileExtension = com.google.common.io.Files.getFileExtension(resourceMetaData.getResourceAbsolutePath()); + + return StorageEntity.builder() + .fileName(new File(fileAbsolutePath).getName()) + .fullName(fileAbsolutePath) + .pfullName(resourceMetaData.getResourceParentAbsolutePath()) + .type(resourceMetaData.getResourceType()) + .isDirectory(StringUtils.isEmpty(fileExtension)) + .size(metadata.getContentLength()) + .createTime(metadata.getLastModified()) + .updateTime(metadata.getLastModified()) + .build(); + } + + private String transformAbsolutePathToObsKey(String absolutePath) { + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + if (resourceMetaData.isDirectory()) { + return FileUtils.concatFilePath(absolutePath, "/"); } + return absolutePath; } - protected ObsClient buildObsClient() { - return new ObsClient(accessKeyId, accessKeySecret, endPoint); - } - - private String findDirAlias(String dirPath) { - if (!dirPath.endsWith(FOLDER_SEPARATOR)) { - return dirPath; + private String transformObsKeyToAbsolutePath(String s3Key) { + if (s3Key.endsWith("/")) { + return s3Key.substring(0, s3Key.length() - 1); } - - Path path = Paths.get(dirPath); - return path.getName(path.getNameCount() - 1) + FOLDER_SEPARATOR; + return s3Key; } + } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorFactory.java 
index 2e6710393137..66ba23ee480b 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorFactory.java @@ -17,23 +17,32 @@ package org.apache.dolphinscheduler.plugin.storage.obs; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; +import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import com.google.auto.service.AutoService; -@AutoService({StorageOperateFactory.class}) -public class ObsStorageOperatorFactory implements StorageOperateFactory { +@AutoService({StorageOperatorFactory.class}) +public class ObsStorageOperatorFactory implements StorageOperatorFactory { - public ObsStorageOperatorFactory() { + @Override + public StorageOperator createStorageOperate() { + final ObsStorageProperties obsStorageProperties = getObsStorageProperties(); + return new ObsStorageOperator(obsStorageProperties); } - @Override - public StorageOperate createStorageOperate() { - ObsStorageOperator ossOperator = new ObsStorageOperator(); - ossOperator.init(); - return ossOperator; + private ObsStorageProperties getObsStorageProperties() { + return ObsStorageProperties.builder() + .accessKeyId(PropertyUtils.getString(TaskConstants.HUAWEI_CLOUD_ACCESS_KEY_ID)) + .accessKeySecret(PropertyUtils.getString(TaskConstants.HUAWEI_CLOUD_ACCESS_KEY_SECRET)) + 
.bucketName(PropertyUtils.getString(Constants.HUAWEI_CLOUD_OBS_BUCKET_NAME)) + .endPoint(PropertyUtils.getString(Constants.HUAWEI_CLOUD_OBS_END_POINT)) + .resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .build(); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageProperties.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageProperties.java new file mode 100644 index 000000000000..a71e9b21b752 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/main/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageProperties.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.storage.obs; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@AllArgsConstructor +@NoArgsConstructor +public class ObsStorageProperties { + + private String accessKeyId; + + private String accessKeySecret; + + private String bucketName; + + private String endPoint; + + private String resourceUploadPath; +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorTest.java deleted file mode 100644 index 4aabdb6bd820..000000000000 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/java/org/apache/dolphinscheduler/plugin/storage/obs/ObsStorageOperatorTest.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.storage.obs; - -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -import com.obs.services.ObsClient; - -@ExtendWith(MockitoExtension.class) -public class ObsStorageOperatorTest { - - private static final String ACCESS_KEY_ID_MOCK = "ACCESS_KEY_ID_MOCK"; - private static final String ACCESS_KEY_SECRET_MOCK = "ACCESS_KEY_SECRET_MOCK"; - private static final String END_POINT_MOCK = "END_POINT_MOCK"; - private static final String BUCKET_NAME_MOCK = "BUCKET_NAME_MOCK"; - private static final String TENANT_CODE_MOCK = "TENANT_CODE_MOCK"; - private static final String DIR_MOCK = "DIR_MOCK"; - private static final String FILE_NAME_MOCK = "FILE_NAME_MOCK"; - private static final String FILE_PATH_MOCK = "FILE_PATH_MOCK"; - private static final String FULL_NAME = "/tmp/dir1/"; - - private static final String DEFAULT_PATH = "/tmp/"; - @Mock - private ObsClient obsClientMock; - - private ObsStorageOperator obsOperator; - - @BeforeEach - public void setUp() throws Exception { - obsOperator = spy(new 
ObsStorageOperator()); - doReturn(ACCESS_KEY_ID_MOCK).when(obsOperator) - .readObsAccessKeyID(); - doReturn(ACCESS_KEY_SECRET_MOCK).when(obsOperator) - .readObsAccessKeySecret(); - doReturn(BUCKET_NAME_MOCK).when(obsOperator).readObsBucketName(); - doReturn(END_POINT_MOCK).when(obsOperator).readObsEndPoint(); - doReturn(obsClientMock).when(obsOperator).buildObsClient(); - doNothing().when(obsOperator).ensureBucketSuccessfullyCreated(any()); - - obsOperator.init(); - - } - - @Test - public void initObsOperator() { - verify(obsOperator, times(1)).buildObsClient(); - Assertions.assertEquals(ACCESS_KEY_ID_MOCK, obsOperator.getAccessKeyId()); - Assertions.assertEquals(ACCESS_KEY_SECRET_MOCK, obsOperator.getAccessKeySecret()); - Assertions.assertEquals(BUCKET_NAME_MOCK, obsOperator.getBucketName()); - } - - @Test - public void tearDownObsOperator() throws IOException { - doNothing().when(obsClientMock).close(); - obsOperator.close(); - verify(obsClientMock, times(1)).close(); - } - - @Test - public void createTenantResAndUdfDir() throws Exception { - doReturn(DIR_MOCK).when(obsOperator).getObsResDir(TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(obsOperator).getObsUdfDir(TENANT_CODE_MOCK); - doReturn(true).when(obsOperator).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - obsOperator.createTenantDirIfNotExists(TENANT_CODE_MOCK); - verify(obsOperator, times(2)).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - } - - @Test - public void getResDir() { - final String expectedResourceDir = String.format("dolphinscheduler/%s/resources/", TENANT_CODE_MOCK); - final String dir = obsOperator.getResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedResourceDir, dir); - } - - @Test - public void getUdfDir() { - final String expectedUdfDir = String.format("dolphinscheduler/%s/udfs/", TENANT_CODE_MOCK); - final String dir = obsOperator.getUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedUdfDir, dir); - } - - @Test - public void mkdirWhenDirExists() { - boolean isSuccess = false; - try { - 
final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(true).when(obsClientMock).doesObjectExist(BUCKET_NAME_MOCK, key); - isSuccess = obsOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(obsClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void mkdirWhenDirNotExists() { - boolean isSuccess = true; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(false).when(obsClientMock).doesObjectExist(BUCKET_NAME_MOCK, key); - doNothing().when(obsOperator).createObsPrefix(BUCKET_NAME_MOCK, key); - isSuccess = obsOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(obsClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - verify(obsOperator, times(1)).createObsPrefix(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void getResourceFullName() { - final String expectedResourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFullName = obsOperator.getResourceFullName(TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedResourceFullName, resourceFullName); - } - - @Test - public void getResourceFileName() { - final String expectedResourceFileName = FILE_NAME_MOCK; - final String resourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFileName = obsOperator.getResourceFileName(TENANT_CODE_MOCK, resourceFullName); - Assertions.assertEquals(expectedResourceFileName, resourceFileName); - } - - @Test - public void getFileName() { - final String expectedFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String fileName = 
obsOperator.getFileName(ResourceType.FILE, TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedFileName, fileName); - } - - @Test - public void exists() { - boolean doesExist = false; - doReturn(true).when(obsClientMock).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); - try { - doesExist = obsOperator.exists(FILE_NAME_MOCK); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(doesExist); - verify(obsClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); - } - - @Test - public void delete() { - boolean isDeleted = false; - doReturn(null).when(obsClientMock).deleteObject(anyString(), anyString()); - try { - isDeleted = obsOperator.delete(FILE_NAME_MOCK, true); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isDeleted); - verify(obsClientMock, times(1)).deleteObject(anyString(), anyString()); - } - - @Test - public void copy() { - boolean isSuccess = false; - doReturn(null).when(obsClientMock).copyObject(anyString(), anyString(), anyString(), anyString()); - try { - isSuccess = obsOperator.copy(FILE_PATH_MOCK, FILE_PATH_MOCK, false, false); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isSuccess); - verify(obsClientMock, times(1)).copyObject(anyString(), anyString(), anyString(), anyString()); - } - - @Test - public void deleteTenant() { - doNothing().when(obsOperator).deleteTenantCode(anyString()); - try { - obsOperator.deleteTenant(TENANT_CODE_MOCK); - } catch (Exception e) { - Assertions.fail("unexpected exception caught in unit test"); - } - - verify(obsOperator, times(1)).deleteTenantCode(anyString()); - } - - @Test - public void getObsResDir() { - final String expectedObsResDir = String.format("dolphinscheduler/%s/resources", TENANT_CODE_MOCK); - final String obsResDir = obsOperator.getObsResDir(TENANT_CODE_MOCK); - 
Assertions.assertEquals(expectedObsResDir, obsResDir); - } - - @Test - public void getObsUdfDir() { - final String expectedObsUdfDir = String.format("dolphinscheduler/%s/udfs", TENANT_CODE_MOCK); - final String obsUdfDir = obsOperator.getObsUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedObsUdfDir, obsUdfDir); - } - - @Test - public void getObsTenantDir() { - final String expectedObsTenantDir = String.format(FORMAT_S_S, DIR_MOCK, TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(obsOperator).getObsDataBasePath(); - final String obsTenantDir = obsOperator.getObsTenantDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedObsTenantDir, obsTenantDir); - } - - @Test - public void deleteDir() { - doReturn(true).when(obsClientMock).doesObjectExist(anyString(), anyString()); - obsOperator.deleteDir(DIR_MOCK); - verify(obsClientMock, times(1)).deleteObject(anyString(), anyString()); - } - - @Test - public void testGetFileStatus() throws Exception { - StorageEntity entity = obsOperator.getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(FULL_NAME, entity.getFullName()); - Assertions.assertEquals("dir1/", entity.getFileName()); - } - - @Test - public void testListFilesStatus() throws Exception { - List result = - obsOperator.listFilesStatus("dolphinscheduler/default/resources/", - "dolphinscheduler/default/resources/", - "default", ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } - - @Test - public void testListFilesStatusRecursively() throws Exception { - StorageEntity entity = new StorageEntity(); - entity.setFullName(FULL_NAME); - - doReturn(entity).when(obsOperator).getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - doReturn(Collections.EMPTY_LIST).when(obsOperator).listFilesStatus(anyString(), anyString(), anyString(), - Mockito.any(ResourceType.class)); - - List result = - obsOperator.listFilesStatusRecursively(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, 
ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } -} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/resources/logback.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-obs/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java index 61754b52a738..afdc6874c9ab 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java @@ -17,21 +17,16 @@ package org.apache.dolphinscheduler.plugin.storage.oss; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; - import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.factory.OssClientFactory; import org.apache.dolphinscheduler.common.model.OssConnection; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import 
org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.commons.lang3.StringUtils; @@ -44,23 +39,22 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.Data; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import com.aliyun.oss.OSS; import com.aliyun.oss.OSSException; -import com.aliyun.oss.ServiceException; -import com.aliyun.oss.model.DeleteObjectsRequest; +import com.aliyun.oss.model.GetObjectRequest; import com.aliyun.oss.model.ListObjectsV2Request; import com.aliyun.oss.model.ListObjectsV2Result; import com.aliyun.oss.model.OSSObject; @@ -68,9 +62,8 @@ import com.aliyun.oss.model.ObjectMetadata; import com.aliyun.oss.model.PutObjectRequest; -@Data @Slf4j -public class OssStorageOperator implements Closeable, StorageOperate { +public class OssStorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { private String accessKeyId; @@ -86,10 +79,11 @@ public class OssStorageOperator implements Closeable, StorageOperate { private OSS ossClient; - public OssStorageOperator() { + public OssStorageOperator(String resourceBaseAbsolutePath) { + super(resourceBaseAbsolutePath); } 
- public void init() { + private void init() { this.accessKeyId = readOssAccessKeyID(); this.accessKeySecret = readOssAccessKeySecret(); this.endPoint = readOssEndPoint(); @@ -108,7 +102,7 @@ public void init(OssConnection ossConnection) { this.region = readOssRegion(); this.bucketName = readOssBucketName(); this.ossConnection = ossConnection; - this.ossClient = getOssClient(); + this.ossClient = buildOssClient(); ensureBucketSuccessfullyCreated(bucketName); } @@ -137,79 +131,43 @@ protected OssConnection buildOssConnection() { } @Override - public void close() throws IOException { - ossClient.shutdown(); - } - - @Override - public void createTenantDirIfNotExists(String tenantCode) throws Exception { - mkdir(tenantCode, getOssResDir(tenantCode)); - mkdir(tenantCode, getOssUdfDir(tenantCode)); + public String getStorageBaseDirectory() { + // All directory should end with File.separator + if (resourceBaseAbsolutePath.startsWith("/")) { + log.warn("{} -> {} should not start with / in Oss", Constants.RESOURCE_UPLOAD_PATH, + resourceBaseAbsolutePath); + return resourceBaseAbsolutePath.substring(1); + } + return resourceBaseAbsolutePath; } @Override - public String getResDir(String tenantCode) { - return getOssResDir(tenantCode) + FOLDER_SEPARATOR; + public void close() throws IOException { + ossClient.shutdown(); } + @SneakyThrows @Override - public String getUdfDir(String tenantCode) { - return getOssUdfDir(tenantCode) + FOLDER_SEPARATOR; - } + public void createStorageDir(String directory) { + directory = transformAbsolutePathToOssKey(directory); - @Override - public boolean mkdir(String tenantCode, String path) throws IOException { - final String key = path + FOLDER_SEPARATOR; - if (!ossClient.doesObjectExist(bucketName, key)) { - createOssPrefix(bucketName, key); + if (ossClient.doesObjectExist(bucketName, directory)) { + throw new FileAlreadyExistsException("directory: " + directory + " already exists"); } - return true; - } - - protected void createOssPrefix(final 
String bucketName, final String key) { ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(0); InputStream emptyContent = new ByteArrayInputStream(new byte[0]); - PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, emptyContent, metadata); + PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, directory, emptyContent, metadata); ossClient.putObject(putObjectRequest); } + @SneakyThrows @Override - public String getResourceFullName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return String.format(FORMAT_S_S, getOssResDir(tenantCode), fileName); - } - - @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return getDir(resourceType, tenantCode) + fileName; - } - - @Override - public boolean delete(String fullName, List childrenPathList, boolean recursive) throws IOException { - // append the resource fullName to the list for deletion. 
- childrenPathList.add(fullName); - - DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName) - .withKeys(childrenPathList); - try { - ossClient.deleteObjects(deleteObjectsRequest); - } catch (Exception e) { - log.error("delete objects error", e); - return false; - } + public void download(String srcFilePath, + String dstFilePath, + boolean overwrite) { + srcFilePath = transformAbsolutePathToOssKey(srcFilePath); - return true; - } - - @Override - public void download(String srcFilePath, String dstFilePath, - boolean overwrite) throws IOException { File dstFile = new File(dstFilePath); if (dstFile.isDirectory()) { Files.delete(dstFile.toPath()); @@ -234,300 +192,198 @@ public void download(String srcFilePath, String dstFilePath, } @Override - public boolean exists(String fileName) throws IOException { + public boolean exists(String fileName) { + fileName = transformAbsolutePathToOssKey(fileName); return ossClient.doesObjectExist(bucketName, fileName); } @Override - public boolean delete(String filePath, boolean recursive) throws IOException { - try { - ossClient.deleteObject(bucketName, filePath); - return true; - } catch (OSSException e) { - log.error("fail to delete the object, the resource path is {}", filePath, e); - return false; - } + public void delete(String filePath, boolean recursive) { + filePath = transformAbsolutePathToOssKey(filePath); + ossClient.deleteObject(bucketName, filePath); } @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { + srcPath = transformAbsolutePathToOssKey(srcPath); + dstPath = transformAbsolutePathToOssKey(dstPath); + ossClient.copyObject(bucketName, srcPath, bucketName, dstPath); if (deleteSource) { ossClient.deleteObject(bucketName, srcPath); } - return true; - } - - @Override - public String getDir(ResourceType resourceType, String 
tenantCode) { - switch (resourceType) { - case UDF: - return getUdfDir(tenantCode); - case FILE: - return getResDir(tenantCode); - case ALL: - return getOssDataBasePath(); - default: - return ""; - } } + @SneakyThrows @Override - public boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - try { - ossClient.putObject(bucketName, dstPath, new File(srcFile)); - if (deleteSource) { - Files.delete(Paths.get(srcFile)); + public void upload(String srcFile, String dstPath, boolean deleteSource, boolean overwrite) { + dstPath = transformAbsolutePathToOssKey(dstPath); + if (ossClient.doesObjectExist(bucketName, dstPath)) { + if (!overwrite) { + throw new FileAlreadyExistsException("file: " + dstPath + " already exists"); + } else { + ossClient.deleteObject(bucketName, dstPath); } - return true; - } catch (OSSException e) { - log.error("upload failed, the bucketName is {}, the filePath is {}", bucketName, dstPath, e); - return false; + + } + ossClient.putObject(bucketName, dstPath, new File(srcFile)); + if (deleteSource) { + Files.delete(Paths.get(srcFile)); } } + @SneakyThrows @Override - public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { - if (StringUtils.isBlank(filePath)) { - log.error("file path:{} is empty", filePath); - return Collections.emptyList(); - } + public List fetchFileContent(String filePath, int skipLineNums, int limit) { + filePath = transformAbsolutePathToOssKey(filePath); OSSObject ossObject = ossClient.getObject(bucketName, filePath); - try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(ossObject.getObjectContent()))) { - Stream stream = bufferedReader.lines().skip(skipLineNums).limit(limit); - return stream.collect(Collectors.toList()); + try ( + InputStreamReader inputStreamReader = new InputStreamReader(ossObject.getObjectContent()); + BufferedReader bufferedReader = new 
BufferedReader(inputStreamReader)) { + return bufferedReader.lines() + .skip(skipLineNums) + .limit(limit) + .collect(Collectors.toList()); } } @Override - public ResUploadType returnStorageType() { - return ResUploadType.OSS; + public List listStorageEntity(String resourceAbsolutePath) { + final String ossResourceAbsolutePath = transformAbsolutePathToOssKey(resourceAbsolutePath); + + ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request() + .withBucketName(bucketName) + .withDelimiter("/") + .withPrefix(ossResourceAbsolutePath); + + ListObjectsV2Result listObjectsV2Result = ossClient.listObjectsV2(listObjectsV2Request); + List storageEntities = new ArrayList<>(); + storageEntities.addAll(listObjectsV2Result.getCommonPrefixes() + .stream() + .map(this::transformCommonPrefixToStorageEntity) + .collect(Collectors.toList())); + storageEntities.addAll( + listObjectsV2Result.getObjectSummaries().stream() + .filter(s3ObjectSummary -> !s3ObjectSummary.getKey().equals(resourceAbsolutePath)) + .map(this::transformOSSObjectToStorageEntity) + .collect(Collectors.toList())); + + return storageEntities; + } @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + resourceAbsolutePath = transformOssKeyToAbsolutePath(resourceAbsolutePath); + + Set visited = new HashSet<>(); List storageEntityList = new ArrayList<>(); - LinkedList foldersToFetch = new LinkedList<>(); - - StorageEntity initialEntity = null; - try { - initialEntity = getFileStatus(path, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", path, e); - return storageEntityList; - } - foldersToFetch.add(initialEntity); + LinkedList foldersToFetch = new LinkedList<>(); + foldersToFetch.addLast(resourceAbsolutePath); while (!foldersToFetch.isEmpty()) { - String pathToExplore = 
foldersToFetch.pop().getFullName(); - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); + String pathToExplore = foldersToFetch.pop(); + visited.add(pathToExplore); + List tempList = listStorageEntity(pathToExplore); + for (StorageEntity temp : tempList) { + if (temp.isDirectory()) { + if (visited.contains(temp.getFullName())) { + continue; } + foldersToFetch.add(temp.getFullName()); } - storageEntityList.addAll(tempList); - } catch (Exception e) { - log.error("error while listing files stat:wus recursively, path: {}", pathToExplore, e); } + storageEntityList.addAll(tempList); } return storageEntityList; } @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - List storageEntityList = new ArrayList<>(); - - ListObjectsV2Result result = null; - String nextContinuationToken = null; - do { - try { - ListObjectsV2Request request = new ListObjectsV2Request(); - request.setBucketName(bucketName); - request.setPrefix(path); - request.setDelimiter(FOLDER_SEPARATOR); - request.setContinuationToken(nextContinuationToken); - - result = ossClient.listObjectsV2(request); - } catch (Exception e) { - throw new ServiceException("Get OSS file list exception", e); - } - - List summaries = result.getObjectSummaries(); - - for (OSSObjectSummary summary : summaries) { - if (!summary.getKey().endsWith(FOLDER_SEPARATOR)) { - // the path is a file - String[] aliasArr = summary.getKey().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, summary.getKey()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(summary.getKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - 
entity.setSize(summary.getSize()); - entity.setCreateTime(summary.getLastModified()); - entity.setUpdateTime(summary.getLastModified()); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - } - - for (String commonPrefix : result.getCommonPrefixes()) { - // the paths in commonPrefix are directories - String suffix = StringUtils.difference(path, commonPrefix); - String fileName = StringUtils.difference(defaultPath, commonPrefix); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(commonPrefix); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - entity.setCreateTime(null); - entity.setUpdateTime(null); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - - nextContinuationToken = result.getNextContinuationToken(); - } while (result.isTruncated()); - - return storageEntityList; + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath = transformAbsolutePathToOssKey(resourceAbsolutePath); + OSSObject object = ossClient.getObject(new GetObjectRequest(bucketName, resourceAbsolutePath)); + return transformOSSObjectToStorageEntity(object); } - @Override - public StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws Exception { - ListObjectsV2Request request = new ListObjectsV2Request(); - request.setBucketName(bucketName); - request.setPrefix(path); - request.setDelimiter(FOLDER_SEPARATOR); - - ListObjectsV2Result result; - try { - result = ossClient.listObjectsV2(request); - } catch (Exception e) { - throw new ServiceException("Get OSS file list exception", e); + public void ensureBucketSuccessfullyCreated(String bucketName) { + if (StringUtils.isBlank(bucketName)) { + throw new IllegalArgumentException("resource.alibaba.cloud.oss.bucket.name is empty"); } - List summaries = result.getObjectSummaries(); - - if 
(path.endsWith(FOLDER_SEPARATOR)) { - // the path is a directory that may or may not exist in OSS - String alias = findDirAlias(path); - String fileName = StringUtils.difference(defaultPath, path); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(path); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - - return entity; - - } else { - // the path is a file - if (summaries.size() > 0) { - OSSObjectSummary summary = summaries.get(0); - String[] aliasArr = summary.getKey().split(FOLDER_SEPARATOR); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, summary.getKey()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(summary.getKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(summary.getSize()); - entity.setCreateTime(summary.getLastModified()); - entity.setUpdateTime(summary.getLastModified()); - - return entity; - } + boolean existsBucket = ossClient.doesBucketExist(bucketName); + if (!existsBucket) { + throw new IllegalArgumentException( + "bucketName: " + bucketName + " is not exists, you need to create them by yourself"); } - throw new FileNotFoundException("Object is not found in OSS Bucket: " + bucketName); + log.info("bucketName: {} has been found, the current regionName is {}", bucketName, region); } - @Override - public void deleteTenant(String tenantCode) throws Exception { - deleteTenantCode(tenantCode); + protected OSS buildOssClient() { + return OssClientFactory.buildOssClient(ossConnection); } - public String getOssResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getOssTenantDir(tenantCode)); - } + protected StorageEntity transformOSSObjectToStorageEntity(OSSObject ossObject) { + ResourceMetadata 
resourceMetaData = getResourceMetaData(ossObject.getKey()); - public String getOssUdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getOssTenantDir(tenantCode)); + StorageEntity storageEntity = new StorageEntity(); + storageEntity.setFileName(new File(ossObject.getKey()).getName()); + storageEntity.setFullName(ossObject.getKey()); + storageEntity.setPfullName(resourceMetaData.getResourceParentAbsolutePath()); + storageEntity.setType(resourceMetaData.getResourceType()); + storageEntity.setDirectory(resourceMetaData.isDirectory()); + storageEntity.setSize(ossObject.getObjectMetadata().getContentLength()); + storageEntity.setCreateTime(ossObject.getObjectMetadata().getLastModified()); + storageEntity.setUpdateTime(ossObject.getObjectMetadata().getLastModified()); + return storageEntity; } - public String getOssTenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getOssDataBasePath(), tenantCode); - } + private StorageEntity transformOSSObjectToStorageEntity(OSSObjectSummary ossObjectSummary) { + String absolutePath = transformOssKeyToAbsolutePath(ossObjectSummary.getKey()); - public String getOssDataBasePath() { - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return ""; - } else { - return RESOURCE_UPLOAD_PATH.replaceFirst(FOLDER_SEPARATOR, ""); - } - } + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); - protected void deleteTenantCode(String tenantCode) { - deleteDir(getResDir(tenantCode)); - deleteDir(getUdfDir(tenantCode)); + StorageEntity storageEntity = new StorageEntity(); + storageEntity.setFileName(new File(absolutePath).getName()); + storageEntity.setFullName(absolutePath); + storageEntity.setPfullName(resourceMetaData.getResourceParentAbsolutePath()); + storageEntity.setType(resourceMetaData.getResourceType()); + storageEntity.setDirectory(resourceMetaData.isDirectory()); + storageEntity.setSize(ossObjectSummary.getSize()); + 
storageEntity.setCreateTime(ossObjectSummary.getLastModified()); + storageEntity.setUpdateTime(ossObjectSummary.getLastModified()); + return storageEntity; } - public void ensureBucketSuccessfullyCreated(String bucketName) { - if (StringUtils.isBlank(bucketName)) { - throw new IllegalArgumentException("resource.alibaba.cloud.oss.bucket.name is empty"); - } + private StorageEntity transformCommonPrefixToStorageEntity(String commonPrefix) { + String absolutePath = transformOssKeyToAbsolutePath(commonPrefix); - boolean existsBucket = ossClient.doesBucketExist(bucketName); - if (!existsBucket) { - throw new IllegalArgumentException( - "bucketName: " + bucketName + " is not exists, you need to create them by yourself"); - } + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); - log.info("bucketName: {} has been found, the current regionName is {}", bucketName, region); + StorageEntity entity = new StorageEntity(); + entity.setFileName(new File(absolutePath).getName()); + entity.setFullName(absolutePath); + entity.setDirectory(resourceMetaData.isDirectory()); + entity.setType(resourceMetaData.getResourceType()); + entity.setSize(0L); + entity.setCreateTime(null); + entity.setUpdateTime(null); + return entity; } - protected void deleteDir(String directoryName) { - if (ossClient.doesObjectExist(bucketName, directoryName)) { - ossClient.deleteObject(bucketName, directoryName); + private String transformAbsolutePathToOssKey(String absolutePath) { + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + if (resourceMetaData.isDirectory()) { + return FileUtils.concatFilePath(absolutePath, "/"); } + return absolutePath; } - protected OSS buildOssClient() { - return OssClientFactory.buildOssClient(ossConnection); - } - - private String findDirAlias(String dirPath) { - if (!dirPath.endsWith(FOLDER_SEPARATOR)) { - return dirPath; + private String transformOssKeyToAbsolutePath(String s3Key) { + if (s3Key.endsWith("/")) { + return 
s3Key.substring(0, s3Key.length() - 1); } - - Path path = Paths.get(dirPath); - return path.getName(path.getNameCount() - 1) + FOLDER_SEPARATOR; + return s3Key; } } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorFactory.java index 783faf59eafa..966cc471aba9 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorFactory.java @@ -17,20 +17,20 @@ package org.apache.dolphinscheduler.plugin.storage.oss; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class OssStorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class OssStorageOperatorFactory implements StorageOperatorFactory { @Override - public StorageOperate createStorageOperate() { - OssStorageOperator ossOperator = new OssStorageOperator(); - ossOperator.init(); - return ossOperator; + public StorageOperator createStorageOperate() { + return new OssStorageOperator(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, 
"/dolphinscheduler")); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorTest.java deleted file mode 100644 index b84df0a02486..000000000000 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperatorTest.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.storage.oss; - -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - -import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.spi.enums.ResourceType; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -import com.aliyun.oss.OSS; -import com.aliyun.oss.model.ListObjectsV2Request; -import com.aliyun.oss.model.ListObjectsV2Result; - -@ExtendWith(MockitoExtension.class) -public class OssStorageOperatorTest { - - private static final String ACCESS_KEY_ID_MOCK = "ACCESS_KEY_ID_MOCK"; - private static final String ACCESS_KEY_SECRET_MOCK = "ACCESS_KEY_SECRET_MOCK"; - private static final String REGION_MOCK = "REGION_MOCK"; - private static final String END_POINT_MOCK = "END_POINT_MOCK"; - private static final String BUCKET_NAME_MOCK = "BUCKET_NAME_MOCK"; - private static final String TENANT_CODE_MOCK = "TENANT_CODE_MOCK"; - private static final String DIR_MOCK = "DIR_MOCK"; - private static final String FILE_NAME_MOCK = "FILE_NAME_MOCK"; - private static final String FILE_PATH_MOCK = "FILE_PATH_MOCK"; - - private static final String FULL_NAME = "/tmp/dir1/"; - - private static final String DEFAULT_PATH = "/tmp/"; - - @Mock - private 
OSS ossClientMock; - - private OssStorageOperator ossOperator; - - @BeforeEach - public void setUp() throws Exception { - ossOperator = spy(new OssStorageOperator()); - doReturn(ACCESS_KEY_ID_MOCK).when(ossOperator) - .readOssAccessKeyID(); - doReturn(ACCESS_KEY_SECRET_MOCK).when(ossOperator) - .readOssAccessKeySecret(); - doReturn(REGION_MOCK).when(ossOperator).readOssRegion(); - doReturn(BUCKET_NAME_MOCK).when(ossOperator).readOssBucketName(); - doReturn(END_POINT_MOCK).when(ossOperator).readOssEndPoint(); - doReturn(ossClientMock).when(ossOperator).buildOssClient(); - doNothing().when(ossOperator).ensureBucketSuccessfullyCreated(any()); - - ossOperator.init(); - - } - - @Test - public void initOssOperator() { - verify(ossOperator, times(1)).buildOssClient(); - Assertions.assertEquals(ACCESS_KEY_ID_MOCK, ossOperator.getAccessKeyId()); - Assertions.assertEquals(ACCESS_KEY_SECRET_MOCK, ossOperator.getAccessKeySecret()); - Assertions.assertEquals(REGION_MOCK, ossOperator.getRegion()); - Assertions.assertEquals(BUCKET_NAME_MOCK, ossOperator.getBucketName()); - } - - @Test - public void tearDownOssOperator() throws IOException { - doNothing().when(ossClientMock).shutdown(); - ossOperator.close(); - verify(ossClientMock, times(1)).shutdown(); - } - - @Test - public void createTenantResAndUdfDir() throws Exception { - doReturn(DIR_MOCK).when(ossOperator).getOssResDir(TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(ossOperator).getOssUdfDir(TENANT_CODE_MOCK); - doReturn(true).when(ossOperator).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - ossOperator.createTenantDirIfNotExists(TENANT_CODE_MOCK); - verify(ossOperator, times(2)).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - } - - @Test - public void getResDir() { - final String expectedResourceDir = String.format("dolphinscheduler/%s/resources/", TENANT_CODE_MOCK); - final String dir = ossOperator.getResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedResourceDir, dir); - } - - @Test - public void getUdfDir() { - final String 
expectedUdfDir = String.format("dolphinscheduler/%s/udfs/", TENANT_CODE_MOCK); - final String dir = ossOperator.getUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedUdfDir, dir); - } - - @Test - public void mkdirWhenDirExists() { - boolean isSuccess = false; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(true).when(ossClientMock).doesObjectExist(BUCKET_NAME_MOCK, key); - isSuccess = ossOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(ossClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void mkdirWhenDirNotExists() { - boolean isSuccess = true; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(false).when(ossClientMock).doesObjectExist(BUCKET_NAME_MOCK, key); - doNothing().when(ossOperator).createOssPrefix(BUCKET_NAME_MOCK, key); - isSuccess = ossOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(ossClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - verify(ossOperator, times(1)).createOssPrefix(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); - } - - @Test - public void getResourceFullName() { - final String expectedResourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFullName = ossOperator.getResourceFullName(TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedResourceFullName, resourceFullName); - } - - @Test - public void getResourceFileName() { - final String expectedResourceFileName = FILE_NAME_MOCK; - final String resourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFileName = ossOperator.getResourceFileName(TENANT_CODE_MOCK, resourceFullName); - 
Assertions.assertEquals(expectedResourceFileName, resourceFileName); - } - - @Test - public void getFileName() { - final String expectedFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String fileName = ossOperator.getFileName(ResourceType.FILE, TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedFileName, fileName); - } - - @Test - public void exists() { - boolean doesExist = false; - doReturn(true).when(ossClientMock).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); - try { - doesExist = ossOperator.exists(FILE_NAME_MOCK); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(doesExist); - verify(ossClientMock, times(1)).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); - } - - @Test - public void delete() { - boolean isDeleted = false; - doReturn(null).when(ossClientMock).deleteObject(anyString(), anyString()); - try { - isDeleted = ossOperator.delete(FILE_NAME_MOCK, true); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isDeleted); - verify(ossClientMock, times(1)).deleteObject(anyString(), anyString()); - } - - @Test - public void copy() { - boolean isSuccess = false; - doReturn(null).when(ossClientMock).copyObject(anyString(), anyString(), anyString(), anyString()); - try { - isSuccess = ossOperator.copy(FILE_PATH_MOCK, FILE_PATH_MOCK, false, false); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } - - Assertions.assertTrue(isSuccess); - verify(ossClientMock, times(1)).copyObject(anyString(), anyString(), anyString(), anyString()); - } - - @Test - public void deleteTenant() { - doNothing().when(ossOperator).deleteTenantCode(anyString()); - try { - ossOperator.deleteTenant(TENANT_CODE_MOCK); - } catch (Exception e) { - Assertions.fail("unexpected exception caught in unit test"); - } - - verify(ossOperator, 
times(1)).deleteTenantCode(anyString()); - } - - @Test - public void getOssResDir() { - final String expectedOssResDir = String.format("dolphinscheduler/%s/resources", TENANT_CODE_MOCK); - final String ossResDir = ossOperator.getOssResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedOssResDir, ossResDir); - } - - @Test - public void getOssUdfDir() { - final String expectedOssUdfDir = String.format("dolphinscheduler/%s/udfs", TENANT_CODE_MOCK); - final String ossUdfDir = ossOperator.getOssUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedOssUdfDir, ossUdfDir); - } - - @Test - public void getOssTenantDir() { - final String expectedOssTenantDir = String.format(FORMAT_S_S, DIR_MOCK, TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(ossOperator).getOssDataBasePath(); - final String ossTenantDir = ossOperator.getOssTenantDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedOssTenantDir, ossTenantDir); - } - - @Test - public void deleteDir() { - doReturn(true).when(ossClientMock).doesObjectExist(anyString(), anyString()); - ossOperator.deleteDir(DIR_MOCK); - verify(ossClientMock, times(1)).deleteObject(anyString(), anyString()); - } - - @Test - public void testGetFileStatus() throws Exception { - doReturn(new ListObjectsV2Result()).when(ossClientMock).listObjectsV2(Mockito.any(ListObjectsV2Request.class)); - StorageEntity entity = ossOperator.getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(FULL_NAME, entity.getFullName()); - Assertions.assertEquals("dir1/", entity.getFileName()); - } - - @Test - public void testListFilesStatus() throws Exception { - doReturn(new ListObjectsV2Result()).when(ossClientMock).listObjectsV2(Mockito.any(ListObjectsV2Request.class)); - List result = - ossOperator.listFilesStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } - - @Test - public void testListFilesStatusRecursively() throws Exception { - 
StorageEntity entity = new StorageEntity(); - entity.setFullName(FULL_NAME); - - doReturn(entity).when(ossOperator).getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - doReturn(Collections.EMPTY_LIST).when(ossOperator).listFilesStatus(anyString(), anyString(), anyString(), - Mockito.any(ResourceType.class)); - - List result = - ossOperator.listFilesStatusRecursively(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(0, result.size()); - } -} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/resources/logback.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/pom.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/pom.xml index b9dfd5d1c9ed..2c7a1d2d9969 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/pom.xml +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/pom.xml @@ -42,5 +42,12 @@ com.amazonaws aws-java-sdk-s3 + + + org.testcontainers + minio + test + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java index a13611316edb..d70a4c5990d5 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java @@ -17,20 +17,13 @@ 
package org.apache.dolphinscheduler.plugin.storage.s3; -import static org.apache.dolphinscheduler.common.constants.Constants.AWS_END_POINT; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_FILE; -import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_TYPE_UDF; - +import org.apache.dolphinscheduler.authentication.aws.AmazonS3ClientFactory; import org.apache.dolphinscheduler.common.constants.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.AbstractStorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.lang3.StringUtils; @@ -38,32 +31,24 @@ import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Collections; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; -import 
lombok.Data; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import com.amazonaws.AmazonServiceException; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ListObjectsV2Request; import com.amazonaws.services.s3.model.ListObjectsV2Result; import com.amazonaws.services.s3.model.ObjectMetadata; @@ -71,77 +56,30 @@ import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.amazonaws.services.s3.model.S3ObjectSummary; -import com.amazonaws.services.s3.transfer.MultipleFileDownload; -import com.amazonaws.services.s3.transfer.TransferManager; -import com.amazonaws.services.s3.transfer.TransferManagerBuilder; @Slf4j -@Data -public class S3StorageOperator implements Closeable, StorageOperate { - - private String accessKeyId; - - private String accessKeySecret; - - private String region; +public class S3StorageOperator extends AbstractStorageOperator implements Closeable, StorageOperator { - private String bucketName; + private final String bucketName; - private String endPoint; - - private AmazonS3 s3Client; - - public S3StorageOperator() { - } + private final AmazonS3 s3Client; - public void init() { - accessKeyId = readAccessKeyID(); - accessKeySecret = readAccessKeySecret(); - region = readRegion(); - bucketName = readBucketName(); - endPoint = readEndPoint(); - s3Client = buildS3Client(); - checkBucketNameExists(bucketName); + public S3StorageOperator(S3StorageProperties s3StorageProperties) { + super(s3StorageProperties.getResourceUploadPath()); + bucketName = s3StorageProperties.getBucketName(); + s3Client = 
AmazonS3ClientFactory.createAmazonS3Client(s3StorageProperties.getS3Configuration()); + exceptionWhenBucketNameNotExists(bucketName); } - protected AmazonS3 buildS3Client() { - if (!StringUtils.isEmpty(endPoint)) { - return AmazonS3ClientBuilder - .standard() - .withPathStyleAccessEnabled(true) - .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration( - endPoint, region)) - .withCredentials( - new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyId, accessKeySecret))) - .build(); - } else { - return AmazonS3ClientBuilder - .standard() - .withCredentials( - new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyId, accessKeySecret))) - .withRegion(region) - .build(); + @Override + public String getStorageBaseDirectory() { + // All directory should end with File.separator + if (resourceBaseAbsolutePath.startsWith("/")) { + log.warn("{} -> {} should not start with / in s3", Constants.RESOURCE_UPLOAD_PATH, + resourceBaseAbsolutePath); + return resourceBaseAbsolutePath.substring(1); } - } - - protected String readAccessKeyID() { - return PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID); - } - - protected String readAccessKeySecret() { - return PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY); - } - - protected String readRegion() { - return PropertyUtils.getString(TaskConstants.AWS_REGION); - } - - protected String readBucketName() { - return PropertyUtils.getString(Constants.AWS_S3_BUCKET_NAME); - } - - protected String readEndPoint() { - return PropertyUtils.getString(AWS_END_POINT); + return resourceBaseAbsolutePath; } @Override @@ -149,54 +87,28 @@ public void close() throws IOException { s3Client.shutdown(); } + @SneakyThrows @Override - public void createTenantDirIfNotExists(String tenantCode) throws Exception { - mkdir(tenantCode, getS3ResDir(tenantCode)); - mkdir(tenantCode, getS3UdfDir(tenantCode)); - } - - @Override - public String getResDir(String tenantCode) { - return getS3ResDir(tenantCode) + 
FOLDER_SEPARATOR; - } - - @Override - public String getUdfDir(String tenantCode) { - return getS3UdfDir(tenantCode) + FOLDER_SEPARATOR; - } - - @Override - public boolean mkdir(String tenantCode, String path) throws IOException { - String objectName = path + FOLDER_SEPARATOR; - if (!s3Client.doesObjectExist(bucketName, objectName)) { - ObjectMetadata metadata = new ObjectMetadata(); - metadata.setContentLength(0); - InputStream emptyContent = new ByteArrayInputStream(new byte[0]); - PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, objectName, emptyContent, metadata); - s3Client.putObject(putObjectRequest); - } - return true; - } - - @Override - public String getResourceFullName(String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); - } - return String.format(FORMAT_S_S, getS3ResDir(tenantCode), fileName); - } - - @Override - public String getFileName(ResourceType resourceType, String tenantCode, String fileName) { - if (fileName.startsWith(FOLDER_SEPARATOR)) { - fileName = fileName.replaceFirst(FOLDER_SEPARATOR, ""); + public void createStorageDir(String directoryAbsolutePath) { + directoryAbsolutePath = transformAbsolutePathToS3Key(directoryAbsolutePath); + if (s3Client.doesObjectExist(bucketName, directoryAbsolutePath)) { + throw new FileAlreadyExistsException( + "The directory " + directoryAbsolutePath + " already exists in the bucket " + bucketName); } - return getDir(resourceType, tenantCode) + fileName; + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(0); + InputStream emptyContent = new ByteArrayInputStream(new byte[0]); + PutObjectRequest putObjectRequest = + new PutObjectRequest(bucketName, directoryAbsolutePath, emptyContent, metadata); + s3Client.putObject(putObjectRequest); } + @SneakyThrows @Override - public void download(String srcFilePath, String dstFilePath, - boolean overwrite) throws IOException { + public void 
download(String srcFilePath, + String dstFilePath, + boolean overwrite) { + srcFilePath = transformAbsolutePathToS3Key(srcFilePath); File dstFile = new File(dstFilePath); if (dstFile.isDirectory()) { Files.delete(dstFile.toPath()); @@ -212,187 +124,84 @@ public void download(String srcFilePath, String dstFilePath, while ((readLen = s3is.read(readBuf)) > 0) { fos.write(readBuf, 0, readLen); } - } catch (AmazonServiceException e) { - throw new IOException(e.getMessage()); - } catch (FileNotFoundException e) { - log.error("the destination file {} not found", dstFilePath); - throw e; } } @Override - public boolean exists(String fullName) throws IOException { + public boolean exists(String fullName) { + fullName = transformAbsolutePathToS3Key(fullName); return s3Client.doesObjectExist(bucketName, fullName); } @Override - public boolean delete(String fullName, boolean recursive) throws IOException { - try { - s3Client.deleteObject(bucketName, fullName); - return true; - } catch (AmazonServiceException e) { - log.error("delete the object error,the resource path is {}", fullName); - return false; + public void delete(String absolutePath, boolean recursive) { + absolutePath = transformAbsolutePathToS3Key(absolutePath); + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + if (!resourceMetaData.isDirectory()) { + s3Client.deleteObject(bucketName, absolutePath); + return; } - } - - @Override - public boolean delete(String fullName, List childrenPathList, boolean recursive) throws IOException { - // append the resource fullName to the list for deletion. 
- childrenPathList.add(fullName); - - DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName) - .withKeys(childrenPathList.stream().toArray(String[]::new)); - try { - s3Client.deleteObjects(deleteObjectsRequest); - } catch (AmazonServiceException e) { - log.error("delete objects error", e); - return false; + if (recursive) { + List storageEntities = listStorageEntityRecursively(absolutePath); + for (StorageEntity storageEntity : storageEntities) { + s3Client.deleteObject(bucketName, transformAbsolutePathToS3Key(storageEntity.getFullName())); + } } - - return true; + s3Client.deleteObject(bucketName, absolutePath); } @Override - public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { + public void copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) { + srcPath = transformAbsolutePathToS3Key(srcPath); + dstPath = transformAbsolutePathToS3Key(dstPath); + + ResourceMetadata resourceMetaData = getResourceMetaData(srcPath); + if (resourceMetaData.isDirectory()) { + throw new UnsupportedOperationException("S3 does not support copying directories."); + } s3Client.copyObject(bucketName, srcPath, bucketName, dstPath); if (deleteSource) { s3Client.deleteObject(bucketName, srcPath); } - return true; } + @SneakyThrows @Override - public String getDir(ResourceType resourceType, String tenantCode) { - switch (resourceType) { - case UDF: - return getUdfDir(tenantCode); - case FILE: - return getResDir(tenantCode); - case ALL: - return getS3DataBasePath(); - default: - return ""; + public void upload(String srcFile, String dstPath, boolean deleteSource, boolean overwrite) { + dstPath = transformAbsolutePathToS3Key(dstPath); + + if (s3Client.doesObjectExist(bucketName, dstPath)) { + if (overwrite) { + s3Client.deleteObject(bucketName, dstPath); + } else { + throw new FileAlreadyExistsException("The file " + dstPath + " already exists in the bucket " + + bucketName + " and 
overwrite is not allowed."); + } } - } - - @Override - public boolean upload(String tenantCode, String srcFile, String dstPath, boolean deleteSource, - boolean overwrite) throws IOException { - try { - s3Client.putObject(bucketName, dstPath, new File(srcFile)); + s3Client.putObject(bucketName, dstPath, new File(srcFile)); - if (deleteSource) { - Files.delete(Paths.get(srcFile)); - } - return true; - } catch (AmazonServiceException e) { - log.error("upload failed,the bucketName is {},the filePath is {}", bucketName, dstPath); - return false; + if (deleteSource) { + Files.delete(Paths.get(srcFile)); } } + @SneakyThrows @Override - public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { - if (StringUtils.isBlank(filePath)) { - log.error("file path:{} is blank", filePath); - return Collections.emptyList(); - } + public List fetchFileContent(String filePath, int skipLineNums, int limit) { + filePath = transformAbsolutePathToS3Key(filePath); S3Object s3Object = s3Client.getObject(bucketName, filePath); - try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(s3Object.getObjectContent()))) { - Stream stream = bufferedReader.lines().skip(skipLineNums).limit(limit); - return stream.collect(Collectors.toList()); - } - } - - @Override - public void deleteTenant(String tenantCode) throws Exception { - deleteTenantCode(tenantCode); - } - - /** - * S3 resource dir - * - * @param tenantCode tenant code - * @return S3 resource dir - */ - public String getS3ResDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_FILE, getS3TenantDir(tenantCode)); - } - - /** - * S3 udf dir - * - * @param tenantCode tenant code - * @return get udf dir on S3 - */ - public String getS3UdfDir(String tenantCode) { - return String.format("%s/" + RESOURCE_TYPE_UDF, getS3TenantDir(tenantCode)); - } - - /** - * @param tenantCode tenant code - * @return file directory of tenants on S3 - */ - public String 
getS3TenantDir(String tenantCode) { - return String.format(FORMAT_S_S, getS3DataBasePath(), tenantCode); - } - - /** - * get data S3 path - * - * @return data S3 path - */ - public String getS3DataBasePath() { - if (FOLDER_SEPARATOR.equals(RESOURCE_UPLOAD_PATH)) { - return ""; - } else { - return RESOURCE_UPLOAD_PATH.replaceFirst(FOLDER_SEPARATOR, ""); - } - } - - protected void deleteTenantCode(String tenantCode) { - deleteDir(getResDir(tenantCode)); - deleteDir(getUdfDir(tenantCode)); - } - - /** - * xxx untest - * upload local directory to S3 - * - * @param tenantCode - * @param keyPrefix the name of directory - * @param strPath - */ - private void uploadDirectory(String tenantCode, String keyPrefix, String strPath) { - s3Client.putObject(bucketName, tenantCode + FOLDER_SEPARATOR + keyPrefix, new File(strPath)); - } - - /** - * xxx untest - * download S3 Directory to local - * - * @param tenantCode - * @param keyPrefix the name of directory - * @param srcPath - */ - private void downloadDirectory(String tenantCode, String keyPrefix, String srcPath) { - TransferManager tm = TransferManagerBuilder.standard().withS3Client(s3Client).build(); - try { - MultipleFileDownload download = - tm.downloadDirectory(bucketName, tenantCode + FOLDER_SEPARATOR + keyPrefix, new File(srcPath)); - download.waitForCompletion(); - } catch (AmazonS3Exception | InterruptedException e) { - log.error("download the directory failed with the bucketName is {} and the keyPrefix is {}", bucketName, - tenantCode + FOLDER_SEPARATOR + keyPrefix); - Thread.currentThread().interrupt(); - } finally { - tm.shutdownNow(); + try ( + InputStreamReader inputStreamReader = new InputStreamReader(s3Object.getObjectContent()); + BufferedReader bufferedReader = new BufferedReader(inputStreamReader)) { + return bufferedReader.lines() + .skip(skipLineNums) + .limit(limit) + .collect(Collectors.toList()); } } - public void checkBucketNameExists(String bucketName) { + void exceptionWhenBucketNameNotExists(String 
bucketName) { if (StringUtils.isBlank(bucketName)) { throw new IllegalArgumentException("resource.aws.s3.bucket.name is blank"); } @@ -407,199 +216,135 @@ public void checkBucketNameExists(String bucketName) { s3Client.getRegionName()); } - /** - * only delete the object of directory ,it`s better to delete the files in it -r - */ - protected void deleteDir(String directoryName) { - if (s3Client.doesObjectExist(bucketName, directoryName)) { - s3Client.deleteObject(bucketName, directoryName); - } + @Override + public List listStorageEntity(String resourceAbsolutePath) { + final String s3ResourceAbsolutePath = transformAbsolutePathToS3Key(resourceAbsolutePath); + ListObjectsV2Request listObjectsV2Request = new ListObjectsV2Request() + .withBucketName(bucketName) + .withDelimiter("/") + .withPrefix(s3ResourceAbsolutePath); + + ListObjectsV2Result listObjectsV2Result = s3Client.listObjectsV2(listObjectsV2Request); + List storageEntities = new ArrayList<>(); + storageEntities.addAll(listObjectsV2Result.getCommonPrefixes() + .stream() + .map(this::transformCommonPrefixToStorageEntity) + .collect(Collectors.toList())); + storageEntities.addAll( + listObjectsV2Result.getObjectSummaries().stream() + .filter(s3ObjectSummary -> !s3ObjectSummary.getKey().equals(s3ResourceAbsolutePath)) + .map(this::transformS3ObjectToStorageEntity) + .collect(Collectors.toList())); + + return storageEntities; } @Override - public ResUploadType returnStorageType() { - return ResUploadType.S3; + public List listFileStorageEntityRecursively(String resourceAbsolutePath) { + return listStorageEntityRecursively(resourceAbsolutePath) + .stream() + .filter(storageEntity -> !storageEntity.isDirectory()) + .collect(Collectors.toList()); } @Override - public List listFilesStatusRecursively(String path, String defaultPath, String tenantCode, - ResourceType type) { + public StorageEntity getStorageEntity(String resourceAbsolutePath) { + resourceAbsolutePath = 
transformAbsolutePathToS3Key(resourceAbsolutePath); + + S3Object object = s3Client.getObject(bucketName, resourceAbsolutePath); + return transformS3ObjectToStorageEntity(object); + } + + private List listStorageEntityRecursively(String resourceAbsolutePath) { + resourceAbsolutePath = transformAbsolutePathToS3Key(resourceAbsolutePath); + + Set visited = new HashSet<>(); List storageEntityList = new ArrayList<>(); - LinkedList foldersToFetch = new LinkedList<>(); - - StorageEntity initialEntity = null; - try { - initialEntity = getFileStatus(path, defaultPath, tenantCode, type); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", path, e); - return storageEntityList; - } - foldersToFetch.add(initialEntity); + LinkedList foldersToFetch = new LinkedList<>(); + foldersToFetch.addLast(resourceAbsolutePath); while (!foldersToFetch.isEmpty()) { - String pathToExplore = foldersToFetch.pop().getFullName(); - try { - List tempList = listFilesStatus(pathToExplore, defaultPath, tenantCode, type); - for (StorageEntity temp : tempList) { - if (temp.isDirectory()) { - foldersToFetch.add(temp); + String pathToExplore = foldersToFetch.pop(); + visited.add(pathToExplore); + List tempList = listStorageEntity(pathToExplore); + for (StorageEntity temp : tempList) { + if (temp.isDirectory()) { + if (visited.contains(temp.getFullName())) { + continue; } + foldersToFetch.add(temp.getFullName()); } - storageEntityList.addAll(tempList); - } catch (Exception e) { - log.error("error while listing files status recursively, path: {}", pathToExplore, e); } + storageEntityList.addAll(tempList); } return storageEntityList; - } - @Override - public List listFilesStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws AmazonServiceException { - List storageEntityList = new ArrayList<>(); + private StorageEntity transformS3ObjectToStorageEntity(S3Object object) { - // TODO: optimize pagination - ListObjectsV2Request request = 
new ListObjectsV2Request(); - request.setBucketName(bucketName); - request.setPrefix(path); - request.setDelimiter("/"); - - ListObjectsV2Result v2Result; - do { - try { - v2Result = s3Client.listObjectsV2(request); - } catch (AmazonServiceException e) { - throw new AmazonServiceException("Get S3 file list exception, error type:" + e.getErrorType(), e); - } + String s3Key = object.getKey(); + String absolutePath = transformS3KeyToAbsolutePath(s3Key); - List summaries = v2Result.getObjectSummaries(); - - for (S3ObjectSummary summary : summaries) { - if (!summary.getKey().endsWith("/")) { - // the path is a file - String[] aliasArr = summary.getKey().split("/"); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, summary.getKey()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(summary.getKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(summary.getSize()); - entity.setCreateTime(summary.getLastModified()); - entity.setUpdateTime(summary.getLastModified()); - entity.setPfullName(path); - - storageEntityList.add(entity); - } - } + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); - for (String commonPrefix : v2Result.getCommonPrefixes()) { - // the paths in commonPrefix are directories - String suffix = StringUtils.difference(path, commonPrefix); - String fileName = StringUtils.difference(defaultPath, commonPrefix); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(suffix); - entity.setFileName(fileName); - entity.setFullName(commonPrefix); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); - entity.setCreateTime(null); - entity.setUpdateTime(null); - entity.setPfullName(path); - - storageEntityList.add(entity); - } + StorageEntity entity = new StorageEntity(); + 
entity.setFileName(new File(absolutePath).getName()); + entity.setFullName(absolutePath); + entity.setDirectory(resourceMetaData.isDirectory()); + entity.setType(resourceMetaData.getResourceType()); + entity.setSize(object.getObjectMetadata().getContentLength()); + entity.setCreateTime(object.getObjectMetadata().getLastModified()); + entity.setUpdateTime(object.getObjectMetadata().getLastModified()); + return entity; + } - request.setContinuationToken(v2Result.getContinuationToken()); + private StorageEntity transformCommonPrefixToStorageEntity(String commonPrefix) { + String absolutePath = transformS3KeyToAbsolutePath(commonPrefix); - } while (v2Result.isTruncated()); + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); - return storageEntityList; + StorageEntity entity = new StorageEntity(); + entity.setFileName(new File(absolutePath).getName()); + entity.setFullName(absolutePath); + entity.setDirectory(resourceMetaData.isDirectory()); + entity.setType(resourceMetaData.getResourceType()); + entity.setSize(0L); + entity.setCreateTime(null); + entity.setUpdateTime(null); + return entity; } - @Override - public StorageEntity getFileStatus(String path, String defaultPath, String tenantCode, - ResourceType type) throws AmazonServiceException, FileNotFoundException { - // Notice: we do not use getObject here because intermediate directories - // may not exist in S3, which can cause getObject to throw exception. - // Since we still want to access it on frontend, this is a workaround using listObjects. 
- - ListObjectsV2Request request = new ListObjectsV2Request(); - request.setBucketName(bucketName); - request.setPrefix(path); - request.setDelimiter("/"); - - ListObjectsV2Result v2Result; - try { - v2Result = s3Client.listObjectsV2(request); - } catch (AmazonServiceException e) { - throw new AmazonServiceException("Get S3 file list exception, error type:" + e.getErrorType(), e); - } - - List summaries = v2Result.getObjectSummaries(); - - if (path.endsWith("/")) { - // the path is a directory that may or may not exist in S3 - String alias = findDirAlias(path); - String fileName = StringUtils.difference(defaultPath, path); + private StorageEntity transformS3ObjectToStorageEntity(S3ObjectSummary s3ObjectSummary) { + String absolutePath = transformS3KeyToAbsolutePath(s3ObjectSummary.getKey()); - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - entity.setFileName(fileName); - entity.setFullName(path); - entity.setDirectory(true); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(0); + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); - return entity; + StorageEntity entity = new StorageEntity(); + entity.setFileName(new File(absolutePath).getName()); + entity.setFullName(absolutePath); + entity.setPfullName(resourceMetaData.getResourceParentAbsolutePath()); + entity.setDirectory(resourceMetaData.isDirectory()); + entity.setType(resourceMetaData.getResourceType()); + entity.setSize(s3ObjectSummary.getSize()); + entity.setCreateTime(s3ObjectSummary.getLastModified()); + entity.setUpdateTime(s3ObjectSummary.getLastModified()); + return entity; + } - } else { - // the path is a file - if (summaries.size() > 0) { - S3ObjectSummary summary = summaries.get(0); - String[] aliasArr = summary.getKey().split("/"); - String alias = aliasArr[aliasArr.length - 1]; - String fileName = StringUtils.difference(defaultPath, summary.getKey()); - - StorageEntity entity = new StorageEntity(); - entity.setAlias(alias); - 
entity.setFileName(fileName); - entity.setFullName(summary.getKey()); - entity.setDirectory(false); - entity.setUserName(tenantCode); - entity.setType(type); - entity.setSize(summary.getSize()); - entity.setCreateTime(summary.getLastModified()); - entity.setUpdateTime(summary.getLastModified()); - - return entity; - } + private String transformAbsolutePathToS3Key(String absolutePath) { + ResourceMetadata resourceMetaData = getResourceMetaData(absolutePath); + if (resourceMetaData.isDirectory()) { + return FileUtils.concatFilePath(absolutePath, "/"); } - - throw new FileNotFoundException("Object is not found in S3 Bucket: " + bucketName); + return absolutePath; } - /** - * find alias for directories, NOT for files - * a directory is a path ending with "/" - */ - private String findDirAlias(String myStr) { - if (!myStr.endsWith(FOLDER_SEPARATOR)) { - // Make sure system won't crush down if someone accidentally misuse the function. - return myStr; + private String transformS3KeyToAbsolutePath(String s3Key) { + if (s3Key.endsWith("/")) { + return s3Key.substring(0, s3Key.length() - 1); } - - Path path = Paths.get(myStr); - return path.getName(path.getNameCount() - 1) + FOLDER_SEPARATOR; + return s3Key; } + } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorFactory.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorFactory.java index e1c3a41743d5..9b1539a4fbf4 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorFactory.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorFactory.java @@ -17,20 +17,29 @@ package org.apache.dolphinscheduler.plugin.storage.s3; -import 
org.apache.dolphinscheduler.plugin.storage.api.StorageOperate; -import org.apache.dolphinscheduler.plugin.storage.api.StorageOperateFactory; +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; +import org.apache.dolphinscheduler.plugin.storage.api.StorageOperatorFactory; import org.apache.dolphinscheduler.plugin.storage.api.StorageType; import com.google.auto.service.AutoService; -@AutoService(StorageOperateFactory.class) -public class S3StorageOperatorFactory implements StorageOperateFactory { +@AutoService(StorageOperatorFactory.class) +public class S3StorageOperatorFactory implements StorageOperatorFactory { @Override - public StorageOperate createStorageOperate() { - S3StorageOperator s3StorageOperator = new S3StorageOperator(); - s3StorageOperator.init(); - return s3StorageOperator; + public StorageOperator createStorageOperate() { + final S3StorageProperties s3StorageProperties = getS3StorageProperties(); + return new S3StorageOperator(s3StorageProperties); + } + + private S3StorageProperties getS3StorageProperties() { + return S3StorageProperties.builder() + .bucketName(PropertyUtils.getString(Constants.AWS_S3_BUCKET_NAME)) + .s3Configuration(PropertyUtils.getByPrefix("aws.s3.", "")) + .resourceUploadPath(PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler")) + .build(); } @Override diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageProperties.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageProperties.java new file mode 100644 index 000000000000..b7f6f159b46b --- /dev/null +++ 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageProperties.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.storage.s3; + +import java.util.Map; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class S3StorageProperties { + + private Map s3Configuration; + + private String bucketName; + + private String resourceUploadPath; +} diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorTest.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorTest.java index 0c3f75d8611d..dc635cceb531 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorTest.java +++ 
b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperatorTest.java @@ -17,289 +17,289 @@ package org.apache.dolphinscheduler.plugin.storage.s3; -import static org.apache.dolphinscheduler.common.constants.Constants.FOLDER_SEPARATOR; -import static org.apache.dolphinscheduler.common.constants.Constants.FORMAT_S_S; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import org.apache.dolphinscheduler.plugin.storage.api.ResourceMetadata; import org.apache.dolphinscheduler.plugin.storage.api.StorageEntity; import org.apache.dolphinscheduler.spi.enums.ResourceType; -import java.io.IOException; -import java.util.Collections; +import java.nio.file.FileAlreadyExistsException; import java.util.List; +import java.util.stream.Stream; +import lombok.SneakyThrows; + +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; +import org.testcontainers.containers.MinIOContainer; +import org.testcontainers.lifecycle.Startables; +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import 
com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.ListObjectsV2Request; -import com.amazonaws.services.s3.model.ListObjectsV2Result; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; -@ExtendWith(MockitoExtension.class) public class S3StorageOperatorTest { - private static final String ACCESS_KEY_ID_MOCK = "ACCESS_KEY_ID_MOCK"; - - private static final String ACCESS_KEY_SECRET_MOCK = "ACCESS_KEY_SECRET_MOCK"; - - private static final String REGION_MOCK = "REGION_MOCK"; - - private static final String END_POINT_MOCK = "END_POINT_MOCK"; - - private static final String BUCKET_NAME_MOCK = "BUCKET_NAME_MOCK"; - - private static final String TENANT_CODE_MOCK = "TENANT_CODE_MOCK"; - - private static final String DIR_MOCK = "DIR_MOCK"; - - private static final String FILE_NAME_MOCK = "FILE_NAME_MOCK"; - - private static final String FILE_PATH_MOCK = "FILE_PATH_MOCK"; - - private static final String FULL_NAME = "/tmp/dir1/"; - - private static final String DEFAULT_PATH = "/tmp/"; - - @Mock - private AmazonS3 s3Client; - - private S3StorageOperator s3StorageOperator; + private static final String demoSql = S3StorageOperatorTest.class.getResource("/demo.sql").getFile(); + + private static MinIOContainer minIOContainer; + + private static S3StorageOperator s3StorageOperator; + + @BeforeAll + public static void setUp() throws Exception { + String bucketName = "dolphinscheduler"; + String accessKey = "accessKey123"; + String secretKey = "secretKey123"; + String region = "us-east-1"; + + minIOContainer = new MinIOContainer("minio/minio:RELEASE.2023-09-04T19-57-37Z") + .withEnv("MINIO_ACCESS_KEY", accessKey) + .withEnv("MINIO_SECRET_KEY", secretKey) + .withEnv("MINIO_REGION", region) + .withNetworkAliases(bucketName + "." 
+ "localhost"); + + Startables.deepStart(Stream.of(minIOContainer)).join(); + + String endpoint = minIOContainer.getS3URL(); + + AmazonS3 amazonS3 = AmazonS3ClientBuilder.standard() + .withEndpointConfiguration(new AmazonS3ClientBuilder.EndpointConfiguration(endpoint, region)) + .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey))) + .withPathStyleAccessEnabled(true) + .build(); + amazonS3.createBucket(bucketName); + + S3StorageProperties s3StorageProperties = S3StorageProperties.builder() + .bucketName(bucketName) + .resourceUploadPath("tmp/dolphinscheduler") + .s3Configuration(ImmutableMap.of( + "access.key.id", accessKey, + "access.key.secret", secretKey, + "region", region, + "endpoint", endpoint)) + .build(); + s3StorageOperator = new S3StorageOperator(s3StorageProperties); + } @BeforeEach - public void setUp() throws Exception { - s3StorageOperator = Mockito.spy(new S3StorageOperator()); - - doReturn(ACCESS_KEY_ID_MOCK).when(s3StorageOperator) - .readAccessKeyID(); - doReturn(ACCESS_KEY_SECRET_MOCK).when(s3StorageOperator) - .readAccessKeySecret(); - doReturn(REGION_MOCK).when(s3StorageOperator).readRegion(); - doReturn(BUCKET_NAME_MOCK).when(s3StorageOperator).readBucketName(); - doReturn(END_POINT_MOCK).when(s3StorageOperator).readEndPoint(); - Mockito.doReturn(s3Client) - .when(s3StorageOperator).buildS3Client(); - Mockito.doNothing() - .when(s3StorageOperator).checkBucketNameExists(Mockito.any()); - - s3StorageOperator.init(); + public void initializeFiles() { + s3StorageOperator.delete("tmp/dolphinscheduler/default/resources", true); + s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/sqlDirectory"); + s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/multipleDirectories"); + s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/multipleDirectories/1"); + 
s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/multipleDirectories/2"); + s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/multipleDirectories/3"); + s3StorageOperator.upload(demoSql, "tmp/dolphinscheduler/default/resources/multipleDirectories/1/demo.sql", + false, true); + s3StorageOperator.createStorageDir("tmp/dolphinscheduler/default/resources/emptyDirectory"); + s3StorageOperator.upload(demoSql, "tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql", false, true); } @Test - public void testInit() { - verify(s3StorageOperator, times(1)).buildS3Client(); - Assertions.assertEquals(ACCESS_KEY_ID_MOCK, s3StorageOperator.getAccessKeyId()); - Assertions.assertEquals(ACCESS_KEY_SECRET_MOCK, s3StorageOperator.getAccessKeySecret()); - Assertions.assertEquals(REGION_MOCK, s3StorageOperator.getRegion()); - Assertions.assertEquals(BUCKET_NAME_MOCK, s3StorageOperator.getBucketName()); + public void testGetResourceMetaData() { + ResourceMetadata resourceMetaData = + s3StorageOperator.getResourceMetaData("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql"); + assertEquals("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql", + resourceMetaData.getResourceAbsolutePath()); + assertEquals("tmp/dolphinscheduler", resourceMetaData.getResourceBaseDirectory()); + assertEquals("default", resourceMetaData.getTenant()); + assertEquals(ResourceType.FILE, resourceMetaData.getResourceType()); + assertEquals("sqlDirectory/demo.sql", resourceMetaData.getResourceRelativePath()); + assertEquals("tmp/dolphinscheduler/default/resources/sqlDirectory", + resourceMetaData.getResourceParentAbsolutePath()); + assertFalse(resourceMetaData.isDirectory()); } @Test - public void testTearDown() throws IOException { - doNothing().when(s3Client).shutdown(); - s3StorageOperator.close(); - verify(s3Client, times(1)).shutdown(); + public void testGetStorageBaseDirectory() { + assertEquals("tmp/dolphinscheduler", 
s3StorageOperator.getStorageBaseDirectory()); } @Test - public void testCreateTenantResAndUdfDir() throws Exception { - doReturn(DIR_MOCK).when(s3StorageOperator).getS3ResDir(TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(s3StorageOperator).getS3UdfDir(TENANT_CODE_MOCK); - doReturn(true).when(s3StorageOperator).mkdir(TENANT_CODE_MOCK, DIR_MOCK); - s3StorageOperator.createTenantDirIfNotExists(TENANT_CODE_MOCK); - verify(s3StorageOperator, times(2)).mkdir(TENANT_CODE_MOCK, DIR_MOCK); + public void testGetStorageBaseDirectory_withTenant() { + assertEquals("tmp/dolphinscheduler/default", s3StorageOperator.getStorageBaseDirectory("default")); } @Test - public void testGetResDir() { - final String expectedResourceDir = String.format("dolphinscheduler/%s/resources/", TENANT_CODE_MOCK); - final String dir = s3StorageOperator.getResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedResourceDir, dir); + public void testGetStorageBaseDirectory_withTenant_withResourceTypeFile() { + String storageBaseDirectory = s3StorageOperator.getStorageBaseDirectory("default", ResourceType.FILE); + assertThat(storageBaseDirectory).isEqualTo("tmp/dolphinscheduler/default/resources"); } @Test - public void testGetUdfDir() { - final String expectedUdfDir = String.format("dolphinscheduler/%s/udfs/", TENANT_CODE_MOCK); - final String dir = s3StorageOperator.getUdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedUdfDir, dir); + public void testGetStorageBaseDirectory_withTenant_withResourceTypeAll() { + String storageBaseDirectory = s3StorageOperator.getStorageBaseDirectory("default", ResourceType.ALL); + assertThat(storageBaseDirectory).isEqualTo("tmp/dolphinscheduler/default"); } @Test - public void mkdirWhenDirExists() { - boolean isSuccess = false; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(true).when(s3Client).doesObjectExist(BUCKET_NAME_MOCK, key); - isSuccess = s3StorageOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(s3Client, 
times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } - - Assertions.assertTrue(isSuccess); + public void testGetStorageFileAbsolutePath() { + assertThat(s3StorageOperator.getStorageFileAbsolutePath("default", "demo.sql")) + .isEqualTo("tmp/dolphinscheduler/default/resources/demo.sql"); } @Test - public void mkdirWhenDirNotExists() { - boolean isSuccess = true; - try { - final String key = DIR_MOCK + FOLDER_SEPARATOR; - doReturn(false).when(s3Client).doesObjectExist(BUCKET_NAME_MOCK, key); - isSuccess = s3StorageOperator.mkdir(TENANT_CODE_MOCK, DIR_MOCK); - verify(s3Client, times(1)).doesObjectExist(BUCKET_NAME_MOCK, key); - - } catch (IOException e) { - Assertions.fail("test failed due to unexpected IO exception"); - } + public void testCreateStorageDir_notExist() { + String dirName = "tmp/dolphinscheduler/default/resources/testDirectory"; + s3StorageOperator.createStorageDir(dirName); + assertTrue(s3StorageOperator.exists(dirName)); - Assertions.assertTrue(isSuccess); } @Test - public void getResourceFullName() { - final String expectedResourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFullName = s3StorageOperator.getResourceFullName(TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedResourceFullName, resourceFullName); + public void testCreateStorageDir_exist() { + final String dirName = "tmp/dolphinscheduler/default/resources/emptyDirectory"; + Assertions.assertThrows(FileAlreadyExistsException.class, () -> s3StorageOperator.createStorageDir(dirName)); } @Test - public void getResourceFileName() { - final String expectedResourceFileName = FILE_NAME_MOCK; - final String resourceFullName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String resourceFileName = s3StorageOperator.getResourceFileName(TENANT_CODE_MOCK, 
resourceFullName); - Assertions.assertEquals(expectedResourceFileName, resourceFileName); + public void testExists_fileExist() { + assertTrue(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql")); } @Test - public void getFileName() { - final String expectedFileName = - String.format("dolphinscheduler/%s/resources/%s", TENANT_CODE_MOCK, FILE_NAME_MOCK); - final String fileName = s3StorageOperator.getFileName(ResourceType.FILE, TENANT_CODE_MOCK, FILE_NAME_MOCK); - Assertions.assertEquals(expectedFileName, fileName); + public void testExists_fileNotExist() { + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/notExist.sql")); } @Test - public void exists() { - boolean doesExist = false; - doReturn(true).when(s3Client).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); - try { - doesExist = s3StorageOperator.exists(FILE_NAME_MOCK); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } + public void testExists_directoryExist() { + assertTrue(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory")); + } - Assertions.assertTrue(doesExist); - verify(s3Client, times(1)).doesObjectExist(BUCKET_NAME_MOCK, FILE_NAME_MOCK); + @Test + public void testExists_directoryNotExist() { + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/notExistDirectory")); } @Test - public void delete() { - doNothing().when(s3Client).deleteObject(anyString(), anyString()); - try { - s3StorageOperator.delete(FILE_NAME_MOCK, true); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } + public void delete_fileExist() { + s3StorageOperator.delete("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql", true); + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql")); + } - verify(s3Client, times(1)).deleteObject(anyString(), anyString()); + @Test + 
public void delete_fileNotExist() { + s3StorageOperator.delete("tmp/dolphinscheduler/default/resources/sqlDirectory/notExist.sql", true); + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/notExist.sql")); } @Test - public void copy() { - boolean isSuccess = false; - doReturn(null).when(s3Client).copyObject(anyString(), anyString(), anyString(), anyString()); - try { - isSuccess = s3StorageOperator.copy(FILE_PATH_MOCK, FILE_PATH_MOCK, false, false); - } catch (IOException e) { - Assertions.fail("unexpected IO exception in unit test"); - } + public void delete_directoryExist() { + s3StorageOperator.delete("tmp/dolphinscheduler/default/resources/sqlDirectory", true); + assertFalse(s3StorageOperator.exists("/tmp/dolphinscheduler/default/resources/sqlDirectory")); + } - Assertions.assertTrue(isSuccess); - verify(s3Client, times(1)).copyObject(anyString(), anyString(), anyString(), anyString()); + @Test + public void delete_directoryNotExist() { + s3StorageOperator.delete("tmp/dolphinscheduler/default/resources/notExist", true); + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/notExist")); } @Test - public void deleteTenant() { - doNothing().when(s3StorageOperator).deleteTenantCode(anyString()); - try { - s3StorageOperator.deleteTenant(TENANT_CODE_MOCK); - } catch (Exception e) { - Assertions.fail("unexpected exception caught in unit test"); - } + public void copy_file() { + s3StorageOperator.copy("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql", + "tmp/dolphinscheduler/default/resources/sqlDirectory/demo_copy.sql", true, true); + assertTrue(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/demo_copy.sql")); + assertFalse(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql")); + } - verify(s3StorageOperator, times(1)).deleteTenantCode(anyString()); + @Test + public void copy_directory() { + 
assertThrows(UnsupportedOperationException.class, + () -> s3StorageOperator.copy("tmp/dolphinscheduler/default/resources/sqlDirectory", + "tmp/dolphinscheduler/default/resources/sqlDirectory_copy", true, true)); } @Test - public void testGetS3ResDir() { - final String expectedS3ResDir = String.format("dolphinscheduler/%s/resources", TENANT_CODE_MOCK); - final String s3ResDir = s3StorageOperator.getS3ResDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedS3ResDir, s3ResDir); + public void testUpload_file() { + String file = S3StorageOperatorTest.class.getResource("/student.sql").getFile(); + s3StorageOperator.upload(file, "tmp/dolphinscheduler/default/resources/sqlDirectory/student.sql", false, true); + assertTrue(s3StorageOperator.exists("tmp/dolphinscheduler/default/resources/sqlDirectory/student.sql")); } @Test - public void testGetS3UdfDir() { - final String expectedS3UdfDir = String.format("dolphinscheduler/%s/udfs", TENANT_CODE_MOCK); - final String s3UdfDir = s3StorageOperator.getS3UdfDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedS3UdfDir, s3UdfDir); + public void testFetchFileContent() { + List strings = s3StorageOperator + .fetchFileContent("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql", 0, 2); + assertThat(strings).hasSize(2); } @Test - public void testGetS3TenantDir() { - final String expectedS3TenantDir = String.format(FORMAT_S_S, DIR_MOCK, TENANT_CODE_MOCK); - doReturn(DIR_MOCK).when(s3StorageOperator).getS3DataBasePath(); - final String s3TenantDir = s3StorageOperator.getS3TenantDir(TENANT_CODE_MOCK); - Assertions.assertEquals(expectedS3TenantDir, s3TenantDir); + public void testListStorageEntity_file() { + List storageEntities = + s3StorageOperator.listStorageEntity("tmp/dolphinscheduler/default/resources/sqlDirectory"); + assertThat(storageEntities).hasSize(1); + + StorageEntity storageEntity = storageEntities.get(0); + assertThat(storageEntity.getFullName()) + 
.isEqualTo("tmp/dolphinscheduler/default/resources/sqlDirectory/demo.sql"); + assertThat(storageEntity.getFileName()) + .isEqualTo("demo.sql"); + assertThat(storageEntity.isDirectory()).isFalse(); + assertThat(storageEntity.getPfullName()).isEqualTo("tmp/dolphinscheduler/default/resources/sqlDirectory"); + assertThat(storageEntity.getType()).isEqualTo(ResourceType.FILE); } @Test - public void deleteDir() { - doReturn(true).when(s3Client).doesObjectExist(anyString(), anyString()); - s3StorageOperator.deleteDir(DIR_MOCK); - verify(s3Client, times(1)).deleteObject(anyString(), anyString()); + public void testListStorageEntity_directory() { + List storageEntities = + s3StorageOperator.listStorageEntity("tmp/dolphinscheduler/default/resources"); + assertThat(storageEntities).hasSize(3); + } @Test - public void testGetFileStatus() throws Exception { - doReturn(new ListObjectsV2Result()).when(s3Client).listObjectsV2(Mockito.any(ListObjectsV2Request.class)); - StorageEntity entity = - s3StorageOperator.getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(FULL_NAME, entity.getFullName()); - Assertions.assertEquals("dir1/", entity.getFileName()); + public void testListStorageEntity_directoryNotExist() { + List storageEntities = + s3StorageOperator.listStorageEntity("tmp/dolphinscheduler/notExist/resources"); + assertThat(storageEntities).isEmpty(); + } @Test - public void testListFilesStatus() throws Exception { - doReturn(new ListObjectsV2Result()).when(s3Client).listObjectsV2(Mockito.any(ListObjectsV2Request.class)); - List result = - s3StorageOperator.listFilesStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, ResourceType.FILE); - Assertions.assertEquals(0, result.size()); + public void testListStorageEntityRecursively() { + List storageEntities = + s3StorageOperator + .listFileStorageEntityRecursively("tmp/dolphinscheduler/default/resources/multipleDirectories"); + assertThat(storageEntities).hasSize(1); + + StorageEntity 
storageEntity = storageEntities.get(0); + assertThat(storageEntity.getFullName()) + .isEqualTo("tmp/dolphinscheduler/default/resources/multipleDirectories/1/demo.sql"); + assertThat(storageEntity.getFileName()) + .isEqualTo("demo.sql"); + assertThat(storageEntity.isDirectory()).isFalse(); + assertThat(storageEntity.getPfullName()) + .isEqualTo("tmp/dolphinscheduler/default/resources/multipleDirectories/1"); + assertThat(storageEntity.getType()).isEqualTo(ResourceType.FILE); + } @Test - public void testListFilesStatusRecursively() throws Exception { - StorageEntity entity = new StorageEntity(); - entity.setFullName(FULL_NAME); - - doReturn(entity).when(s3StorageOperator).getFileStatus(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - doReturn(Collections.EMPTY_LIST).when(s3StorageOperator).listFilesStatus(anyString(), anyString(), anyString(), - Mockito.any(ResourceType.class)); - - List result = - s3StorageOperator.listFilesStatusRecursively(FULL_NAME, DEFAULT_PATH, TENANT_CODE_MOCK, - ResourceType.FILE); - Assertions.assertEquals(0, result.size()); + public void testExceptionWhenBucketNameNotExists() { + Assertions.assertDoesNotThrow(() -> s3StorageOperator.exceptionWhenBucketNameNotExists("dolphinscheduler")); } + + @SneakyThrows + @AfterAll + public static void tearDown() { + if (s3StorageOperator != null) { + s3StorageOperator.close(); + } + if (minIOContainer != null) { + minIOContainer.stop(); + } + } + } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/demo.sql b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/demo.sql new file mode 100644 index 000000000000..2c3a4cd616b3 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/demo.sql @@ -0,0 +1,17 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +select * from t_ds_version; \ No newline at end of file diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/logback.xml b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/logback.xml new file mode 100644 index 000000000000..6f211959c590 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student.sql b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student.sql new file mode 100644 index 000000000000..d91aaf080369 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student.sql @@ -0,0 +1,17 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +select * from t_ds_student; \ No newline at end of file diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student/student.sql b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student/student.sql new file mode 100644 index 000000000000..6eab26f880e5 --- /dev/null +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/test/resources/student/student.sql @@ -0,0 +1,18 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +select * from t_ds_student; \ No newline at end of file diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml index 5e4c74b27ad9..fbaea0dddc9f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml @@ -64,12 +64,6 @@ ${project.version} - - org.apache.dolphinscheduler - dolphinscheduler-task-pigeon - ${project.version} - - org.apache.dolphinscheduler dolphinscheduler-task-procedure diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml index b115a1b6d14b..7c8f24c71715 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml @@ -297,5 +297,10 @@ org.projectlombok lombok + + org.junit.jupiter + junit-jupiter + test + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ILogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ILogicTaskChannel.java new file mode 100644 index 000000000000..7b9766f57d99 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ILogicTaskChannel.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api; + +/** + * Used to mark a task channel as a logic task channel, the logic task channel is a special task channel that will be executed at master. + */ +public interface ILogicTaskChannel extends TaskChannel { +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java index e1d643299996..40d9c5632eb1 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/SQLTaskExecutionContext.java @@ -17,13 +17,10 @@ package org.apache.dolphinscheduler.plugin.task.api; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters; - import java.io.Serializable; -import java.util.List; /** - * SQL Task ExecutionContext + * SQL Task ExecutionContext */ public class SQLTaskExecutionContext implements Serializable { @@ -37,8 +34,6 @@ public class SQLTaskExecutionContext implements Serializable { */ private String connectionParams; - private List udfFuncParametersList; - /** * DefaultFS */ @@ -52,14 +47,6 @@ public void setWarningGroupId(int warningGroupId) { this.warningGroupId = warningGroupId; } - public List 
getUdfFuncParametersList() { - return udfFuncParametersList; - } - - public void setUdfFuncParametersList(List udfFuncParametersList) { - this.udfFuncParametersList = udfFuncParametersList; - } - public String getConnectionParams() { return connectionParams; } @@ -81,7 +68,6 @@ public String toString() { return "SQLTaskExecutionContext{" + "warningGroupId=" + warningGroupId + ", connectionParams='" + connectionParams + '\'' - + ", udfFuncParametersList=" + udfFuncParametersList + ", defaultFS='" + defaultFS + '\'' + '}'; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannel.java index 77abae504777..89fa2a07da7c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannel.java @@ -18,19 +18,11 @@ package org.apache.dolphinscheduler.plugin.task.api; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public interface TaskChannel { - void cancelApplication(boolean status); - - // todo: return ITask AbstractTask createTask(TaskExecutionContext taskRequest); - // todo: return IParameters - AbstractParameters parseParameters(ParametersNode parametersNode); - - ResourceParametersHelper getResources(String parameters); + AbstractParameters parseParameters(String taskParams); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannelFactory.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannelFactory.java index f7719b4728e9..7176f1e82ff5 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskChannelFactory.java @@ -17,16 +17,19 @@ package org.apache.dolphinscheduler.plugin.task.api; -import org.apache.dolphinscheduler.spi.common.UiChannelFactory; import org.apache.dolphinscheduler.spi.plugin.PrioritySPI; import org.apache.dolphinscheduler.spi.plugin.SPIIdentify; -public interface TaskChannelFactory extends UiChannelFactory, PrioritySPI { - - TaskChannel create(); +public interface TaskChannelFactory extends PrioritySPI { default SPIIdentify getIdentify() { - return SPIIdentify.builder().name(getName()).build(); + return SPIIdentify.builder() + .name(getName()) + .build(); } + String getName(); + + TaskChannel create(); + } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskConstants.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskConstants.java index 43734416e7e4..98780a22364a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskConstants.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskConstants.java @@ -362,24 +362,10 @@ private TaskConstants() { */ public static final String DATA_QUALITY_JAR_DIR = "data-quality.jar.dir"; - public static final String TASK_TYPE_CONDITIONS = "CONDITIONS"; - - public static final String TASK_TYPE_SWITCH = "SWITCH"; - - public static final String TASK_TYPE_SUB_PROCESS = 
"SUB_PROCESS"; - - public static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; - - public static final String TASK_TYPE_DEPENDENT = "DEPENDENT"; - - public static final String TASK_TYPE_SQL = "SQL"; - public static final String TASK_TYPE_DATA_QUALITY = "DATA_QUALITY"; public static final Set TASK_TYPE_SET_K8S = Sets.newHashSet("K8S", "KUBEFLOW"); - public static final String TASK_TYPE_BLOCKING = "BLOCKING"; - /** * azure config */ @@ -389,13 +375,6 @@ private TaskConstants() { public static final String AZURE_SECRET_TENANT_ID = "resource.azure.tenant.id"; public static final String QUERY_INTERVAL = "resource.query.interval"; - /** - * aws config - */ - public static final String AWS_ACCESS_KEY_ID = "resource.aws.access.key.id"; - public static final String AWS_SECRET_ACCESS_KEY = "resource.aws.secret.access.key"; - public static final String AWS_REGION = "resource.aws.region"; - /** * alibaba cloud config */ diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java index 4304ad2c1331..b825949e884c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java @@ -203,11 +203,6 @@ public class TaskExecutionContext implements Serializable { */ private String workerGroup; - /** - * delay execution time. 
- */ - private int delayTime; - /** * current execution status */ @@ -262,12 +257,9 @@ public class TaskExecutionContext implements Serializable { private boolean logBufferEnable; - /** - * dispatch fail times - */ private int dispatchFailTimes; - public void increaseDispatchFailTimes() { - this.dispatchFailTimes++; + public int increaseDispatchFailTimes() { + return ++dispatchFailTimes; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java index be3417466ffe..7e61fe4dff1f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java @@ -17,40 +17,29 @@ package org.apache.dolphinscheduler.plugin.task.api; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import static com.google.common.base.Preconditions.checkNotNull; + import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.BlockingParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ConditionsParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.DynamicParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; import org.apache.dolphinscheduler.spi.plugin.PrioritySPIFactory; -import java.util.Collections; import java.util.HashMap; import 
java.util.Map; -import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; import lombok.extern.slf4j.Slf4j; -import org.springframework.stereotype.Component; - -@Component @Slf4j public class TaskPluginManager { - private final Map taskChannelFactoryMap = new HashMap<>(); - private final Map taskChannelMap = new HashMap<>(); + private static final Map taskChannelMap = new HashMap<>(); - private final AtomicBoolean loadedFlag = new AtomicBoolean(false); + private static final AtomicBoolean loadedFlag = new AtomicBoolean(false); - /** - * Load task plugins from classpath. - */ - public void loadPlugin() { + static { + loadTaskPlugin(); + } + + public static void loadTaskPlugin() { if (!loadedFlag.compareAndSet(false, true)) { log.warn("The task plugin has already been loaded"); return; @@ -62,7 +51,6 @@ public void loadPlugin() { log.info("Registering task plugin: {} - {}", factoryName, factory.getClass().getSimpleName()); - taskChannelFactoryMap.put(factoryName, factory); taskChannelMap.put(factoryName, factory.create()); log.info("Registered task plugin: {} - {}", factoryName, factory.getClass().getSimpleName()); @@ -70,48 +58,49 @@ public void loadPlugin() { } - public Map getTaskChannelMap() { - return Collections.unmodifiableMap(taskChannelMap); - } - - public Map getTaskChannelFactoryMap() { - return Collections.unmodifiableMap(taskChannelFactoryMap); - } - - public TaskChannel getTaskChannel(String type) { - return this.getTaskChannelMap().get(type); + /** + * Get the TaskChannel by type, if the TaskChannel is not found, will throw + * @param type task type, cannot be null + * @throws IllegalArgumentException if the TaskChannel is not found + */ + public static TaskChannel getTaskChannel(String type) { + checkNotNull(type, "type cannot be null"); + TaskChannel taskChannel = taskChannelMap.get(type); + if (taskChannel == null) { + throw new IllegalArgumentException("Cannot find TaskChannel for : " + type); + } + return taskChannel; } - public 
boolean checkTaskParameters(ParametersNode parametersNode) { - AbstractParameters abstractParameters = this.getParameters(parametersNode); - return abstractParameters != null && abstractParameters.checkParameters(); + /** + * Check if the task parameters is validated + * @param taskType task type, cannot be null + * @param taskParams task parameters + * @return true if the task parameters is validated, otherwise false + * @throws IllegalArgumentException if the TaskChannel is not found + * @throws IllegalArgumentException if cannot deserialize the task parameters + */ + public static boolean checkTaskParameters(String taskType, String taskParams) { + AbstractParameters abstractParameters = parseTaskParameters(taskType, taskParams); + return abstractParameters.checkParameters(); } - public AbstractParameters getParameters(ParametersNode parametersNode) { - String taskType = parametersNode.getTaskType(); - if (Objects.isNull(taskType)) { - return null; - } - switch (taskType) { - case TaskConstants.TASK_TYPE_CONDITIONS: - return JSONUtils.parseObject(parametersNode.getTaskParams(), ConditionsParameters.class); - case TaskConstants.TASK_TYPE_SWITCH: - return JSONUtils.parseObject(parametersNode.getTaskParams(), SwitchParameters.class); - case TaskConstants.TASK_TYPE_SUB_PROCESS: - return JSONUtils.parseObject(parametersNode.getTaskParams(), SubProcessParameters.class); - case TaskConstants.TASK_TYPE_DEPENDENT: - return JSONUtils.parseObject(parametersNode.getTaskParams(), DependentParameters.class); - case TaskConstants.TASK_TYPE_BLOCKING: - return JSONUtils.parseObject(parametersNode.getTaskParams(), BlockingParameters.class); - case TaskConstants.TASK_TYPE_DYNAMIC: - return JSONUtils.parseObject(parametersNode.getTaskParams(), DynamicParameters.class); - default: - TaskChannel taskChannel = this.getTaskChannelMap().get(taskType); - if (Objects.isNull(taskChannel)) { - return null; - } - return taskChannel.parseParameters(parametersNode); + /** + * Parse the task 
parameters + * @param taskType task type, cannot be null + * @param taskParams task parameters + * @return AbstractParameters + * @throws IllegalArgumentException if the TaskChannel is not found + * @throws IllegalArgumentException if cannot deserialize the task parameters + */ + public static AbstractParameters parseTaskParameters(String taskType, String taskParams) { + checkNotNull(taskType, "taskType cannot be null"); + TaskChannel taskChannel = getTaskChannel(taskType); + AbstractParameters abstractParameters = taskChannel.parseParameters(taskParams); + if (abstractParameters == null) { + throw new IllegalArgumentException("Cannot parse task parameters: " + taskParams + " for : " + taskType); } + return abstractParameters; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java index 62424aacdbb1..75281ae25600 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/ResourceType.java @@ -20,5 +20,5 @@ public enum ResourceType { DATASOURCE, - UDF; + ; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java index 1313dc23a65c..8d3d2513af89 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java @@ -17,12 +17,14 @@ package org.apache.dolphinscheduler.plugin.task.api.k8s; +import org.apache.dolphinscheduler.common.utils.ClassFilterConstructor; import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.model.TaskResponse; import org.apache.dolphinscheduler.plugin.task.api.utils.K8sUtils; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.yaml.snakeyaml.Yaml; @@ -36,7 +38,10 @@ public abstract class AbstractK8sTaskExecutor { protected AbstractK8sTaskExecutor(TaskExecutionContext taskRequest) { this.taskRequest = taskRequest; this.k8sUtils = new K8sUtils(); - this.yaml = new Yaml(); + this.yaml = new Yaml(new ClassFilterConstructor(new Class[]{ + List.class, + String.class + })); this.taskOutputParams = new HashMap<>(); } public Map getTaskOutputParams() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskInstanceLogHeader.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskInstanceLogHeader.java index 02ee2e1c995a..3aa76dd114ec 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskInstanceLogHeader.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskInstanceLogHeader.java @@ -27,11 +27,11 @@ public class TaskInstanceLogHeader { private static final List INITIALIZE_TASK_CONTEXT_HEADER = Lists.newArrayList( - "***********************************************************************************************", + 
"\n***********************************************************************************************", "********************************* Initialize task context ***********************************", "***********************************************************************************************"); private static final List LOAD_TASK_INSTANCE_PLUGIN_HEADER = Lists.newArrayList( - "***********************************************************************************************", + "\n***********************************************************************************************", "********************************* Load task instance plugin *********************************", "***********************************************************************************************"); @@ -40,12 +40,12 @@ public static void printInitializeTaskContextHeader() { } private static final List EXECUTE_TASK_HEADER = Lists.newArrayList( - "***********************************************************************************************", + "\n***********************************************************************************************", "********************************* Execute task instance *************************************", "***********************************************************************************************"); private static final List FINALIZE_TASK_HEADER = Lists.newArrayList( - "***********************************************************************************************", + "\n***********************************************************************************************", "********************************* Finalize task instance ************************************", "***********************************************************************************************"); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParser.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParser.java index b00942fbd528..b28cd0c301cf 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParser.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParser.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.plugin.task.api.loop.template.http.parser; +import org.apache.dolphinscheduler.common.utils.ClassFilterConstructor; import org.apache.dolphinscheduler.plugin.task.api.loop.template.LoopTaskYamlDefinition; import org.apache.dolphinscheduler.plugin.task.api.loop.template.TaskDefinitionParser; import org.apache.dolphinscheduler.plugin.task.api.loop.template.http.HttpLoopTaskDefinition; @@ -28,11 +29,11 @@ import java.io.FileReader; import java.io.IOException; +import java.util.Map; import lombok.NonNull; import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.constructor.Constructor; import com.google.common.base.Preconditions; @@ -60,9 +61,20 @@ public class HttpTaskDefinitionParser implements TaskDefinitionParser nextNode; - - public void setNextNode(Object nextNode) { - if (nextNode instanceof Long) { - List nextNodeList = new ArrayList<>(); - nextNodeList.add((Long) nextNode); - this.nextNode = nextNodeList; - } else { - this.nextNode = (ArrayList) nextNode; - } - } + private Long nextNode; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/AbstractParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/AbstractParameters.java index f11a83bc543e..f99578d74370 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/AbstractParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/AbstractParameters.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; +import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.MapUtils; @@ -35,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; @@ -42,6 +44,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; +import com.google.common.collect.Lists; @Getter @Slf4j @@ -82,6 +85,7 @@ public K8sTaskExecutionContext generateK8sTaskExecutionContext(ResourceParameter /** * get input local parameters map if the param direct is IN + * * @return parameters map */ public Map getInputLocalParametersMap() { @@ -121,44 +125,30 @@ public void setVarPool(String varPool) { } public void dealOutParam(Map taskOutputParams) { - if (CollectionUtils.isEmpty(localParams)) { - return; - } List outProperty = getOutProperty(localParams); if (CollectionUtils.isEmpty(outProperty)) { return; } - if (MapUtils.isEmpty(taskOutputParams)) { - outProperty.forEach(this::addPropertyToValPool); - return; - } - - for (Property info : outProperty) { - String propValue = taskOutputParams.get(info.getProp()); - if (StringUtils.isNotEmpty(propValue)) { - info.setValue(propValue); - addPropertyToValPool(info); - continue; - } - addPropertyToValPool(info); - if 
(StringUtils.isEmpty(info.getValue())) { - log.warn("The output parameter {} value is empty and cannot find the out parameter from task output", - info); + if (CollectionUtils.isNotEmpty(outProperty) && MapUtils.isNotEmpty(taskOutputParams)) { + // Inject the value + for (Property info : outProperty) { + String value = taskOutputParams.get(info.getProp()); + if (value != null) { + info.setValue(value); + } } } + + varPool = VarPoolUtils.mergeVarPool(Lists.newArrayList(varPool, outProperty)); } - public List getOutProperty(List params) { + protected List getOutProperty(List params) { if (CollectionUtils.isEmpty(params)) { return new ArrayList<>(); } - List result = new ArrayList<>(); - for (Property info : params) { - if (info.getDirect() == Direct.OUT) { - result.add(info); - } - } - return result; + return params.stream() + .filter(info -> info.getDirect() == Direct.OUT) + .collect(Collectors.toList()); } public List> getListMapByString(String json) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ConditionsParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ConditionsParameters.java index 15141937b019..8ad567578c18 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ConditionsParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ConditionsParameters.java @@ -19,41 +19,60 @@ import org.apache.dolphinscheduler.plugin.task.api.enums.DependentRelation; import org.apache.dolphinscheduler.plugin.task.api.model.DependentTaskModel; -import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; -import java.util.ArrayList; +import org.apache.commons.collections4.CollectionUtils; + import java.util.List; +import 
lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; +import lombok.NoArgsConstructor; @Data +@Builder +@NoArgsConstructor +@AllArgsConstructor public class ConditionsParameters extends AbstractParameters { - // depend node list and state, only need task name - private List dependTaskList; - private DependentRelation dependRelation; - - // node list to run when success - private List successNode; + private ConditionDependency dependence; - // node list to run when failed - private List failedNode; + private ConditionResult conditionResult; @Override public boolean checkParameters() { + if (dependence == null || CollectionUtils.isEmpty(dependence.getDependTaskList())) { + return false; + } + if (conditionResult == null || CollectionUtils.isEmpty(conditionResult.getSuccessNode()) + || CollectionUtils.isEmpty(conditionResult.getFailedNode())) { + return false; + } return true; } - @Override - public List getResourceFilesList() { - return new ArrayList<>(); + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ConditionDependency { + + private List dependTaskList; + + private DependentRelation relation; } - public String getConditionResult() { - return "{" - + "\"successNode\": [\"" + successNode.get(0) - + "\"],\"failedNode\": [\"" + failedNode.get(0) - + "\"]}"; + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class ConditionResult { + + private boolean conditionSuccess; + + private List successNode; + + private List failedNode; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DependentParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DependentParameters.java index 1e648d57757e..c5f9ed916f0a 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DependentParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DependentParameters.java @@ -20,28 +20,55 @@ import org.apache.dolphinscheduler.plugin.task.api.enums.DependentRelation; import org.apache.dolphinscheduler.plugin.task.api.model.DependentTaskModel; +import org.apache.commons.collections4.CollectionUtils; + import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; @Data @EqualsAndHashCode(callSuper = true) public class DependentParameters extends AbstractParameters { - private List dependTaskList; - private DependentRelation relation; - /** Time unit is second */ - private Integer checkInterval; - private DependentFailurePolicyEnum failurePolicy; - /** Time unit is minutes */ - private Integer failureWaitingTime; + private Dependence dependence; @Override public boolean checkParameters() { + if (dependence == null) { + return false; + } + if (CollectionUtils.isEmpty(dependence.getDependTaskList())) { + return false; + } + if (dependence.getCheckInterval() != null && dependence.getCheckInterval() <= 0) { + return false; + } return true; } + @Data + @Builder + @NoArgsConstructor + @AllArgsConstructor + public static class Dependence { + + private List dependTaskList; + private DependentRelation relation; + /** + * Time unit is second + */ + private Integer checkInterval; + private DependentFailurePolicyEnum failurePolicy; + /** + * Time unit is minutes + */ + private Integer failureWaitingTime; + } + /** * the dependent task failure policy. 
*/ diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java index e918db3b1812..2a594655f9aa 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/DynamicParameters.java @@ -21,9 +21,15 @@ import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; import lombok.Data; +import lombok.NoArgsConstructor; @Data +@Builder +@NoArgsConstructor +@AllArgsConstructor public class DynamicParameters extends AbstractParameters { /** diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ParametersNode.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ParametersNode.java deleted file mode 100644 index b6eb88a68a41..000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/ParametersNode.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.api.parameters; - -/** - * TODO

Need to optimize, why there are multiple task parameter variables:taskParams,dependence,switchResult

- * - */ -public class ParametersNode { - - private String taskType; - - private String taskParams; - - private String dependence; - - private String switchResult; - - public static ParametersNode.ParametersNodeBuilder builder() { - return new ParametersNode.ParametersNodeBuilder(); - } - - public static class ParametersNodeBuilder { - - private String taskType; - - private String taskParams; - - private String dependence; - - private String switchResult; - - public ParametersNodeBuilder taskType(String taskType) { - this.taskType = taskType; - return this; - } - - public ParametersNodeBuilder taskParams(String taskParams) { - this.taskParams = taskParams; - return this; - } - - public ParametersNodeBuilder dependence(String dependence) { - this.dependence = dependence; - return this; - } - - public ParametersNodeBuilder switchResult(String switchResult) { - this.switchResult = switchResult; - return this; - } - - public ParametersNode build() { - return new ParametersNode(this.taskType, this.taskParams, this.dependence, this.switchResult); - } - - } - - public ParametersNode() { - - } - - public ParametersNode(String taskType, String taskParams, String dependence, String switchResult) { - this.taskType = taskType; - this.taskParams = taskParams; - this.dependence = dependence; - this.switchResult = switchResult; - } - - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - public String getTaskParams() { - return taskParams; - } - - public void setTaskParams(String taskParams) { - this.taskParams = taskParams; - } - - public String getDependence() { - return dependence; - } - - public void setDependence(String dependence) { - this.dependence = dependence; - } - - public String getSwitchResult() { - return switchResult; - } - - public void setSwitchResult(String switchResult) { - this.switchResult = switchResult; - } -} diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java index 0f1a893a30bd..8864943bac43 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParameters.java @@ -21,12 +21,11 @@ import org.apache.dolphinscheduler.plugin.task.api.SQLTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; import org.apache.dolphinscheduler.plugin.task.api.enums.ResourceType; -import org.apache.dolphinscheduler.plugin.task.api.enums.UdfType; import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.UdfFuncParameters; +import org.apache.dolphinscheduler.plugin.task.api.utils.VarPoolUtils; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -36,10 +35,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; -import com.google.common.base.Enums; -import com.google.common.base.Strings; +import com.google.common.collect.Lists; /** * Sql/Hql parameter @@ -78,10 +75,6 @@ public class SqlParameters extends AbstractParameters { */ private int displayRows; - /** - * udf list - */ - private String udfs; /** * show type * 0 TABLE @@ -146,14 +139,6 @@ public void 
setSql(String sql) { this.sql = sql; } - public String getUdfs() { - return udfs; - } - - public void setUdfs(String udfs) { - this.udfs = udfs; - } - public int getSqlType() { return sqlType; } @@ -245,7 +230,7 @@ public void dealOutParam(String result) { return; } if (StringUtils.isEmpty(result)) { - varPool.addAll(outProperty); + varPool = VarPoolUtils.mergeVarPool(Lists.newArrayList(varPool, outProperty)); return; } List> sqlResult = getListMapByString(result); @@ -268,7 +253,6 @@ public void dealOutParam(String result) { for (Property info : outProperty) { if (info.getType() == DataType.LIST) { info.setValue(JSONUtils.toJsonString(sqlResultFormat.get(info.getProp()))); - varPool.add(info); } } } else { @@ -276,9 +260,9 @@ public void dealOutParam(String result) { Map firstRow = sqlResult.get(0); for (Property info : outProperty) { info.setValue(String.valueOf(firstRow.get(info.getProp()))); - varPool.add(info); } } + varPool = VarPoolUtils.mergeVarPool(Lists.newArrayList(varPool, outProperty)); } @@ -292,7 +276,6 @@ public String toString() { + ", sendEmail=" + sendEmail + ", displayRows=" + displayRows + ", limit=" + limit - + ", udfs='" + udfs + '\'' + ", showType='" + showType + '\'' + ", connParams='" + connParams + '\'' + ", groupId='" + groupId + '\'' @@ -307,21 +290,12 @@ public ResourceParametersHelper getResources() { ResourceParametersHelper resources = super.getResources(); resources.put(ResourceType.DATASOURCE, datasource); - // whether udf type - boolean udfTypeFlag = Enums.getIfPresent(UdfType.class, Strings.nullToEmpty(this.getType())).isPresent() - && !StringUtils.isEmpty(this.getUdfs()); - - if (udfTypeFlag) { - String[] udfFunIds = this.getUdfs().split(","); - for (int i = 0; i < udfFunIds.length; i++) { - resources.put(ResourceType.UDF, Integer.parseInt(udfFunIds[i])); - } - } return resources; } /** * TODO SQLTaskExecutionContext needs to be optimized + * * @param parametersHelper * @return */ @@ -332,16 +306,6 @@ public 
SQLTaskExecutionContext generateExtendedContext(ResourceParametersHelper (DataSourceParameters) parametersHelper.getResourceParameters(ResourceType.DATASOURCE, datasource); sqlTaskExecutionContext.setConnectionParams(dbSource.getConnectionParams()); - // whether udf type - boolean udfTypeFlag = Enums.getIfPresent(UdfType.class, Strings.nullToEmpty(this.getType())).isPresent() - && !StringUtils.isEmpty(this.getUdfs()); - - if (udfTypeFlag) { - List collect = parametersHelper.getResourceMap(ResourceType.UDF).entrySet().stream() - .map(entry -> (UdfFuncParameters) entry.getValue()).collect(Collectors.toList()); - sqlTaskExecutionContext.setUdfFuncParametersList(collect); - } - return sqlTaskExecutionContext; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java index 336831393581..655ba7540cec 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SubProcessParameters.java @@ -19,9 +19,6 @@ public class SubProcessParameters extends AbstractParameters { - /** - * process definition id - */ private long processDefinitionCode; public void setProcessDefinitionCode(long processDefinitionCode) { @@ -34,7 +31,7 @@ public long getProcessDefinitionCode() { @Override public boolean checkParameters() { - return this.processDefinitionCode != 0; + return this.processDefinitionCode > 0; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SwitchParameters.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SwitchParameters.java index a1928bbe1636..655a86f9e0c4 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SwitchParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SwitchParameters.java @@ -17,69 +17,56 @@ package org.apache.dolphinscheduler.plugin.task.api.parameters; -import org.apache.dolphinscheduler.plugin.task.api.enums.DependentRelation; import org.apache.dolphinscheduler.plugin.task.api.model.SwitchResultVo; -import java.util.ArrayList; +import org.apache.commons.collections4.CollectionUtils; + import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor public class SwitchParameters extends AbstractParameters { - private DependentRelation dependRelation; - private String relation; - private List nextNode; + // due to history reasons, the field name is switchResult + private SwitchResult switchResult; + + // The next branch which should be executed after the switch logic task executed. 
+ private Long nextBranch; @Override public boolean checkParameters() { + if (switchResult == null) { + return false; + } + if (CollectionUtils.isEmpty(switchResult.getDependTaskList()) && switchResult.getNextNode() == null) { + return false; + } + for (SwitchResultVo switchResultVo : switchResult.getDependTaskList()) { + if (switchResultVo == null || switchResultVo.getNextNode() == null) { + return false; + } + } return true; } - private int resultConditionLocation; - private List dependTaskList; + @Data + @Builder + @AllArgsConstructor + @NoArgsConstructor + public static class SwitchResult { - public DependentRelation getDependRelation() { - return dependRelation; - } - - public void setDependRelation(DependentRelation dependRelation) { - this.dependRelation = dependRelation; - } + // switch condition + private List dependTaskList; - public int getResultConditionLocation() { - return resultConditionLocation; + // default branch node code in switch task + private Long nextNode; } - public void setResultConditionLocation(int resultConditionLocation) { - this.resultConditionLocation = resultConditionLocation; - } - - public String getRelation() { - return relation; - } - - public void setRelation(String relation) { - this.relation = relation; - } - - public List getDependTaskList() { - return dependTaskList; - } - - public void setDependTaskList(List dependTaskList) { - this.dependTaskList = dependTaskList; - } - - public List getNextNode() { - return nextNode; - } - - public void setNextNode(Object nextNode) { - if (nextNode instanceof Long) { - List nextNodeList = new ArrayList<>(); - nextNodeList.add((Long) nextNode); - this.nextNode = nextNodeList; - } else { - this.nextNode = (ArrayList) nextNode; - } - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java index 4d3ec179548a..c61851133b9b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/AbstractResourceParameters.java @@ -23,8 +23,7 @@ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, visible = true, property = "resourceType") @JsonSubTypes({ - @Type(value = DataSourceParameters.class, name = "DATASOURCE"), - @Type(value = UdfFuncParameters.class, name = "UDF") + @Type(value = DataSourceParameters.class, name = "DATASOURCE") }) public abstract class AbstractResourceParameters { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/UdfFuncParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/UdfFuncParameters.java deleted file mode 100644 index a36c8d84b511..000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/resource/UdfFuncParameters.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.api.parameters.resource; - -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.plugin.task.api.enums.UdfType; - -import java.util.Date; - -import lombok.Data; - -import com.fasterxml.jackson.annotation.JsonProperty; - -@Data -public class UdfFuncParameters extends AbstractResourceParameters { - - /** - * id - */ - private int id; - - public String getResourceType() { - return resourceType; - } - - public void setResourceType(String resourceType) { - this.resourceType = resourceType; - } - - @JsonProperty(value = "UDF") - private String resourceType; - - /** - * user id - */ - private int userId; - - /** - * udf function name - */ - private String funcName; - - /** - * udf class name - */ - private String className; - - /** - * udf argument types - */ - private String argTypes; - - /** - * udf data base - */ - private String database; - - /** - * udf description - */ - private String description; - - /** - * resource id - */ - private int resourceId; - - /** - * resource name - */ - private String resourceName; - - /** - * udf function type: hive / spark - */ - private UdfType type; - - /** - * create time - */ - private Date createTime; - - /** - * update time - */ - private Date updateTime; - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - UdfFuncParameters udfFuncRequest = (UdfFuncParameters) o; - - if (id != udfFuncRequest.id) { - return 
false; - } - return !(funcName != null ? !funcName.equals(udfFuncRequest.funcName) : udfFuncRequest.funcName != null); - - } - - @Override - public int hashCode() { - int result = id; - result = 31 * result + (funcName != null ? funcName.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return JSONUtils.toJsonString(this); - } - -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/resource/ResourceContext.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/resource/ResourceContext.java index 687d1aeb958b..f90b5269024c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/resource/ResourceContext.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/resource/ResourceContext.java @@ -60,7 +60,6 @@ public ResourceItem getResourceItem(String resourceAbsolutePathInStorage) { public static class ResourceItem { private String resourceAbsolutePathInStorage; - private String resourceRelativePath; private String resourceAbsolutePathInLocal; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTaskChannel.java index 6c35b3cec41f..bc0107a11eb2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/stream/StreamTaskChannel.java @@ -17,13 +17,8 @@ package org.apache.dolphinscheduler.plugin.task.api.stream; -import 
org.apache.dolphinscheduler.plugin.task.api.AbstractTask; import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; -import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; public interface StreamTaskChannel extends TaskChannel { - AbstractTask pauseTask(TaskExecutionContext taskExecutionContext); - - AbstractTask recoverTask(TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/AbstractLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/AbstractLogicTaskChannel.java new file mode 100644 index 000000000000..b0bcdc8767eb --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/AbstractLogicTaskChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.AbstractTask; +import org.apache.dolphinscheduler.plugin.task.api.ILogicTaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; + +public abstract class AbstractLogicTaskChannel implements ILogicTaskChannel { + + @Override + public AbstractTask createTask(TaskExecutionContext taskRequest) { + throw new UnsupportedOperationException("createTask is not supported"); + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannel.java new file mode 100644 index 000000000000..976c6583038f --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.ConditionsParameters; + +public class ConditionsLogicTaskChannel extends AbstractLogicTaskChannel { + + @Override + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, ConditionsParameters.class); + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannelFactory.java new file mode 100644 index 000000000000..fd63ea50394a --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/ConditionsLogicTaskChannelFactory.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class ConditionsLogicTaskChannelFactory implements TaskChannelFactory { + + public static final String NAME = "CONDITIONS"; + + @Override + public String getName() { + return NAME; + } + + @Override + public TaskChannel create() { + return new ConditionsLogicTaskChannel(); + } +} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannel.java similarity index 61% rename from dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java rename to dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannel.java index df88de34e30a..04045f0f4b32 100644 --- a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannel.java @@ -15,22 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.server.master.processor; +package org.apache.dolphinscheduler.plugin.task.api.task; -import org.apache.dolphinscheduler.server.master.utils.DataQualityResultOperator; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.DependentParameters; -import org.mockito.Mockito; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; +public class DependentLogicTaskChannel extends AbstractLogicTaskChannel { -/** - * dependency config - */ -@Configuration -public class TaskResponseProcessorTestConfig { - - @Bean - public DataQualityResultOperator dataQualityResultOperator() { - return Mockito.mock(DataQualityResultOperator.class); + @Override + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DependentParameters.class); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannelFactory.java new file mode 100644 index 000000000000..332a1c6c4279 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DependentLogicTaskChannelFactory.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class DependentLogicTaskChannelFactory implements TaskChannelFactory { + + public static final String NAME = "DEPENDENT"; + @Override + public String getName() { + return NAME; + } + + @Override + public TaskChannel create() { + return new DependentLogicTaskChannel(); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannel.java new file mode 100644 index 000000000000..d9f066d773cb --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannel.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.DynamicParameters; + +public class DynamicLogicTaskChannel extends AbstractLogicTaskChannel { + + @Override + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DynamicParameters.class); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannelFactory.java new file mode 100644 index 000000000000..0164e3ba53eb --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/DynamicLogicTaskChannelFactory.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class DynamicLogicTaskChannelFactory implements TaskChannelFactory { + + public static final String NAME = "DYNAMIC"; + @Override + public String getName() { + return NAME; + } + + @Override + public TaskChannel create() { + return new DynamicLogicTaskChannel(); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java new file mode 100644 index 000000000000..354be549866d --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannel.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.SubProcessParameters; + +public class SubWorkflowLogicTaskChannel extends AbstractLogicTaskChannel { + + @Override + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, SubProcessParameters.class); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java new file mode 100644 index 000000000000..06bb6c5ebeed --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SubWorkflowLogicTaskChannelFactory.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class SubWorkflowLogicTaskChannelFactory implements TaskChannelFactory { + + public static final String NAME = "SUB_PROCESS"; + + @Override + public String getName() { + return NAME; + } + + @Override + public TaskChannel create() { + return new SubWorkflowLogicTaskChannel(); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannel.java new file mode 100644 index 000000000000..6d549bcb9bef --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannel.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parameters.SwitchParameters; + +public class SwitchLogicTaskChannel extends AbstractLogicTaskChannel { + + @Override + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, SwitchParameters.class); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannelFactory.java new file mode 100644 index 000000000000..3f5b254ca68d --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/task/SwitchLogicTaskChannelFactory.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.task; + +import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class SwitchLogicTaskChannelFactory implements TaskChannelFactory { + + public static final String NAME = "SWITCH"; + + @Override + public String getName() { + return NAME; + } + + @Override + public TaskChannel create() { + return new SwitchLogicTaskChannel(); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ParameterUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ParameterUtils.java index ea334125d546..3347297a463a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ParameterUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ParameterUtils.java @@ -145,7 +145,7 @@ public static void setInParameter(int index, PreparedStatement stmt, DataType da } else if (dataType.equals(DataType.DATE)) { stmt.setDate(index, java.sql.Date.valueOf(value)); } else if (dataType.equals(DataType.TIME)) { - stmt.setString(index, value); + stmt.setTime(index, java.sql.Time.valueOf(value)); } else if (dataType.equals(DataType.TIMESTAMP)) { 
stmt.setTimestamp(index, java.sql.Timestamp.valueOf(value)); } else if (dataType.equals(DataType.BOOLEAN)) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java index 7b61a1eaec40..e8e31faa6d31 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java @@ -39,6 +39,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.SystemUtils; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -117,33 +118,45 @@ public static boolean kill(@NonNull TaskExecutionContext request) { * @throws Exception exception */ public static String getPidsStr(int processId) throws Exception { - StringBuilder sb = new StringBuilder(); - Matcher mat = null; + + String rawPidStr; + // pstree pid get sub pids if (SystemUtils.IS_OS_MAC) { - String pids = OSUtils.exeCmd(String.format("%s -sp %d", TaskConstants.PSTREE, processId)); - if (StringUtils.isNotEmpty(pids)) { - mat = MACPATTERN.matcher(pids); + rawPidStr = OSUtils.exeCmd(String.format("%s -sp %d", TaskConstants.PSTREE, processId)); + } else if (SystemUtils.IS_OS_LINUX) { + rawPidStr = OSUtils.exeCmd(String.format("%s -p %d", TaskConstants.PSTREE, processId)); + } else { + rawPidStr = OSUtils.exeCmd(String.format("%s -p %d", TaskConstants.PSTREE, processId)); + } + + return parsePidStr(rawPidStr); + } + + public static String parsePidStr(String rawPidStr) { + + log.info("prepare to parse pid, raw pid string: {}", rawPidStr); + ArrayList allPidList = new ArrayList<>(); + Matcher mat = null; + if 
(SystemUtils.IS_OS_MAC) { + if (StringUtils.isNotEmpty(rawPidStr)) { + mat = MACPATTERN.matcher(rawPidStr); } } else if (SystemUtils.IS_OS_LINUX) { - String pids = OSUtils.exeCmd(String.format("%s -p %d", TaskConstants.PSTREE, processId)); - if (StringUtils.isNotEmpty(pids)) { - mat = LINUXPATTERN.matcher(pids); + if (StringUtils.isNotEmpty(rawPidStr)) { + mat = LINUXPATTERN.matcher(rawPidStr); } } else { - String pids = OSUtils.exeCmd(String.format("%s -p %d", TaskConstants.PSTREE, processId)); - if (StringUtils.isNotEmpty(pids)) { - mat = WINDOWSPATTERN.matcher(pids); + if (StringUtils.isNotEmpty(rawPidStr)) { + mat = WINDOWSPATTERN.matcher(rawPidStr); } } - if (null != mat) { while (mat.find()) { - sb.append(mat.group(1)).append(" "); + allPidList.add(mat.group(1)); } } - - return sb.toString().trim(); + return String.join(" ", allPidList).trim(); } /** diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/PropertyUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/PropertyUtils.java new file mode 100644 index 000000000000..41c115ab0f8c --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/PropertyUtils.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.utils; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; +import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; + +/** + * property utils + */ +public class PropertyUtils { + + private PropertyUtils() { + throw new IllegalStateException("PropertyUtils class"); + } + + /** + * startParams transform propertyList + * + * @param startParams startParams + * @return startParamList + */ + public static List startParamsTransformPropertyList(String startParams) { + List startParamList = null; + if (startParams != null) { + JsonElement jsonElement = JsonParser.parseString(startParams); + boolean isJson = jsonElement.isJsonObject(); + if (isJson) { + Map startParamMap = JSONUtils.toMap(startParams); + startParamList = startParamMap.entrySet().stream() + .map(entry -> new Property(entry.getKey(), Direct.IN, DataType.VARCHAR, entry.getValue())) + .collect(Collectors.toList()); + } else { + startParamList = JSONUtils.toList(startParams, Property.class); + } + } + return startParamList; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskTypeUtils.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskTypeUtils.java new file mode 100644 index 000000000000..cf1f2022ea02 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskTypeUtils.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.plugin.task.api.utils; + +import org.apache.dolphinscheduler.plugin.task.api.ILogicTaskChannel; +import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager; +import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DynamicLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SubWorkflowLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; + +import lombok.experimental.UtilityClass; + +@UtilityClass +public class TaskTypeUtils { + + public boolean isSwitchTask(String taskType) { + return SwitchLogicTaskChannelFactory.NAME.equals(taskType); + } + + public boolean isConditionTask(String taskType) { + return ConditionsLogicTaskChannelFactory.NAME.equals(taskType); + } + + public boolean isSubWorkflowTask(String taskType) { + return SubWorkflowLogicTaskChannelFactory.NAME.equals(taskType); + } + + public boolean isDependentTask(String taskType) { + return org.apache.dolphinscheduler.plugin.task.api.task.DependentLogicTaskChannelFactory.NAME.equals(taskType); + } + + public boolean isDynamicTask(String taskType) { + return DynamicLogicTaskChannelFactory.NAME.equals(taskType); + } + + public boolean isLogicTask(String taskType) { + return TaskPluginManager.getTaskChannel(taskType) instanceof ILogicTaskChannel; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskUtils.java deleted file mode 100644 index 606058d40fdf..000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/TaskUtils.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or
more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.plugin.task.api.utils; - -import java.util.Set; - -import lombok.experimental.UtilityClass; - -import com.google.common.collect.Sets; - -@UtilityClass -public class TaskUtils { - - private final String blockingLogicTask = "BLOCKING"; - private final String conditionLogicTask = "CONDITIONS"; - - private final String dependentLogicTask = "DEPENDENT"; - private final String subWorkflowLogicTask = "SUB_PROCESS"; - private final String switchLogicTask = "SWITCH"; - private final String dynamicLogicTask = "DYNAMIC"; - - // todo: Add to SPI - private final Set MASTER_TASK_TYPES = Sets.newHashSet( - blockingLogicTask, - conditionLogicTask, - dependentLogicTask, - subWorkflowLogicTask, - switchLogicTask, - dynamicLogicTask); - - // todo: add to task plugin spi - public boolean isLogicTask(String taskType) { - return MASTER_TASK_TYPES.contains(taskType); - } - -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtils.java new file mode 100644 index 000000000000..7c24eb9a2189 --- 
/dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtils.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.api.utils; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@UtilityClass +public class VarPoolUtils { + + public List deserializeVarPool(String varPoolJson) { + return JSONUtils.toList(varPoolJson, Property.class); + } + + /** + * @see #mergeVarPool(List) + */ + public String mergeVarPoolJsonString(List varPoolJsons) { + if (CollectionUtils.isEmpty(varPoolJsons)) { + return null; + } + List> varPools = varPoolJsons.stream() + .map(VarPoolUtils::deserializeVarPool) + .collect(Collectors.toList()); + List finalVarPool = 
mergeVarPool(varPools); + return JSONUtils.toJsonString(finalVarPool); + } + + /** + * Merge the given list of varpools, and return the merged varpool. + * If two varpools contain the same property ({@link Property#getProp()} and {@link Property#getDirect()} are the same), the value of the property in the later varpool will be used. + * // todo: we may need to consider the datatype of the property + */ + public List mergeVarPool(List> varPools) { + if (CollectionUtils.isEmpty(varPools)) { + return null; + } + if (varPools.size() == 1) { + return varPools.get(0); + } + Map result = new HashMap<>(); + for (List varPool : varPools) { + if (CollectionUtils.isEmpty(varPool)) { + continue; + } + for (Property property : varPool) { + if (!Direct.OUT.equals(property.getDirect())) { + log.info("The direct should be OUT in varPool, but got {}", property.getDirect()); + continue; + } + result.put(property.getProp(), property); + } + } + return new ArrayList<>(result.values()); + } + + public String subtractVarPoolJson(String varPool, List subtractVarPool) { + List varPoolList = deserializeVarPool(varPool); + List> subtractVarPoolList = subtractVarPool.stream() + .map(VarPoolUtils::deserializeVarPool) + .collect(Collectors.toList()); + List finalVarPool = subtractVarPool(varPoolList, subtractVarPoolList); + return JSONUtils.toJsonString(finalVarPool); + } + + /** + * Return the subtracted varpool, which key is in varPool but not in subtractVarPool.
+ */ + public List subtractVarPool(List varPool, List> subtractVarPool) { + if (CollectionUtils.isEmpty(varPool)) { + return null; + } + if (CollectionUtils.isEmpty(subtractVarPool)) { + return varPool; + } + Map subtractVarPoolMap = new HashMap<>(); + for (List properties : subtractVarPool) { + for (Property property : properties) { + subtractVarPoolMap.put(property.getProp(), property); + } + } + List result = new ArrayList<>(); + for (Property property : varPool) { + if (!subtractVarPoolMap.containsKey(property.getProp())) { + result.add(property); + } + } + return result; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManagerTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManagerTest.java new file mode 100644 index 000000000000..a380af488f59 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManagerTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.api; + +import static com.google.common.truth.Truth.assertThat; + +import org.apache.dolphinscheduler.plugin.task.api.task.ConditionsLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DependentLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.DynamicLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SubWorkflowLogicTaskChannelFactory; +import org.apache.dolphinscheduler.plugin.task.api.task.SwitchLogicTaskChannelFactory; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +class TaskPluginManagerTest { + + @ParameterizedTest + @ValueSource(strings = { + ConditionsLogicTaskChannelFactory.NAME, + DependentLogicTaskChannelFactory.NAME, + DynamicLogicTaskChannelFactory.NAME, + SubWorkflowLogicTaskChannelFactory.NAME, + SwitchLogicTaskChannelFactory.NAME}) + void testGetTaskChannel_logicTaskChannel(String type) { + assertThat(TaskPluginManager.getTaskChannel(type)).isNotNull(); + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/k8s/K8sTaskExecutorTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/k8s/K8sTaskExecutorTest.java index 1e7629acce25..d93130caee9a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/k8s/K8sTaskExecutorTest.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/k8s/K8sTaskExecutorTest.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.jupiter.api.Assertions; @@ -99,4 +100,12 @@ public void testWaitTimeoutNormal() { } } + @Test + public void 
testLoadYamlCorrectly() { + List expectedCommands = Arrays.asList("perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"); + List actualCommands = + k8sTaskExecutor.getJob().getSpec().getTemplate().getSpec().getContainers().get(0).getCommand(); + Assertions.assertEquals(expectedCommands, actualCommands); + } + } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParserTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParserTest.java index 3bcb80585d18..e25eaf72a1af 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParserTest.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/loop/template/http/parser/HttpTaskDefinitionParserTest.java @@ -20,6 +20,8 @@ import org.apache.dolphinscheduler.plugin.task.api.loop.template.LoopTaskYamlDefinition; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -32,11 +34,24 @@ public class HttpTaskDefinitionParserTest { @Test public void parseYamlConfigFile() throws IOException { LoopTaskYamlDefinition loopTaskYamlDefinition = new HttpTaskDefinitionParser().parseYamlConfigFile(yamlFile); + // check not null Assertions.assertNotNull(loopTaskYamlDefinition); Assertions.assertNotNull(loopTaskYamlDefinition.getService()); + Assertions.assertNotNull(loopTaskYamlDefinition.getService().getName()); + Assertions.assertNotNull(loopTaskYamlDefinition.getService().getType()); + Assertions.assertNotNull(loopTaskYamlDefinition.getService().getApi()); + 
Assertions.assertNotNull(loopTaskYamlDefinition.getService().getApi().getSubmit()); + Assertions.assertNotNull(loopTaskYamlDefinition.getService().getApi().getQueryState()); + Assertions.assertNotNull(loopTaskYamlDefinition.getService().getApi().getCancel()); + // check data consistency LoopTaskYamlDefinition.LoopTaskServiceYamlDefinition service = loopTaskYamlDefinition.getService(); Assertions.assertEquals("MockService", service.getName()); - Assertions.assertNotNull(service.getApi()); + Assertions.assertEquals("Http", service.getType()); + Map expectedHeaders = new HashMap<>(); + expectedHeaders.put("Content-Type", "text/html"); + expectedHeaders.put("Content-Length", "1234"); + Assertions.assertEquals("/api/v1/submit", service.getApi().getSubmit().getUrl()); + Assertions.assertEquals(expectedHeaders, service.getApi().getSubmit().getHttpHeaders()); } @Test diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java index 83fe739f4705..8f1ee7656000 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/parameters/SqlParametersTest.java @@ -33,7 +33,6 @@ public class SqlParametersTest { private final String type = "MYSQL"; private final String sql = "select * from t_ds_user"; - private final String udfs = "test-udfs-1.0.0-SNAPSHOT.jar"; private final int datasource = 1; private final int sqlType = 0; private final Boolean sendEmail = true; @@ -57,7 +56,6 @@ public void testSqlParameters() { sqlParameters.setType(type); sqlParameters.setSql(sql); - sqlParameters.setUdfs(udfs); 
sqlParameters.setDatasource(datasource); sqlParameters.setSqlType(sqlType); sqlParameters.setSendEmail(sendEmail); @@ -68,7 +66,6 @@ public void testSqlParameters() { Assertions.assertEquals(type, sqlParameters.getType()); Assertions.assertEquals(sql, sqlParameters.getSql()); - Assertions.assertEquals(udfs, sqlParameters.getUdfs()); Assertions.assertEquals(datasource, sqlParameters.getDatasource()); Assertions.assertEquals(sqlType, sqlParameters.getSqlType()); Assertions.assertEquals(sendEmail, sqlParameters.getSendEmail()); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtilsTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtilsTest.java new file mode 100644 index 000000000000..231d97029fd9 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/VarPoolUtilsTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.api.utils; + +import org.apache.dolphinscheduler.plugin.task.api.enums.DataType; +import org.apache.dolphinscheduler.plugin.task.api.enums.Direct; +import org.apache.dolphinscheduler.plugin.task.api.model.Property; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import com.google.common.collect.Lists; +import com.google.common.truth.Truth; + +class VarPoolUtilsTest { + + @Test + void mergeVarPool() { + Truth.assertThat(VarPoolUtils.mergeVarPool(null)).isNull(); + + // Override the value of the same property + // Merge the property with different key. + List varpool1 = Lists.newArrayList(new Property("name", Direct.OUT, DataType.VARCHAR, "tom")); + List varpool2 = Lists.newArrayList( + new Property("name", Direct.OUT, DataType.VARCHAR, "tim"), + new Property("age", Direct.OUT, DataType.INTEGER, "10")); + + Truth.assertThat(VarPoolUtils.mergeVarPool(Lists.newArrayList(varpool1, varpool2))) + .containsExactly( + new Property("name", Direct.OUT, DataType.VARCHAR, "tim"), + new Property("age", Direct.OUT, DataType.INTEGER, "10")); + + } + + @Test + void subtractVarPool() { + Truth.assertThat(VarPoolUtils.subtractVarPool(null, null)).isNull(); + List varpool1 = Lists.newArrayList(new Property("name", Direct.OUT, DataType.VARCHAR, "tom"), + new Property("age", Direct.OUT, DataType.INTEGER, "10")); + List varpool2 = Lists.newArrayList(new Property("name", Direct.OUT, DataType.VARCHAR, "tom")); + List varpool3 = Lists.newArrayList(new Property("location", Direct.OUT, DataType.VARCHAR, "china")); + + Truth.assertThat(VarPoolUtils.subtractVarPool(varpool1, Lists.newArrayList(varpool2, varpool3))) + .containsExactly(new Property("age", Direct.OUT, DataType.INTEGER, "10")); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties index 
402112263f9d..40e1c5abcb74 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties @@ -26,17 +26,6 @@ resource.storage.type=NONE # resource store on HDFS/S3 path, resource file will store to this base path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended resource.storage.upload.base.path=/dolphinscheduler -# The AWS access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.access.key.id=minioadmin -# The AWS secret access key. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.secret.access.key=minioadmin -# The AWS Region to use. if resource.storage.type=S3 or use EMR-Task, This configuration is required -resource.aws.region=cn-north-1 -# The name of the bucket. You need to create them by yourself. Otherwise, the system cannot start. All buckets in Amazon S3 share a single namespace; ensure the bucket is given a unique name. -resource.aws.s3.bucket.name=dolphinscheduler -# You need to set this parameter when private cloud s3. 
If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn -resource.aws.s3.endpoint=http://localhost:9000 - # alibaba cloud access key id, required if you set resource.storage.type=OSS resource.alibaba.cloud.access.key.id= # alibaba cloud access key secret, required if you set resource.storage.type=OSS diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/mock_loop_task.yaml b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/mock_loop_task.yaml index 3f891c805b20..61c98e8632ff 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/mock_loop_task.yaml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/mock_loop_task.yaml @@ -22,6 +22,7 @@ service: url: /api/v1/submit method: POST dataType: Json + httpHeaders: { "Content-Type": "text/html", "Content-Length": "1234" } requestParams: { "taskId": "704" } taskInstanceIdJPath: "$.taskInstanceId[0]" queryState: diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannel.java index 02063d07c1c7..cce2e28090b5 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannel.java @@ -22,31 +22,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import 
org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; -/** - * chunjun task channel - */ public class ChunJunTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public AbstractTask createTask(TaskExecutionContext taskExecutionContext) { return new ChunJunTask(taskExecutionContext); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), ChunJunParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, ChunJunParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return JSONUtils.parseObject(parameters, ChunJunParameters.class).getResources(); - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannelFactory.java index 06a0de36f1e6..b1465d723a31 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTaskChannelFactory.java @@ -19,9 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.List; import com.google.auto.service.AutoService; @@ -47,13 +44,4 @@ public String getName() { return "CHUNJUN"; } - /** - * Returns the 
configurable parameters that this plugin needs to display on the web ui - * - * @return this plugin params - */ - @Override - public List getParams() { - return null; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannel.java index d8c099bb09f0..b5062a3d4cf6 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannel.java @@ -21,27 +21,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DatafactoryTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - } - @Override public DatafactoryTask createTask(TaskExecutionContext taskRequest) { return new DatafactoryTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DatafactoryParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DatafactoryParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannelFactory.java index e9dce1ac9667..0ceb0ea2ed1e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTaskChannelFactory.java @@ -19,10 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.Collections; -import java.util.List; import com.google.auto.service.AutoService; @@ -39,8 +35,4 @@ public String getName() { return "DATA_FACTORY"; } - @Override - public List getParams() { - return Collections.emptyList(); - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml index cc64b6cdcb49..6626234a19ac 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml @@ -43,11 +43,6 @@ dolphinscheduler-datasource-all ${project.version}
- - org.apache.dolphinscheduler - dolphinscheduler-data-quality - ${project.version} - org.apache.commons commons-collections4 diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java index b14596978c50..1608de87afd3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java @@ -22,29 +22,18 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DataQualityTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public AbstractTask createTask(TaskExecutionContext taskRequest) { return new DataQualityTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DataQualityParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DataQualityParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java index f56d65723f4c..7ad56daf136c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java @@ -19,9 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.List; import com.google.auto.service.AutoService; @@ -33,11 +30,6 @@ public String getName() { return "DATA_QUALITY"; } - @Override - public List getParams() { - return null; - } - @Override public TaskChannel create() { return new DataQualityTaskChannel(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java index e99bb3dfafd9..185573f66e3d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.plugin.task.dq.utils; +import static java.nio.charset.StandardCharsets.UTF_8; import static 
org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_BUSINESS_DATE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_CURRENT_DATE; import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_DATETIME; @@ -62,7 +63,6 @@ import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.USER; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.data.quality.utils.ParserUtils; import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils; import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType; @@ -80,12 +80,15 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; +import java.net.URLEncoder; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import lombok.SneakyThrows; + import com.fasterxml.jackson.databind.node.ArrayNode; /** @@ -102,9 +105,10 @@ private RuleParserUtils() { private static final String AND_TARGET_FILTER = "AND (${target_filter})"; private static final String WHERE_TARGET_FILTER = "WHERE (${target_filter})"; + @SneakyThrows public static List getReaderConfigList( Map inputParameterValue, - DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { List readerConfigList = new ArrayList<>(); @@ -123,7 +127,7 @@ public static List getReaderConfigList( config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getSourceType()), sourceDataSource)); config.put(USER, sourceDataSource.getUser()); - config.put(PASSWORD, ParserUtils.encode(sourceDataSource.getPassword())); + config.put(PASSWORD, URLEncoder.encode(sourceDataSource.getPassword(), 
UTF_8.name())); config.put(DRIVER, DataSourceUtils .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getSourceType()))); String outputTable = inputParameterValue.get(SRC_DATABASE) + "_" + inputParameterValue.get(SRC_TABLE); @@ -150,7 +154,7 @@ public static List getReaderConfigList( config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getTargetType()), targetDataSource)); config.put(USER, targetDataSource.getUser()); - config.put(PASSWORD, ParserUtils.encode(targetDataSource.getPassword())); + config.put(PASSWORD, URLEncoder.encode(targetDataSource.getPassword(), UTF_8.name())); config.put(DRIVER, DataSourceUtils .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getTargetType()))); String outputTable = @@ -264,9 +268,10 @@ public static Map getInputParameterMapFromEntryList(List getWriterConfigList( String sql, - DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { List writerConfigList = new ArrayList<>(); if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getWriterConnectorType())) { @@ -284,7 +289,7 @@ public static List getWriterConfigList( config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getWriterType()), writerDataSource)); config.put(USER, writerDataSource.getUser()); - config.put(PASSWORD, ParserUtils.encode(writerDataSource.getPassword())); + config.put(PASSWORD, URLEncoder.encode(writerDataSource.getPassword(), UTF_8.name())); config.put(DRIVER, DataSourceUtils .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType()))); config.put(SQL, sql); @@ -336,8 +341,9 @@ public static List getStatisticsValueConfigReaderList( return readerConfigList; } + @SneakyThrows public static BaseConfig getStatisticsValueConfig( - DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + 
DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { BaseConfig baseConfig = null; if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) { BaseConnectionParam writerDataSource = @@ -354,7 +360,7 @@ public static BaseConfig getStatisticsValueConfig( config.put(URL, DataSourceUtils.getJdbcUrl( DbType.of(dataQualityTaskExecutionContext.getStatisticsValueType()), writerDataSource)); config.put(USER, writerDataSource.getUser()); - config.put(PASSWORD, ParserUtils.encode(writerDataSource.getPassword())); + config.put(PASSWORD, URLEncoder.encode(writerDataSource.getPassword(), UTF_8.name())); config.put(DRIVER, DataSourceUtils .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType()))); } @@ -544,6 +550,7 @@ public static BaseConfig getErrorOutputWriter(Map inputParameter /** * the unique code use to get the same type and condition task statistics value + * * @param inputParameterValue * @return */ diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/pom.xml index b3e1edb79ff4..5ec1ce5766f8 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/pom.xml @@ -31,22 +31,18 @@ org.apache.dolphinscheduler dolphinscheduler-spi - provided org.apache.dolphinscheduler dolphinscheduler-task-api - provided - - software.amazon.awssdk - datasync + org.apache.dolphinscheduler + dolphinscheduler-aws-authentication org.apache.dolphinscheduler dolphinscheduler-common - provided
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java index a9f855503a41..aff7ba2558d2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java @@ -17,13 +17,9 @@ package org.apache.dolphinscheduler.plugin.task.datasync; +import org.apache.dolphinscheduler.authentication.aws.DataSyncClientFactory; import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; -import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; -import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; -import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.datasync.DataSyncClient; import software.amazon.awssdk.services.datasync.model.CancelTaskExecutionRequest; import software.amazon.awssdk.services.datasync.model.CancelTaskExecutionResponse; @@ -48,6 +44,7 @@ import java.lang.reflect.InvocationTargetException; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import lombok.Data; @@ -73,16 +70,8 @@ public DatasyncHook() { } protected static DataSyncClient createClient() { - final String awsAccessKeyId = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID); - final String awsSecretAccessKey = PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY); - final String awsRegion = PropertyUtils.getString(TaskConstants.AWS_REGION); - - final AwsBasicCredentials 
basicAWSCredentials = AwsBasicCredentials.create(awsAccessKeyId, awsSecretAccessKey); - final AwsCredentialsProvider awsCredentialsProvider = StaticCredentialsProvider.create(basicAWSCredentials); - - // create a datasync client - return DataSyncClient.builder().region(Region.of(awsRegion)).credentialsProvider(awsCredentialsProvider) - .build(); + Map awsProperties = PropertyUtils.getByPrefix("aws.datasync.", ""); + return DataSyncClientFactory.createDataSyncClient(awsProperties); } public Boolean createDatasyncTask(DatasyncParameters parameters) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannel.java index 1d4351caae1a..0e390c757ef6 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannel.java @@ -21,28 +21,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DatasyncTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public DatasyncTask createTask(TaskExecutionContext taskRequest) { return new DatasyncTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return 
JSONUtils.parseObject(parametersNode.getTaskParams(), DatasyncParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DatasyncParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannelFactory.java index 5fac501bea71..770b07d3bb07 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTaskChannelFactory.java @@ -19,10 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.Collections; -import java.util.List; import com.google.auto.service.AutoService; @@ -39,8 +35,4 @@ public String getName() { return "DATASYNC"; } - @Override - public List getParams() { - return Collections.emptyList(); - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java index 5585b3d0b791..fd93e9c37224 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannel.java @@ -22,28 +22,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DataxTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public AbstractTask createTask(TaskExecutionContext taskRequest) { return new DataxTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DataxParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DataxParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return JSONUtils.parseObject(parameters, DataxParameters.class).getResources(); - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java index 332d6bf7082b..2416081acf2b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java @@ -19,9 +19,6 @@ import 
org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.List; import com.google.auto.service.AutoService; @@ -33,11 +30,6 @@ public String getName() { return "DATAX"; } - @Override - public List getParams() { - return null; - } - @Override public TaskChannel create() { return new DataxTaskChannel(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java index 2c0b0cb68e11..8f14e8518a48 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java @@ -27,6 +27,8 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.model.Property; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import org.apache.dolphinscheduler.plugin.task.api.parser.PlaceholderUtils; +import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpResponse; @@ -343,11 +345,15 @@ private Map generateVariables() { } } List localParams = this.dinkyParameters.getLocalParams(); + Map prepareParamsMap = taskExecutionContext.getPrepareParamsMap(); if (localParams == null || localParams.isEmpty()) { return variables; } + Map convertMap = ParameterUtils.convert(prepareParamsMap); for (Property property : localParams) { - variables.put(property.getProp(), property.getValue()); + String propertyValue = property.getValue(); + String 
value = PlaceholderUtils.replacePlaceholders(propertyValue, convertMap, true); + variables.put(property.getProp(), value); } return variables; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannel.java index 83846ff0fc1a..681d965fa821 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannel.java @@ -22,28 +22,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DinkyTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - // nothing to do - } - @Override public AbstractTask createTask(TaskExecutionContext taskRequest) { return new DinkyTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DinkyParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DinkyParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannelFactory.java index ddaa53d233a2..29f35b65e0ab 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTaskChannelFactory.java @@ -19,10 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.ArrayList; -import java.util.List; import com.google.auto.service.AutoService; @@ -34,11 +30,6 @@ public String getName() { return "DINKY"; } - @Override - public List getParams() { - return new ArrayList<>(); - } - @Override public TaskChannel create() { return new DinkyTaskChannel(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml index 8a3bb854134c..4a2be06b47c4 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/pom.xml @@ -25,27 +25,23 @@ dolphinscheduler-task-dms - jar org.apache.dolphinscheduler dolphinscheduler-spi - provided org.apache.dolphinscheduler dolphinscheduler-task-api - provided org.apache.dolphinscheduler dolphinscheduler-common - provided - com.amazonaws - aws-java-sdk-dms + org.apache.dolphinscheduler + dolphinscheduler-aws-authentication diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java index 40aa6a527de0..cf2306abb110 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.plugin.task.dms; +import org.apache.dolphinscheduler.authentication.aws.AWSDatabaseMigrationServiceClientFactory; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.commons.io.IOUtils; @@ -29,6 +29,7 @@ import java.util.Arrays; import java.util.Date; import java.util.List; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; @@ -37,11 +38,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationService; -import com.amazonaws.services.databasemigrationservice.AWSDatabaseMigrationServiceClientBuilder; import com.amazonaws.services.databasemigrationservice.model.CreateReplicationTaskRequest; import com.amazonaws.services.databasemigrationservice.model.CreateReplicationTaskResult; import com.amazonaws.services.databasemigrationservice.model.DeleteReplicationTaskRequest; @@ -87,17 +84,8 @@ public DmsHook() { } public static AWSDatabaseMigrationService createClient() { - final String awsAccessKeyId = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID); - final String awsSecretAccessKey = PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY); - final String awsRegion = 
PropertyUtils.getString(TaskConstants.AWS_REGION); - final BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey); - final AWSCredentialsProvider awsCredentialsProvider = new AWSStaticCredentialsProvider(basicAWSCredentials); - - // create a DMS client - return AWSDatabaseMigrationServiceClientBuilder.standard() - .withCredentials(awsCredentialsProvider) - .withRegion(awsRegion) - .build(); + Map awsProperties = PropertyUtils.getByPrefix("aws.dms.", ""); + return AWSDatabaseMigrationServiceClientFactory.createAWSDatabaseMigrationServiceClient(awsProperties); } public Boolean createReplicationTask() throws Exception { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java index 45f26c5c1eed..cb8d8893bd53 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannel.java @@ -21,29 +21,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DmsTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public DmsTask createTask(TaskExecutionContext taskRequest) { return new DmsTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode 
parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DmsParameters.class); - } - - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DmsParameters.class); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java index a8564ff8a178..079e630d0c62 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTaskChannelFactory.java @@ -19,10 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.Collections; -import java.util.List; import com.google.auto.service.AutoService; @@ -39,8 +35,4 @@ public String getName() { return "DMS"; } - @Override - public List getParams() { - return Collections.emptyList(); - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannel.java index cf78414918d9..06f4421ca466 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannel.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannel.java @@ -21,29 +21,17 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class DvcTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public DvcTask createTask(TaskExecutionContext taskRequest) { return new DvcTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), DvcParameters.class); - } - - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, DvcParameters.class); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannelFactory.java index a29ffded5b7f..b07612ad2eda 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTaskChannelFactory.java @@ -19,14 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import 
org.apache.dolphinscheduler.spi.params.base.ParamsOptions; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; -import org.apache.dolphinscheduler.spi.params.base.Validate; -import org.apache.dolphinscheduler.spi.params.input.InputParam; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; - -import java.util.ArrayList; -import java.util.List; import com.google.auto.service.AutoService; @@ -43,19 +35,4 @@ public String getName() { return "DVC"; } - @Override - public List getParams() { - List paramsList = new ArrayList<>(); - - InputParam nodeName = InputParam.newBuilder("name", "$t('Node name')") - .addValidate(Validate.newBuilder().setRequired(true).build()).build(); - - RadioParam runFlag = RadioParam.newBuilder("runFlag", "RUN_FLAG") - .addParamsOptions(new ParamsOptions("NORMAL", "NORMAL", false)) - .addParamsOptions(new ParamsOptions("FORBIDDEN", "FORBIDDEN", false)).build(); - - paramsList.add(nodeName); - paramsList.add(runFlag); - return paramsList; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/pom.xml index 969756a99146..fa7e51062e2f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/pom.xml @@ -28,19 +28,20 @@ jar + org.apache.dolphinscheduler dolphinscheduler-spi - provided + org.apache.dolphinscheduler dolphinscheduler-task-api - provided + - com.amazonaws - aws-java-sdk-emr + org.apache.dolphinscheduler + dolphinscheduler-aws-authentication diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java index 6f6ec63a29a5..412b0b86e85f 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java @@ -22,22 +22,19 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; +import org.apache.dolphinscheduler.authentication.aws.AmazonElasticMapReduceClientFactory; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask; -import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; +import java.util.Map; import java.util.TimeZone; import lombok.extern.slf4j.Slf4j; -import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce; -import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClientBuilder; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.PropertyNamingStrategy; @@ -94,22 +91,8 @@ public AbstractParameters getParameters() { return emrParameters; } - /** - * create emr client from BasicAWSCredentials - * - * @return AmazonElasticMapReduce - */ protected AmazonElasticMapReduce createEmrClient() { - - final String awsAccessKeyId = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID); - final String awsSecretAccessKey = PropertyUtils.getString(TaskConstants.AWS_SECRET_ACCESS_KEY); - final String awsRegion = 
PropertyUtils.getString(TaskConstants.AWS_REGION); - final BasicAWSCredentials basicAWSCredentials = new BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey); - final AWSCredentialsProvider awsCredentialsProvider = new AWSStaticCredentialsProvider(basicAWSCredentials); - // create an EMR client - return AmazonElasticMapReduceClientBuilder.standard() - .withCredentials(awsCredentialsProvider) - .withRegion(awsRegion) - .build(); + Map awsProperties = PropertyUtils.getByPrefix("aws.emr.", ""); + return AmazonElasticMapReduceClientFactory.createAmazonElasticMapReduceClient(awsProperties); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java index 753b206e21f8..13dc35c30a68 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java @@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; +import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import java.util.Collections; import java.util.HashSet; @@ -126,11 +127,15 @@ public void trackApplicationStatus() throws TaskException { protected AddJobFlowStepsRequest createAddJobFlowStepsRequest() { final AddJobFlowStepsRequest addJobFlowStepsRequest; + String jobStepDefineJson = null; try { + jobStepDefineJson = ParameterUtils.convertParameterPlaceholders( + emrParameters.getStepsDefineJson(), + ParameterUtils.convert(taskExecutionContext.getPrepareParamsMap())); addJobFlowStepsRequest = 
- objectMapper.readValue(emrParameters.getStepsDefineJson(), AddJobFlowStepsRequest.class); + objectMapper.readValue(jobStepDefineJson, AddJobFlowStepsRequest.class); } catch (JsonProcessingException e) { - throw new EmrTaskException("can not parse AddJobFlowStepsRequest from json", e); + throw new EmrTaskException("can not parse AddJobFlowStepsRequest from json: " + jobStepDefineJson, e); } // When a single task definition is associated with multiple steps, the state tracking will have high diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java index f4b05340652e..8b772a1118f2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java @@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; +import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import java.util.Collections; import java.util.HashSet; @@ -120,10 +121,14 @@ public void trackApplicationStatus() throws TaskException { protected RunJobFlowRequest createRunJobFlowRequest() { final RunJobFlowRequest runJobFlowRequest; + String jobFlowDefineJson = null; try { - runJobFlowRequest = objectMapper.readValue(emrParameters.getJobFlowDefineJson(), RunJobFlowRequest.class); + jobFlowDefineJson = ParameterUtils.convertParameterPlaceholders( + emrParameters.getJobFlowDefineJson(), + ParameterUtils.convert(taskExecutionContext.getPrepareParamsMap())); + runJobFlowRequest = 
objectMapper.readValue(jobFlowDefineJson, RunJobFlowRequest.class); } catch (JsonProcessingException e) { - throw new EmrTaskException("can not parse RunJobFlowRequest from json", e); + throw new EmrTaskException("can not parse RunJobFlowRequest from json: " + jobFlowDefineJson, e); } return runJobFlowRequest; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannel.java index 8e42eb600ffe..80bb271ba45b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannel.java @@ -22,16 +22,9 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; public class EmrTaskChannel implements TaskChannel { - @Override - public void cancelApplication(boolean status) { - // no need - } - @Override public AbstractTask createTask(TaskExecutionContext taskRequest) { EmrParameters emrParameters = JSONUtils.parseObject(taskRequest.getTaskParams(), EmrParameters.class); @@ -46,12 +39,8 @@ public AbstractTask createTask(TaskExecutionContext taskRequest) { } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), EmrParameters.class); + public AbstractParameters parseParameters(String taskParams) { + return 
JSONUtils.parseObject(taskParams, EmrParameters.class); } - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannelFactory.java index cad5583f07a1..606b0a64d88c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrTaskChannelFactory.java @@ -19,10 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.Collections; -import java.util.List; import com.google.auto.service.AutoService; @@ -34,11 +30,6 @@ public String getName() { return "EMR"; } - @Override - public List getParams() { - return Collections.emptyList(); - } - @Override public TaskChannel create() { return new EmrTaskChannel(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannel.java index 50f0984021c5..fd349e2abbcc 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannel.java @@ 
-18,42 +18,20 @@ package org.apache.dolphinscheduler.plugin.task.flink; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.plugin.task.api.AbstractTask; import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters; -import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; -import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; import org.apache.dolphinscheduler.plugin.task.api.stream.StreamTaskChannel; public class FlinkStreamTaskChannel implements StreamTaskChannel { - @Override - public void cancelApplication(boolean status) { - - } - @Override public FlinkStreamTask createTask(TaskExecutionContext taskRequest) { return new FlinkStreamTask(taskRequest); } @Override - public AbstractParameters parseParameters(ParametersNode parametersNode) { - return JSONUtils.parseObject(parametersNode.getTaskParams(), FlinkStreamParameters.class); - } - - @Override - public ResourceParametersHelper getResources(String parameters) { - return null; - } - - @Override - public AbstractTask pauseTask(TaskExecutionContext taskExecutionContext) { - return null; + public AbstractParameters parseParameters(String taskParams) { + return JSONUtils.parseObject(taskParams, FlinkStreamParameters.class); } - @Override - public AbstractTask recoverTask(TaskExecutionContext taskExecutionContext) { - return null; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannelFactory.java index 475386bf3654..c75e7dbfb84c 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTaskChannelFactory.java @@ -19,9 +19,6 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskChannel; import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.List; import com.google.auto.service.AutoService; @@ -38,8 +35,4 @@ public String getName() { return "FLINK_STREAM"; } - @Override - public List getParams() { - return null; - } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/test/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkArgsUtilsTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/test/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkArgsUtilsTest.java index 6c2c4c61008e..1d116b8b8ec2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/test/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkArgsUtilsTest.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/test/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkArgsUtilsTest.java @@ -69,7 +69,7 @@ public void testRunJarInApplicationMode() throws Exception { List commandLine = FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); Assertions.assertEquals( - "flink run-application -t yarn-application -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", + "${FLINK_HOME}/bin/flink run-application -t yarn-application -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", joinStringListWithSpace(commandLine)); } @@ -81,7 +81,7 @@ public void 
testRunJarInClusterMode() throws Exception { FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); Assertions.assertEquals( - "flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", + "${FLINK_HOME}/bin/flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", joinStringListWithSpace(commandLine1)); flinkParameters.setFlinkVersion("<1.10"); @@ -89,7 +89,7 @@ public void testRunJarInClusterMode() throws Exception { FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); Assertions.assertEquals( - "flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", + "${FLINK_HOME}/bin/flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", joinStringListWithSpace(commandLine2)); flinkParameters.setFlinkVersion(">=1.12"); @@ -97,7 +97,7 @@ public void testRunJarInClusterMode() throws Exception { FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); Assertions.assertEquals( - "flink run -t yarn-per-job -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", + "${FLINK_HOME}/bin/flink run -t yarn-per-job -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", joinStringListWithSpace(commandLine3)); } @@ -107,7 +107,7 @@ public void testRunJarInLocalMode() throws Exception { List commandLine = FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); Assertions.assertEquals( - "flink run -p 4 -sae -c org.example.Main /opt/job.jar", + "${FLINK_HOME}/bin/flink run -p 4 -sae -c org.example.Main /opt/job.jar", joinStringListWithSpace(commandLine)); } @@ -117,7 +117,8 @@ public void testRunSql() throws Exception { 
flinkParameters.setProgramType(ProgramType.SQL); List commandLine = FlinkArgsUtils.buildRunCommandLine(buildTestTaskExecutionContext(), flinkParameters); - Assertions.assertEquals("sql-client.sh -i /tmp/execution/app-id_init.sql -f /tmp/execution/app-id_node.sql", + Assertions.assertEquals( + "${FLINK_HOME}/bin/sql-client.sh -i /tmp/execution/app-id_init.sql -f /tmp/execution/app-id_node.sql", joinStringListWithSpace(commandLine)); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkConstants.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkConstants.java index b2d76077610d..c12c0bfce38a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkConstants.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkConstants.java @@ -27,14 +27,14 @@ private FlinkConstants() { * flink command * usage: flink run [OPTIONS] */ - public static final String FLINK_COMMAND = "flink"; + public static final String FLINK_COMMAND = "${FLINK_HOME}/bin/flink"; public static final String FLINK_RUN = "run"; /** * flink sql command * usage: sql-client.sh -i , -f