diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 b/.github/CODEOWNERS similarity index 78% rename from ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 rename to .github/CODEOWNERS index 70118003b9234a461047d5c4b5edf7cae6bb52c9..ec6c33caee77073cf9cecc7421c5ce3eacc600fa 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 +++ b/.github/CODEOWNERS @@ -15,6 +15,7 @@ # limitations under the License. # -{% for key, value in dolphin_app_api_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file +dolphinscheduler/dolphinscheduler-alert @kezhenxu94 +dolphinscheduler/dolphinscheduler-e2e @kezhenxu94 +dolphinscheduler/dolphinscheduler-registry @kezhenxu94 +dolphinscheduler/dolphinscheduler-standalone-server @kezhenxu94 diff --git a/.github/actions/sanity-check/action.yml b/.github/actions/sanity-check/action.yml index a1d03a33c3f6831ba1b570d94d1829a3127f5c18..419b4991bf6501e6be26fe5fb0a86fbfc4a2bdab 100644 --- a/.github/actions/sanity-check/action.yml +++ b/.github/actions/sanity-check/action.yml @@ -38,16 +38,3 @@ runs: with: reviewdog_version: v0.10.2 - - shell: bash - run: ./mvnw -B -q checkstyle:checkstyle-aggregate - - - shell: bash - env: - REVIEWDOG_GITHUB_API_TOKEN: ${{ inputs.token }} - run: | - if [[ -n "${{ inputs.token }}" ]]; then - reviewdog -f=checkstyle \ - -reporter="github-pr-check" \ - -filter-mode="added" \ - -fail-on-error="true" < target/checkstyle-result.xml - fi diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index 308780689400b051e8c94043adc9aae012f6451f..c20632d251d4485592b2ab1ec2ceec941277a3b1 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -55,13 +55,6 @@ jobs: with: path: ~/.m2/repository key: ${{ runner.os }}-maven - - name: Bootstrap database - run: | - sed -i "/image: bitnami\/postgresql/a\ ports:\n - 
5432:5432" $(pwd)/docker/docker-swarm/docker-compose.yml - sed -i "/image: bitnami\/zookeeper/a\ ports:\n - 2181:2181" $(pwd)/docker/docker-swarm/docker-compose.yml - docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml up -d dolphinscheduler-zookeeper dolphinscheduler-postgresql - until docker logs docker-swarm_dolphinscheduler-postgresql_1 2>&1 | grep 'listening on IPv4 address'; do echo "waiting for postgresql ready ..."; sleep 1; done - docker run --rm --network docker-swarm_dolphinscheduler -v $(pwd)/sql/dolphinscheduler_postgre.sql:/docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql bitnami/postgresql:11.11.0 bash -c "PGPASSWORD=root psql -h docker-swarm_dolphinscheduler-postgresql_1 -U root -d dolphinscheduler -v ON_ERROR_STOP=1 -f /docker-entrypoint-initdb.d/dolphinscheduler_postgre.sql" - name: Run Unit tests run: ./mvnw clean verify -B -Dmaven.test.skip=false @@ -85,7 +78,7 @@ jobs: -Dsonar.core.codeCoveragePlugin=jacoco -Dsonar.projectKey=apache-dolphinscheduler -Dsonar.login=e4058004bc6be89decf558ac819aa1ecbee57682 - -Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js,dolphinscheduler-microbench/src/**/* + -Dsonar.exclusions=dolphinscheduler-ui/src/**/i18n/locale/*.js -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 9011db1479ae5cfa40120f68ac31b08741f22ab6..12a51b08e930609b58bf129a28afa2ee29c8897c 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,17 @@ dolphinscheduler-server/src/main/resources/logback.xml dolphinscheduler-ui/dist dolphinscheduler-ui/node docker/build/apache-dolphinscheduler* + +# ------------------ +# pydolphinscheduler +# ------------------ +# Cache +__pycache__/ + +# Build +build/ +*egg-info/ + +# Test coverage +.coverage +htmlcov/ diff --git a/.licenserc.yaml b/.licenserc.yaml index 
44a776ee599c853025201e980c90ad3744fdeab6..0bdc0eac05531679e773afa50aa5147b6972ac4f 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -26,8 +26,9 @@ header: - LICENSE - DISCLAIMER - dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java + - dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java - mvnw.cmd - - sql/soft_version + - dolphinscheduler-dao/src/main/resources/sql/soft_version - .mvn - .gitattributes - '**/licenses/**/LICENSE-*' @@ -45,5 +46,6 @@ header: - '.github/actions/lable-on-issue/**' - '.github/actions/reviewdog-setup/**' - '.github/actions/translate-on-issue/**' + - '**/.gitkeep' comment: on-failure diff --git a/LICENSE b/LICENSE index 8c9beb882f145c173930f751f6f75af77384eced..20191b3d535c6f46698ec8d3e7731aac38930ecf 100644 --- a/LICENSE +++ b/LICENSE @@ -219,4 +219,4 @@ The text of each license is the standard Apache 2.0 license. DolphinPluginClassLoader from https://github.com/prestosql/presto Apache 2.0 DolphinPluginDiscovery from https://github.com/prestosql/presto Apache 2.0 DolphinPluginLoader from https://github.com/prestosql/presto Apache 2.0 - + CodeGenerateUtils from https://github.com/twitter-archive/snowflake/tree/snowflake-2010 Apache 2.0 diff --git a/NOTICE b/NOTICE index b07c3dd4cabb3a98596c3ae1d0e074f51453767c..458d64af0e8e2e1f90f2717c09e773cfbb639df5 100644 --- a/NOTICE +++ b/NOTICE @@ -1,5 +1,5 @@ Apache DolphinScheduler -Copyright 2019-2021 The Apache Software Foundation +Copyright 2019-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
diff --git a/README.md b/README.md index 040459d6b6c04955e247d3078953a70b8f946bed..bc111cc1d51e8a771fd2f4caf605c7f0d073d0d1 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ Its main objectives are as follows: ## What's in DolphinScheduler Stability | Accessibility | Features | Scalability | - -- | -- | -- | -- + --------- | ------------- | -------- | ------------| Decentralized multi-master and multi-worker | Visualization of workflow key information, such as task status, task type, retry times, task operation machine information, visual variables, and so on at a glance.  |  Support pause, recover operation | Support customized task types support HA | Visualization of all workflow operations, dragging tasks to draw DAGs, configuring data sources and resources. At the same time, for third-party systems, provide API mode operations. | Users on DolphinScheduler can achieve many-to-one or one-to-one mapping relationship through tenants and Hadoop users, which is very important for scheduling large data jobs. | The scheduler supports distributed scheduling, and the overall scheduling capability will increase linearly with the scale of the cluster. Master and Worker support dynamic adjustment. Overload processing: By using the task queue mechanism, the number of schedulable tasks on a single machine can be flexibly configured. Machine jam can be avoided with high tolerance to numbers of tasks cached in task queue. 
| One-click deployment | Support traditional shell tasks, and big data platform task scheduling: MR, Spark, SQL (MySQL, PostgreSQL, hive, spark SQL), Python, Procedure, Sub_Process | | @@ -59,11 +59,11 @@ Overload processing: By using the task queue mechanism, the number of schedulabl ## QuickStart in Docker -Please referer the official website document: [QuickStart in Docker](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/docker-deployment.html) +Please refer the official website document: [QuickStart in Docker](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/guide/installation/docker.html) ## QuickStart in Kubernetes -Please referer the official website document: [QuickStart in Kubernetes](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/kubernetes-deployment.html) +Please refer to the official website document: [QuickStart in Kubernetes](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/guide/installation/kubernetes.html) ## How to Build @@ -95,8 +95,8 @@ You are very welcome to communicate with the developers and users of Dolphin Sch 2. Follow the [Twitter account of DolphinScheduler](https://twitter.com/dolphinschedule) and get the latest news on time. 
### Contributor over time - -[![Contributor over time](https://contributor-graph-api.apiseven.com/contributors-svg?chart=contributorOverTime&repo=apache/dolphinscheduler)](https://www.apiseven.com/en/contributor-graph?chart=contributorOverTime&repo=apache/dolphinscheduler) + +[![Contributor over time](https://contributor-graph-api.apiseven.com/contributors-svg?chart=contributorOverTime&repo=apache/dolphinscheduler)](https://www.apiseven.com/en/contributor-graph?chart=contributorOverTime&repo=apache/dolphinscheduler) ## How to Contribute diff --git a/ambari_plugin/README.md b/ambari_plugin/README.md deleted file mode 100644 index 2a919748371281084e5c86e96a816a6d681879b5..0000000000000000000000000000000000000000 --- a/ambari_plugin/README.md +++ /dev/null @@ -1,132 +0,0 @@ -### Instructions for using the Dolphin Scheduler's Ambari plug-in - -#### Note - -1. This document is intended for users with a basic understanding of Ambari -2. This document is a description of adding the Dolphin Scheduler service to the installed Ambari service -3. This document is based on version 2.5.2 of Ambari - -#### Installation preparation - -1. Prepare the RPM packages - - - It is generated by executing the command ```mvn -U clean install -Prpmbuild -Dmaven.test.skip=true -X``` in the project root directory (In the directory: dolphinscheduler-dist/target/rpm/apache-dolphinscheduler/RPMS/noarch ) - -2. Create an installation for DS,who have read and write access to the installation directory (/opt/soft) - -3. Install with rpm package - - - Manual installation (recommended): - - Copy the prepared RPM packages to each node of the cluster. - - Execute with DS installation user: ```rpm -ivh apache-dolphinscheduler-xxx.noarch.rpm``` - - Mysql-connector-java packaged using the default POM file will not be included. - - The RPM package was packaged in the project with the installation path of /opt/soft. - If you use mysql as the database, you need add it manually. 
- - - Automatic installation with ambari - - Each node of the cluster needs to configure the local yum source - - Copy the prepared RPM packages to each node local yum source - -4. Copy plug-in directory - - - copy directory ambari_plugin/common-services/DOLPHIN to ambari-server/resources/common-services/ - - copy directory ambari_plugin/statcks/DOLPHIN to ambari-server/resources/stacks/HDP/2.6/services/--stack version is selected based on the actual situation - -5. Initializes the database information - - ``` - -- Create the database for the Dolphin Scheduler:dolphinscheduler - CREATE DATABASE dolphinscheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE - utf8_general_ci; - - -- Initialize the user and password for the dolphinscheduler database and assign permissions - -- Replace the {user} in the SQL statement below with the user of the dolphinscheduler database - GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'%' IDENTIFIED BY '{password}'; - GRANT ALL PRIVILEGES ON dolphinscheduler.* TO '{user}'@'localhost' IDENTIFIED BY - '{password}'; - flush privileges; - ``` - - - -#### Ambari Install Dolphin Scheduler -- **NOTE: You have to install zookeeper first** - -1. Install Dolphin Scheduler on ambari web interface - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_001.png) - -2. Select the nodes for the Dolphin Scheduler's Master installation - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_002.png) - -3. Configure the Dolphin Scheduler's nodes for Worker, Api, Logger, Alert installation - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_003.png) - -4. Set the installation users of the Dolphin Scheduler service (created in step 1) and the user groups they belong to - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_004.png) - -5. 
System Env Optimization will export some system environment config. Modify according to actual situation - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_020.png) - -6. Configure the database information (same as in the initialization database in step 1) - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_005.png) - -7. Configure additional information if needed - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_006.png) - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_007.png) - -8. Perform the next steps as normal - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_008.png) - -9. The interface after successful installation - - ![](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_009.png) - - - ------- - - - -#### Add components to the node through Ambari -- for example, add a DS Worker - -***NOTE***: DS Logger is the installation dependent component of DS Worker in Dolphin's Ambari installation (need to add installation first; Prevent the Job log on the corresponding Worker from being checked) - -1. Locate the component node to add -- for example, node ark3 - - ![DS2_AMBARI_011](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_011.png) - -2. Add components -- the drop-down list is all addable - - ![DS2_AMBARI_012](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_012.png) - -3. Confirm component addition - - ![DS2_AMBARI_013](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_013.png) - -4. 
After adding DS Worker and DS Logger components - - ![DS2_AMBARI_015](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_015.png) - -5. Start the component - - ![DS2_AMBARI_016](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_016.png) - - -#### Remove the component from the node with Ambari - -1. Stop the component in the corresponding node - - ![DS2_AMBARI_018](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_018.png) - -2. Remove components - - ![DS2_AMBARI_019](https://github.com/apache/dolphinscheduler-website/blob/master/img/ambari-plugin/DS2_AMBARI_019.png) \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/alerts.json b/ambari_plugin/common-services/DOLPHIN/1.3.0/alerts.json deleted file mode 100644 index 130335dbe97d26547a5c4b837c15598780d36cdd..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/alerts.json +++ /dev/null @@ -1,158 +0,0 @@ -{ - "DOLPHIN": { - "service": [], - "DOLPHIN_API": [ - { - "name": "dolphin_api_port_check", - "label": "dolphin_api_port_check", - "description": "dolphin_api_port_check.", - "interval": 10, - "scope": "ANY", - "source": { - "type": "PORT", - "uri": "{{dolphin-application-api/server.port}}", - "default_port": 12345, - "reporting": { - "ok": { - "text": "TCP OK - {0:.3f}s response on port {1}" - }, - "warning": { - "text": "TCP OK - {0:.3f}s response on port {1}", - "value": 1.5 - }, - "critical": { - "text": "Connection failed: {0} to {1}:{2}", - "value": 5.0 - } - } - } - } - ], - "DOLPHIN_LOGGER": [ - { - "name": "dolphin_logger_port_check", - "label": "dolphin_logger_port_check", - "description": "dolphin_logger_port_check.", - "interval": 10, - "scope": "ANY", - "source": { - "type": "PORT", - "uri": "{{dolphin-common/loggerserver.rpc.port}}", - "default_port": 50051, - "reporting": { - "ok": { - "text": "TCP OK - 
{0:.3f}s response on port {1}" - }, - "warning": { - "text": "TCP OK - {0:.3f}s response on port {1}", - "value": 1.5 - }, - "critical": { - "text": "Connection failed: {0} to {1}:{2}", - "value": 5.0 - } - } - } - } - ], - "DOLPHIN_MASTER": [ - { - "name": "DOLPHIN_MASTER_CHECK", - "label": "check dolphin scheduler master status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - { - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_MASTER", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ], - "DOLPHIN_WORKER": [ - { - "name": "DOLPHIN_WORKER_CHECK", - "label": "check dolphin scheduler worker status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - { - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_WORKER", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ], - "DOLPHIN_ALERT": [ - { - "name": "DOLPHIN_DOLPHIN_ALERT_CHECK", - "label": "check dolphin scheduler alert status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - 
{ - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_ALERT", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ] - } -} diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-alert.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-alert.xml deleted file mode 100644 index 32abcc791d9687393fbab8af8ad5fb587b059841..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-alert.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - alert.type - EMAIL - alert type is EMAIL/SMS - - - - mail.protocol - SMTP - - - - - mail.server.host - xxx.xxx.com - - - - - mail.server.port - 25 - - int - - - - - - mail.sender - admin - - - - - mail.user - admin - - - - - mail.passwd - 000000 - - PASSWORD - - password - - - - - - mail.smtp.starttls.enable - true - - boolean - - - - - - mail.smtp.ssl.enable - true - - boolean - - - - - - mail.smtp.ssl.trust - xxx.xxx.com - - - - - - enterprise.wechat.enable - false - - - value-list - - - true - - - - false - - - - 1 - - - - - enterprise.wechat.corp.id - wechatId - - - - - enterprise.wechat.secret - secret - - - - - enterprise.wechat.agent.id - agentId - - - - - enterprise.wechat.users - wechatUsers - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml deleted file mode 100644 index 766c0f477dfb9d96c44466f88d4b075d70beb807..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml +++ /dev/null @@ -1,87 +0,0 @@ - - - - 
server.port - 12345 - - server port - - - int - - - - server.servlet.session.timeout - 7200 - - int - - - - - - server.servlet.context-path - /dolphinscheduler/ - - - - - spring.servlet.multipart.max-file-size - 1024 - - MB - int - - - - - - spring.servlet.multipart.max-request-size - 1024 - - MB - int - - - - - - server.jetty.max-http-post-size - 5000000 - - int - - - - - - spring.messages.encoding - UTF-8 - - - - spring.messages.basename - i18n/messages - - - - security.authentication.type - PASSWORD - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml deleted file mode 100644 index 439e21188a008ece17ba53f30f48e2f6ff1f68c2..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml +++ /dev/null @@ -1,158 +0,0 @@ - - - - resource.storage.type - Choose Resource Upload Startup Type - - Resource upload startup type : HDFS,S3,NONE - - NONE - - value-list - - - HDFS - - - - S3 - - - - NONE - - - - 1 - - - - - resource.upload.path - /dolphinscheduler - - resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended - - - - - data.basedir.path - /tmp/dolphinscheduler - - user data local directory path, please make sure the directory exists and have read write permissions - - - - - - hadoop.security.authentication.startup.state - false - - value-list - - - true - - - - false - - - - 1 - - whether kerberos starts - - - java.security.krb5.conf.path - /opt/krb5.conf - - java.security.krb5.conf path - - - - - login.user.keytab.username - hdfs-mycluster@ESZ.COM - - LoginUserFromKeytab user - - - - - login.user.keytab.path - /opt/hdfs.headless.keytab - - LoginUserFromKeytab path - - - - - resource.view.suffixs - 
txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties - - - - hdfs.root.user - hdfs - - Users who have permission to create directories under the HDFS root path - - - - - fs.defaultFS - hdfs://mycluster:8020 - - HA or single namenode, - If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory, - support s3,for example : s3a://dolphinscheduler - - - - - fs.s3a.endpoint - http://host:9010 - - s3 need,s3 endpoint - - - - - fs.s3a.access.key - A3DXS30FO22544RE - - s3 need,s3 access key - - - - - fs.s3a.secret.key - OloCLq3n+8+sdPHUhJ21XrSxTC+JK - - s3 need,s3 secret key - - - - - kerberos.expire.time - 7 - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml deleted file mode 100644 index 02d8de0482ab0d13db8571da5fea62af52e7cddb..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml +++ /dev/null @@ -1,206 +0,0 @@ - - - - spring.datasource.initialSize - 5 - - Init connection number - - - int - - - - - spring.datasource.minIdle - 5 - - Min connection number - - - int - - - - - spring.datasource.maxActive - 50 - - Max connection number - - - int - - - - - spring.datasource.maxWait - 60000 - - Max wait time for get a connection in milliseconds. - If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. - If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. - - - int - - - - - spring.datasource.timeBetweenEvictionRunsMillis - 60000 - - Milliseconds for check to close free connections - - - int - - - - - spring.datasource.timeBetweenConnectErrorMillis - 60000 - - The Destroy thread detects the connection interval and closes the physical connection in milliseconds - if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. 
- - - int - - - - - spring.datasource.minEvictableIdleTimeMillis - 300000 - - The longest time a connection remains idle without being evicted, in milliseconds - - - int - - - - - spring.datasource.validationQuery - SELECT 1 - - The SQL used to check whether the connection is valid requires a query statement. - If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. - - - - - spring.datasource.validationQueryTimeout - 3 - - int - - - Check whether the connection is valid for timeout, in seconds - - - - - spring.datasource.testWhileIdle - true - - boolean - - - When applying for a connection, - if it is detected that the connection is idle longer than time Between Eviction Runs Millis, - validation Query is performed to check whether the connection is valid - - - - - spring.datasource.testOnBorrow - true - - boolean - - - Execute validation to check if the connection is valid when applying for a connection - - - - - spring.datasource.testOnReturn - false - - boolean - - - Execute validation to check if the connection is valid when the connection is returned - - - - - spring.datasource.defaultAutoCommit - true - - boolean - - - - - - - spring.datasource.keepAlive - false - - boolean - - - - - - - - spring.datasource.poolPreparedStatements - true - - boolean - - - Open PSCache, specify count PSCache for every connection - - - - - spring.datasource.maxPoolPreparedStatementPerConnectionSize - 20 - - int - - - - - - spring.datasource.spring.datasource.filters - stat,wall,log4j - - - - - spring.datasource.connectionProperties - druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml deleted file mode 100644 index 18501c6d0a8a7b55803cdd638ba249c3fb9571c6..0000000000000000000000000000000000000000 --- 
a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - dolphin.database.type - mysql - Dolphin Scheduler DataBase Type Which Is Select - Dolphin Database Type - - value-list - - - mysql - - - - postgresql - - - - 1 - - - - - - dolphin.database.host - - Dolphin Database Host - - - - - dolphin.database.port - - Dolphin Database Port - - - - - dolphin.database.username - - Dolphin Database Username - - - - - dolphin.database.password - - Dolphin Database Password - PASSWORD - - password - - - - - - dolphin.user - - Which user to install and admin dolphin scheduler - Deploy User - - - - dolphin.group - - Which user to install and admin dolphin scheduler - Deploy Group - - - - - dolphinscheduler-env-content - Dolphinscheduler Env template - This is the jinja template for dolphinscheduler.env.sh file - # -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -export HADOOP_HOME=/opt/soft/hadoop -export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop -export SPARK_HOME1=/opt/soft/spark1 -export SPARK_HOME2=/opt/soft/spark2 -export PYTHON_HOME=/opt/soft/python -export JAVA_HOME=/opt/soft/java -export HIVE_HOME=/opt/soft/hive -export FLINK_HOME=/opt/soft/flink -export DATAX_HOME=/opt/soft/datax - - content - false - false - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml deleted file mode 100644 index c8eec047fcc468f87b5042ac74f838160ca9475b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - - master.exec.threads - 100 - - int - - master execute thread num - - - - master.exec.task.num - 20 - - int - - master execute task number in parallel - - - - master.heartbeat.interval - 10 - - int - - master heartbeat interval - - - - master.task.commit.retryTimes - 5 - - int - - master commit task retry times - - - - master.task.commit.interval - 1000 - - int - - master commit task interval - - - - master.max.cpuload.avg - 100 - - int - - only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 - - - - master.reserved.memory - 0.3 - only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. 
- - - - - master.listen.port - 5678 - - int - - master listen port - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml deleted file mode 100644 index 7a0c68b051806ecc7cb80889eeac82a72ddb6e75..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml +++ /dev/null @@ -1,126 +0,0 @@ - - - - org.quartz.scheduler.instanceName - DolphinScheduler - - - - - org.quartz.scheduler.instanceId - AUTO - - - - org.quartz.scheduler.makeSchedulerThreadDaemon - true - - boolean - - - - - org.quartz.jobStore.useProperties - false - - boolean - - - - - org.quartz.threadPool.class - org.quartz.simpl.SimpleThreadPool - - - - org.quartz.threadPool.makeThreadsDaemons - true - - boolean - - - - - org.quartz.threadPool.threadCount - 25 - - int - - - - - org.quartz.threadPool.threadPriority - 5 - - int - - - - - org.quartz.jobStore.class - org.quartz.impl.jdbcjobstore.JobStoreTX - - - - org.quartz.jobStore.tablePrefix - QRTZ_ - - - - org.quartz.jobStore.isClustered - true - - boolean - - - - - org.quartz.jobStore.misfireThreshold - 60000 - - int - - - - - org.quartz.jobStore.clusterCheckinInterval - 5000 - - int - - - - - org.quartz.jobStore.acquireTriggersWithinLock - true - - boolean - - - - - org.quartz.jobStore.dataSource - myDs - - - - org.quartz.dataSource.myDs.connectionProvider.class - org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml deleted file mode 100644 index f162b0882bf9d1a0186a5170701867bf7624717a..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml +++ /dev/null @@ -1,67 
+0,0 @@ - - - - worker.exec.threads - 100 - - int - - worker execute thread num - - - - worker.heartbeat.interval - 10 - - int - - worker heartbeat interval - - - - worker.max.cpuload.avg - 100 - - int - - only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 - - - - worker.reserved.memory - 0.3 - only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G. - - - - - worker.listen.port - 1234 - - int - - worker listen port - - - - worker.group - default - default worker group - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml deleted file mode 100644 index e692479fd6afdc12a01a072f2b77bbd0bd1aafbe..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - 2.0 - - - DOLPHIN - Dolphin Scheduler - 分布式易扩展的可视化DAG工作流任务调度系统 - 1.3.0 - - - DOLPHIN_MASTER - DS Master - MASTER - 1+ - - - PYTHON - 600 - - - - - DOLPHIN_LOGGER - DS Logger - SLAVE - 1+ - - - PYTHON - 600 - - - - - DOLPHIN_WORKER - DS Worker - SLAVE - 1+ - - - DOLPHIN/DOLPHIN_LOGGER - host - - true - - - - - - PYTHON - 600 - - - - - DOLPHIN_ALERT - DS Alert - SLAVE - 1 - - - PYTHON - 600 - - - - - DOLPHIN_API - DS_Api - SLAVE - 1 - - - PYTHON - 600 - - - - - - ZOOKEEPER - - - - - any - - - apache-dolphinscheduler* - - - - - - - dolphin-alert - dolphin-app-api - dolphin-app-dao - dolphin-common - dolphin-env - dolphin-quartz - - - - - theme.json - true - - - - quicklinks - - - quicklinks.json - true - - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py deleted file mode 100644 index 
87cc7b453b9aaff148387ff454c73e0178336196..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py +++ /dev/null @@ -1,124 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - -import socket -import urllib2 -import os -import logging -import ambari_simplejson as json -from resource_management.libraries.script.script import Script -import sys -reload(sys) -sys.setdefaultencoding('utf-8') - -logger = logging.getLogger('ambari_alerts') - -config = Script.get_config() - - -def get_tokens(): - """ - Returns a tuple of tokens in the format {{site/property}} that will be used - to build the dictionary passed into execute - - :rtype tuple - """ - -def get_info(url, connection_timeout): - response = None - - try: - response = urllib2.urlopen(url, timeout=connection_timeout) - json_data = response.read() - return json_data - finally: - if response is not None: - try: - response.close() - except: - pass - - -def execute(configurations={}, parameters={}, host_name=None): - """ - Returns a tuple containing the result code and a pre-formatted result label - - Keyword arguments: - configurations : a mapping of configuration key to value - parameters : a mapping of script parameter 
key to value - host_name : the name of this host where the alert is running - - :type configurations dict - :type parameters dict - :type host_name str - """ - - alert_name = parameters['alertName'] - - dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" - - pid = "0" - - - from resource_management.core import sudo - - is_running = True - pid_file_path = "" - if alert_name == 'DOLPHIN_MASTER': - pid_file_path = dolphin_pidfile_dir + "/master-server.pid" - elif alert_name == 'DOLPHIN_WORKER': - pid_file_path = dolphin_pidfile_dir + "/worker-server.pid" - elif alert_name == 'DOLPHIN_ALERT': - pid_file_path = dolphin_pidfile_dir + "/alert-server.pid" - elif alert_name == 'DOLPHIN_LOGGER': - pid_file_path = dolphin_pidfile_dir + "/logger-server.pid" - elif alert_name == 'DOLPHIN_API': - pid_file_path = dolphin_pidfile_dir + "/api-server.pid" - - if not pid_file_path or not os.path.isfile(pid_file_path): - is_running = False - - try: - pid = int(sudo.read_file(pid_file_path)) - except: - is_running = False - - try: - # Kill will not actually kill the process - # From the doc: - # If sig is 0, then no signal is sent, but error checking is still - # performed; this can be used to check for the existence of a - # process ID or process group ID. 
- sudo.kill(pid, 0) - except OSError: - is_running = False - - if host_name is None: - host_name = socket.getfqdn() - - if not is_running: - result_code = "CRITICAL" - else: - result_code = "OK" - - label = "The comment {0} of DOLPHIN_SCHEDULER on {1} is {2}".format(alert_name, host_name, result_code) - - return ((result_code, [label])) - -if __name__ == "__main__": - pass diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py deleted file mode 100644 index e78c38d272f67bffb5e71cefddb10a21f489787b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinAlertService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/alert-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/alert-server.pid` | grep `cat {dolphin_pidfile_dir}/alert-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start alert-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop alert-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "alert-server.pid") - - -if __name__ == "__main__": - DolphinAlertService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py deleted file mode 100644 index 5a28924a9a7531a9c1091b3a136fd68d69815486..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinApiService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - - #init - init_cmd=format("sh " + params.dolphin_home + "/script/create-dolphinscheduler.sh") - Execute(init_cmd, user=params.dolphin_user) - - #upgrade - upgrade_cmd=format("sh " + params.dolphin_home + "/script/upgrade-dolphinscheduler.sh") - Execute(upgrade_cmd, user=params.dolphin_user) - - no_op_test = format("ls {dolphin_pidfile_dir}/api-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/api-server.pid` | grep `cat {dolphin_pidfile_dir}/api-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start api-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh 
" + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop api-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "api-server.pid") - - -if __name__ == "__main__": - DolphinApiService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py deleted file mode 100644 index 1661d76c753e09ff402dbebeac1afd41fa548632..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * - - -def dolphin_env(): - import params - - Directory(params.dolphin_pidfile_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - Directory(params.dolphin_log_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - Directory(params.dolphin_conf_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - - Directory(params.dolphin_common_map['data.basedir.path'], - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - - - File(format(params.dolphin_env_path), - mode=0777, - content=InlineTemplate(params.dolphin_env_content), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - - File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), - mode=0755, - content=Template("dolphin-daemon.sh.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/master.properties"), - mode=0755, - content=Template("master.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/worker.properties"), - mode=0755, - content=Template("worker.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - - File(format(params.dolphin_conf_dir + "/alert.properties"), - mode=0755, - content=Template("alert.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/datasource.properties"), - mode=0755, - content=Template("datasource.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/application-api.properties"), - mode=0755, - content=Template("application-api.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - 
File(format(params.dolphin_conf_dir + "/common.properties"), - mode=0755, - content=Template("common.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/quartz.properties"), - mode=0755, - content=Template("quartz.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/zookeeper.properties"), - mode=0755, - content=Template("zookeeper.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py deleted file mode 100644 index fb47e132e17cccbcfc22cabc13d3b5fb7d93d52e..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinLoggerService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/logger-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/logger-server.pid` | grep `cat {dolphin_pidfile_dir}/logger-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start logger-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop logger-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "logger-server.pid") - - -if __name__ == "__main__": - DolphinLoggerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py deleted file mode 100644 index 8d64935d26e5cb50ebe27129f56c7ef114c5b833..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinMasterService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/master-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/master-server.pid` | grep `cat {dolphin_pidfile_dir}/master-server.pid` >/dev/null 2>&1") - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start master-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop master-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + 
"master-server.pid") - - -if __name__ == "__main__": - DolphinMasterService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py deleted file mode 100644 index 1f542c06c26e787aaf584877355fc458bb23ee47..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinWorkerService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/worker-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/worker-server.pid` | grep `cat {dolphin_pidfile_dir}/worker-server.pid` >/dev/null 2>&1") - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start worker-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop worker-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "worker-server.pid") - - -if __name__ == "__main__": - DolphinWorkerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py deleted file mode 100644 index 5a9994f55955be44a284162dcc8fa4a0c4e9fa93..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py +++ /dev/null @@ -1,155 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - - -import sys -from resource_management import * -from resource_management.core.logger import Logger -from resource_management.libraries.functions import default - -Logger.initialize_logger() -reload(sys) -sys.setdefaultencoding('utf-8') - -# server configurations -config = Script.get_config() - -# conf_dir = "/etc/" -dolphin_home = "/opt/soft/dolphinscheduler" -dolphin_conf_dir = dolphin_home + "/conf" -dolphin_log_dir = dolphin_home + "/logs" -dolphin_bin_dir = dolphin_home + "/bin" -dolphin_lib_jars = dolphin_home + "/lib/*" -dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" - -rmHosts = default("/clusterHostInfo/rm_host", []) - -# dolphin-env -dolphin_env_map = {} -dolphin_env_map.update(config['configurations']['dolphin-env']) - -# which user to install and admin dolphin scheduler -dolphin_user = dolphin_env_map['dolphin.user'] -dolphin_group = dolphin_env_map['dolphin.group'] - -# .dolphinscheduler_env.sh -dolphin_env_path = dolphin_conf_dir + '/env/dolphinscheduler_env.sh' -dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] - -# database config -dolphin_database_config = {} -dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] -dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] 
-dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] -if 'mysql' == dolphin_database_config['dolphin_database_type']: - dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' - dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' - dolphin_database_config['dolphin_database_url'] = 'jdbc:mysql://' + dolphin_env_map['dolphin.database.host'] \ - + ':' + dolphin_env_map['dolphin.database.port'] \ - + '/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8' -else: - dolphin_database_config['dolphin_database_driver'] = 'org.postgresql.Driver' - dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.PostgreSQLDelegate' - dolphin_database_config['dolphin_database_url'] = 'jdbc:postgresql://' + dolphin_env_map['dolphin.database.host'] \ - + ':' + dolphin_env_map['dolphin.database.port'] \ - + '/dolphinscheduler' - - - - - -# application-alert.properties -dolphin_alert_map = {} -wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' -wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret' -wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}' -wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}' - -dolphin_alert_config_map = config['configurations']['dolphin-alert'] - -if dolphin_alert_config_map['enterprise.wechat.enable']: - dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url - dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url - dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg - dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg - -dolphin_alert_map.update(dolphin_alert_config_map) - - - -# 
application-api.properties -dolphin_app_api_map = {} -dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) - - -# common.properties -dolphin_common_map = {} - -if 'yarn-site' in config['configurations'] and \ - 'yarn.resourcemanager.webapp.address' in config['configurations']['yarn-site']: - yarn_resourcemanager_webapp_address = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'] - yarn_application_status_address = 'http://' + yarn_resourcemanager_webapp_address + '/ws/v1/cluster/apps/%s' - dolphin_common_map['yarn.application.status.address'] = yarn_application_status_address - -rmHosts = default("/clusterHostInfo/rm_host", []) -if len(rmHosts) > 1: - dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = ','.join(rmHosts) -else: - dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = '' - -dolphin_common_map_tmp = config['configurations']['dolphin-common'] -data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] -dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path -dolphin_common_map.update(config['configurations']['dolphin-common']) - -# datasource.properties -dolphin_datasource_map = {} -dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' -dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] -dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] -dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] -dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] -dolphin_datasource_map.update(config['configurations']['dolphin-datasource']) - -# master.properties -dolphin_master_map = config['configurations']['dolphin-master'] - -# quartz.properties -dolphin_quartz_map = {} -dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = 
dolphin_database_config['driverDelegateClass'] -dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) - -# worker.properties -dolphin_worker_map = config['configurations']['dolphin-worker'] - -# zookeeper.properties -dolphin_zookeeper_map={} -zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) -if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: - clientPort = config['configurations']['zoo.cfg']['clientPort'] - zookeeperPort = ":" + clientPort + "," - dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort -dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper']) -if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map: - file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size'] - dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB" -if 'spring.servlet.multipart.max-request-size' in dolphin_app_api_map: - request_size = dolphin_app_api_map['spring.servlet.multipart.max-request-size'] - dolphin_app_api_map['spring.servlet.multipart.max-request-size'] = request_size + "MB" - - diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py deleted file mode 100644 index 0e12f699324e943de71a18e255e854a13c9bb5ed..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management import * -from resource_management.libraries.functions import get_unique_id_and_date - -class ServiceCheck(Script): - def service_check(self, env): - import params - #env.set_params(params) - - # Execute(format("which pika_server")) - -if __name__ == "__main__": - ServiceCheck().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py deleted file mode 100644 index 24b2c8b1bcb87bb03b1bba71f569d190eab0843b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" - -from resource_management import * - -config = Script.get_config() - -dolphin_run_dir = "/opt/soft/run/dolphinscheduler/" diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 deleted file mode 100644 index 40aed83543e7db639051101c6ec6ce0d4af2b42a..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -{% for key, value in dolphin_datasource_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 deleted file mode 100644 index c5cc11fb6271dadd197fea3f7a4f04b8210a2eb5..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/sh -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -usage="Usage: dolphinscheduler-daemon.sh (start|stop) " - -# if no args specified, show usage -if [ $# -le 1 ]; then - echo $usage - exit 1 -fi - -startStop=$1 -shift -command=$1 -shift - -echo "Begin $startStop $command......" - -BIN_DIR=`dirname $0` -BIN_DIR=`cd "$BIN_DIR"; pwd` -DOLPHINSCHEDULER_HOME=$BIN_DIR/.. 
- -export HOSTNAME=`hostname` - -DOLPHINSCHEDULER_LIB_JARS={{dolphin_lib_jars}} - -DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms1g -Xss512k -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=10m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" -STOP_TIMEOUT=5 - -log={{dolphin_log_dir}}/dolphinscheduler-$command-$HOSTNAME.out -pid={{dolphin_pidfile_dir}}/$command.pid - -cd $DOLPHINSCHEDULER_HOME - -if [ "$command" = "api-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api" - CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer -elif [ "$command" = "master-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false" - CLASS=org.apache.dolphinscheduler.server.master.MasterServer -elif [ "$command" = "worker-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false" - CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer -elif [ "$command" = "alert-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml" - CLASS=org.apache.dolphinscheduler.alert.AlertServer -elif [ "$command" = "logger-server" ]; then - CLASS=org.apache.dolphinscheduler.server.log.LoggerServer -else - echo "Error: No command named \`$command' was found." - exit 1 -fi - -case $startStop in - (start) - - if [ -f $pid ]; then - if kill -0 `cat $pid` > /dev/null 2>&1; then - echo $command running as process `cat $pid`. Stop it first. - exit 1 - fi - fi - - echo starting $command, logging to $log - - exec_command="$LOG_FILE $DOLPHINSCHEDULER_OPTS -classpath {{dolphin_conf_dir}}:{{dolphin_lib_jars}} $CLASS" - - echo "nohup java $exec_command > $log 2>&1 < /dev/null &" - nohup java $exec_command > $log 2>&1 < /dev/null & - echo $! 
> $pid - ;; - - (stop) - - if [ -f $pid ]; then - TARGET_PID=`cat $pid` - if kill -0 $TARGET_PID > /dev/null 2>&1; then - echo stopping $command - kill $TARGET_PID - sleep $STOP_TIMEOUT - if kill -0 $TARGET_PID > /dev/null 2>&1; then - echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9" - kill -9 $TARGET_PID - fi - else - echo no $command to stop - fi - rm -f $pid - else - echo no $command to stop - fi - ;; - - (*) - echo $usage - exit 1 - ;; - -esac - -echo "End $startStop $command." \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 deleted file mode 100644 index d9b85e14cf78ecca1d34f425cde02b408739ced7..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -{% for key, value in dolphin_master_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 deleted file mode 100644 index e027a263b5bc0810e2e67d5a6386fd2bbb17ee43..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_quartz_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 deleted file mode 100644 index a008b74084ebf8cf63e975d06b6f1bee76a4e2a8..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_worker_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 deleted file mode 100644 index 9eb14eaef319d5d56324cccb47f5c3b4b2878681..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_zookeeper_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json deleted file mode 100755 index 8753004fefa3b6605d261a20756c6019b28695cb..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "default", - "description": "default quick links configuration", - "configuration": { - "protocol": - { - "type":"http" - }, - - "links": [ - { - "name": "dolphin-application-ui", - "label": "DolphinApplication UI", - "requires_user_name": "false", - "component_name": "DOLPHIN_API", - "url": "%@://%@:%@/dolphinscheduler/ui/view/login/index.html", - "port":{ - "http_property": "server.port", - "http_default_port": "12345", - "regex": "^(\\d+)$", - "site": "dolphin-application-api" - } - } - ] - } -} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json deleted file mode 100644 index 953e2323f8c84fe430b790e514284cac5e022f2c..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json +++ /dev/null @@ -1,661 +0,0 @@ -{ - "name": "default", - "description": "Default theme for Dolphin Scheduler service", - "configuration": { - "layouts": [ - { - "name": "default", - "tabs": [ - { - "name": "settings", - "display-name": "Settings", - "layout": { - "tab-rows": "3", - "tab-columns": "3", - "sections": [ - { - "name": "dolphin-env-config", - "display-name": "Dolphin Env Config", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - 
"section-columns": "2", - "subsections": [ - { - "name": "env-row1-col1", - "display-name": "Deploy User Info", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "env-row1-col2", - "display-name": "System Env Optimization", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - } - ] - }, - { - "name": "dolphin-database-config", - "display-name": "Database Config", - "row-index": "1", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - "section-columns": "3", - "subsections": [ - { - "name": "database-row1-col1", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "database-row1-col2", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - }, - { - "name": "database-row1-col3", - "row-index": "0", - "column-index": "2", - "row-span": "1", - "column-span": "1" - } - ] - }, - { - "name": "dynamic-config", - "row-index": "2", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - "section-columns": "3", - "subsections": [ - { - "name": "dynamic-row1-col1", - "display-name": "Resource FS Config", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "dynamic-row1-col2", - "display-name": "Kerberos Info", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - }, - { - "name": "dynamic-row1-col3", - "display-name": "Wechat Info", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - } - ] - } - ] - } - } - ] - } - ], - "placement": { - "configuration-layout": "default", - "configs": [ - { - "config": "dolphin-env/dolphin.database.type", - "subsection-name": "database-row1-col1" - }, - { - "config": "dolphin-env/dolphin.database.host", - "subsection-name": "database-row1-col2" - }, - { - "config": "dolphin-env/dolphin.database.port", - 
"subsection-name": "database-row1-col2" - }, - { - "config": "dolphin-env/dolphin.database.username", - "subsection-name": "database-row1-col3" - }, - { - "config": "dolphin-env/dolphin.database.password", - "subsection-name": "database-row1-col3" - }, - { - "config": "dolphin-env/dolphin.user", - "subsection-name": "env-row1-col1" - }, - { - "config": "dolphin-env/dolphin.group", - "subsection-name": "env-row1-col1" - }, - { - "config": "dolphin-env/dolphinscheduler-env-content", - "subsection-name": "env-row1-col2" - }, - { - "config": "dolphin-common/resource.storage.type", - "subsection-name": "dynamic-row1-col1" - }, - { - "config": "dolphin-common/resource.upload.path", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/hdfs.root.user", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/data.store2hdfs.basepath", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.defaultFS", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - 
"dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.endpoint", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.access.key", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.secret.key", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/hadoop.security.authentication.startup.state", - "subsection-name": "dynamic-row1-col2" - }, - { - "config": "dolphin-common/java.security.krb5.conf.path", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - 
{ - "config": "dolphin-common/login.user.keytab.username", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/login.user.keytab.path", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/kerberos.expire.time", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.enable", - "subsection-name": "dynamic-row1-col3" - }, - { - "config": "dolphin-alert/enterprise.wechat.corp.id", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.secret", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": 
"${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.agent.id", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.users", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - } - ] - }, - "widgets": [ - { - "config": "dolphin-env/dolphin.database.type", - "widget": { - "type": "combo" - } - }, - { - "config": "dolphin-env/dolphin.database.host", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.database.port", - "widget": { - "type": "text-field", - "units": [ - { - "unit-name": "int" - } - ] - } - }, - { - "config": "dolphin-env/dolphin.database.username", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.database.password", - "widget": { - "type": "password" - } - }, - { - "config": "dolphin-env/dolphin.user", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.group", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphinscheduler-env-content", - "widget": { - "type": "text-area" - } - }, - { - "config": "dolphin-common/resource.storage.type", - "widget": { - "type": "combo" - } - }, - { - "config": "dolphin-common/resource.upload.path", - 
"widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/hdfs.root.user", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/data.store2hdfs.basepath", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.defaultFS", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.endpoint", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.access.key", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.secret.key", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/hadoop.security.authentication.startup.state", - "widget": { - "type": "toggle" - } - }, - { - "config": "dolphin-common/java.security.krb5.conf.path", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/login.user.keytab.username", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/login.user.keytab.path", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/kerberos.expire.time", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.enable", - "widget": { - "type": "toggle" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.corp.id", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.secret", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.agent.id", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.users", - "widget": { - "type": "text-field" - } - } - ] - } -} diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json deleted file mode 100644 index 184f021ac3d2e683246ad5dc2098f507da8a3f98..0000000000000000000000000000000000000000 --- 
a/ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json +++ /dev/null @@ -1,158 +0,0 @@ -{ - "DOLPHIN": { - "service": [], - "DOLPHIN_API": [ - { - "name": "dolphin_api_port_check", - "label": "dolphin_api_port_check", - "description": "dolphin_api_port_check.", - "interval": 10, - "scope": "ANY", - "source": { - "type": "PORT", - "uri": "{{dolphin-application-api/server.port}}", - "default_port": 12345, - "reporting": { - "ok": { - "text": "TCP OK - {0:.3f}s response on port {1}" - }, - "warning": { - "text": "TCP OK - {0:.3f}s response on port {1}", - "value": 1.5 - }, - "critical": { - "text": "Connection failed: {0} to {1}:{2}", - "value": 5.0 - } - } - } - } - ], - "DOLPHIN_LOGGER": [ - { - "name": "dolphin_logger_port_check", - "label": "dolphin_logger_port_check", - "description": "dolphin_logger_port_check.", - "interval": 10, - "scope": "ANY", - "source": { - "type": "PORT", - "uri": "{{dolphin-common/loggerserver.rpc.port}}", - "default_port": 50051, - "reporting": { - "ok": { - "text": "TCP OK - {0:.3f}s response on port {1}" - }, - "warning": { - "text": "TCP OK - {0:.3f}s response on port {1}", - "value": 1.5 - }, - "critical": { - "text": "Connection failed: {0} to {1}:{2}", - "value": 5.0 - } - } - } - } - ], - "DOLPHIN_MASTER": [ - { - "name": "DOLPHIN_MASTER_CHECK", - "label": "check dolphin scheduler master status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - { - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_MASTER", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ], - "DOLPHIN_WORKER": [ - { - 
"name": "DOLPHIN_WORKER_CHECK", - "label": "check dolphin scheduler worker status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - { - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_WORKER", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ], - "DOLPHIN_ALERT": [ - { - "name": "DOLPHIN_DOLPHIN_ALERT_CHECK", - "label": "check dolphin scheduler alert status", - "description": "", - "interval":10, - "scope": "HOST", - "enabled": true, - "source": { - "type": "SCRIPT", - "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", - "parameters": [ - - { - "name": "connection.timeout", - "display_name": "Connection Timeout", - "value": 5.0, - "type": "NUMERIC", - "description": "The maximum time before this alert is considered to be CRITICAL", - "units": "seconds", - "threshold": "CRITICAL" - }, - { - "name": "alertName", - "display_name": "alertName", - "value": "DOLPHIN_ALERT", - "type": "STRING", - "description": "alert name" - } - ] - } - } - ] - } -} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml deleted file mode 100644 index 32abcc791d9687393fbab8af8ad5fb587b059841..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - alert.type - EMAIL - alert type is EMAIL/SMS - - - - mail.protocol - SMTP - - - - - mail.server.host - xxx.xxx.com - - - - - 
mail.server.port - 25 - - int - - - - - - mail.sender - admin - - - - - mail.user - admin - - - - - mail.passwd - 000000 - - PASSWORD - - password - - - - - - mail.smtp.starttls.enable - true - - boolean - - - - - - mail.smtp.ssl.enable - true - - boolean - - - - - - mail.smtp.ssl.trust - xxx.xxx.com - - - - - - enterprise.wechat.enable - false - - - value-list - - - true - - - - false - - - - 1 - - - - - enterprise.wechat.corp.id - wechatId - - - - - enterprise.wechat.secret - secret - - - - - enterprise.wechat.agent.id - agentId - - - - - enterprise.wechat.users - wechatUsers - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml deleted file mode 100644 index 766c0f477dfb9d96c44466f88d4b075d70beb807..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml +++ /dev/null @@ -1,87 +0,0 @@ - - - - server.port - 12345 - - server port - - - int - - - - server.servlet.session.timeout - 7200 - - int - - - - - - server.servlet.context-path - /dolphinscheduler/ - - - - - spring.servlet.multipart.max-file-size - 1024 - - MB - int - - - - - - spring.servlet.multipart.max-request-size - 1024 - - MB - int - - - - - - server.jetty.max-http-post-size - 5000000 - - int - - - - - - spring.messages.encoding - UTF-8 - - - - spring.messages.basename - i18n/messages - - - - security.authentication.type - PASSWORD - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml deleted file mode 100644 index 7d4fb8329b2d37e0889845361a8d8876d46ddd69..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml +++ /dev/null @@ -1,232 +0,0 @@ - - - 
- dolphinscheduler.queue.impl - zookeeper - - Task queue implementation, default "zookeeper" - - - - - zookeeper.dolphinscheduler.root - /dolphinscheduler - - dolphinscheduler root directory - - - - - zookeeper.session.timeout - 300 - - int - - - - - - - zookeeper.connection.timeout - 30000 - - int - - - - - - - zookeeper.retry.base.sleep - 100 - - int - - - - - - - zookeeper.retry.max.sleep - 30000 - - int - - - - - - - zookeeper.retry.maxtime - 10 - - int - - - - - - - - res.upload.startup.type - Choose Resource Upload Startup Type - - Resource upload startup type : HDFS,S3,NONE - - NONE - - value-list - - - HDFS - - - - S3 - - - - NONE - - - - 1 - - - - - hdfs.root.user - hdfs - - Users who have permission to create directories under the HDFS root path - - - - - data.store2hdfs.basepath - /dolphinscheduler - - Data base dir, resource file will store to this hadoop hdfs path, self configuration, - please make sure the directory exists on hdfs and have read write permissions。 - "/dolphinscheduler" is recommended - - - - - data.basedir.path - /tmp/dolphinscheduler - - User data directory path, self configuration, - please make sure the directory exists and have read write permissions - - - - - hadoop.security.authentication.startup.state - false - - value-list - - - true - - - - false - - - - 1 - - - - - java.security.krb5.conf.path - /opt/krb5.conf - - java.security.krb5.conf path - - - - - login.user.keytab.username - hdfs-mycluster@ESZ.COM - - LoginUserFromKeytab user - - - - - login.user.keytab.path - /opt/hdfs.headless.keytab - - LoginUserFromKeytab path - - - - - resource.view.suffixs - txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties - - - - - fs.defaultFS - hdfs://mycluster:8020 - - HA or single namenode, - If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory, - support s3,for example : s3a://dolphinscheduler - - - - - fs.s3a.endpoint - http://host:9010 - - s3 need,s3 endpoint - - - - - fs.s3a.access.key - A3DXS30FO22544RE - 
- s3 need,s3 access key - - - - - fs.s3a.secret.key - OloCLq3n+8+sdPHUhJ21XrSxTC+JK - - s3 need,s3 secret key - - - - - loggerserver.rpc.port - 50051 - - intF - - - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml deleted file mode 100644 index 02d8de0482ab0d13db8571da5fea62af52e7cddb..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml +++ /dev/null @@ -1,206 +0,0 @@ - - - - spring.datasource.initialSize - 5 - - Init connection number - - - int - - - - - spring.datasource.minIdle - 5 - - Min connection number - - - int - - - - - spring.datasource.maxActive - 50 - - Max connection number - - - int - - - - - spring.datasource.maxWait - 60000 - - Max wait time for get a connection in milliseconds. - If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. - If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. - - - int - - - - - spring.datasource.timeBetweenEvictionRunsMillis - 60000 - - Milliseconds for check to close free connections - - - int - - - - - spring.datasource.timeBetweenConnectErrorMillis - 60000 - - The Destroy thread detects the connection interval and closes the physical connection in milliseconds - if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. - - - int - - - - - spring.datasource.minEvictableIdleTimeMillis - 300000 - - The longest time a connection remains idle without being evicted, in milliseconds - - - int - - - - - spring.datasource.validationQuery - SELECT 1 - - The SQL used to check whether the connection is valid requires a query statement. - If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. 
- - - - - spring.datasource.validationQueryTimeout - 3 - - int - - - Check whether the connection is valid for timeout, in seconds - - - - - spring.datasource.testWhileIdle - true - - boolean - - - When applying for a connection, - if it is detected that the connection is idle longer than time Between Eviction Runs Millis, - validation Query is performed to check whether the connection is valid - - - - - spring.datasource.testOnBorrow - true - - boolean - - - Execute validation to check if the connection is valid when applying for a connection - - - - - spring.datasource.testOnReturn - false - - boolean - - - Execute validation to check if the connection is valid when the connection is returned - - - - - spring.datasource.defaultAutoCommit - true - - boolean - - - - - - - spring.datasource.keepAlive - false - - boolean - - - - - - - - spring.datasource.poolPreparedStatements - true - - boolean - - - Open PSCache, specify count PSCache for every connection - - - - - spring.datasource.maxPoolPreparedStatementPerConnectionSize - 20 - - int - - - - - - spring.datasource.spring.datasource.filters - stat,wall,log4j - - - - - spring.datasource.connectionProperties - druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml deleted file mode 100644 index 18501c6d0a8a7b55803cdd638ba249c3fb9571c6..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - dolphin.database.type - mysql - Dolphin Scheduler DataBase Type Which Is Select - Dolphin Database Type - - value-list - - - mysql - - - - postgresql - - - - 1 - - - - - - dolphin.database.host - - Dolphin Database Host - - - - - dolphin.database.port - - Dolphin Database Port - - - - - dolphin.database.username - - Dolphin 
Database Username - - - - - dolphin.database.password - - Dolphin Database Password - PASSWORD - - password - - - - - - dolphin.user - - Which user to install and admin dolphin scheduler - Deploy User - - - - dolphin.group - - Which user to install and admin dolphin scheduler - Deploy Group - - - - - dolphinscheduler-env-content - Dolphinscheduler Env template - This is the jinja template for dolphinscheduler.env.sh file - # -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -export HADOOP_HOME=/opt/soft/hadoop -export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop -export SPARK_HOME1=/opt/soft/spark1 -export SPARK_HOME2=/opt/soft/spark2 -export PYTHON_HOME=/opt/soft/python -export JAVA_HOME=/opt/soft/java -export HIVE_HOME=/opt/soft/hive -export FLINK_HOME=/opt/soft/flink -export DATAX_HOME=/opt/soft/datax - - content - false - false - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml deleted file mode 100644 index c8eec047fcc468f87b5042ac74f838160ca9475b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml +++ /dev/null @@ -1,88 +0,0 @@ - - - - master.exec.threads - 100 - - int - - master execute thread num - - - - master.exec.task.num - 20 - - int - - master execute task number in parallel - - - - master.heartbeat.interval - 10 - - int - - master heartbeat interval - - - - master.task.commit.retryTimes - 5 - - int - - master commit task retry times - - - - master.task.commit.interval - 1000 - - int - - master commit task interval - - - - master.max.cpuload.avg - 100 - - int - - only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 - - - - master.reserved.memory - 0.3 - only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. 
- - - - - master.listen.port - 5678 - - int - - master listen port - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml deleted file mode 100644 index 7a0c68b051806ecc7cb80889eeac82a72ddb6e75..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml +++ /dev/null @@ -1,126 +0,0 @@ - - - - org.quartz.scheduler.instanceName - DolphinScheduler - - - - - org.quartz.scheduler.instanceId - AUTO - - - - org.quartz.scheduler.makeSchedulerThreadDaemon - true - - boolean - - - - - org.quartz.jobStore.useProperties - false - - boolean - - - - - org.quartz.threadPool.class - org.quartz.simpl.SimpleThreadPool - - - - org.quartz.threadPool.makeThreadsDaemons - true - - boolean - - - - - org.quartz.threadPool.threadCount - 25 - - int - - - - - org.quartz.threadPool.threadPriority - 5 - - int - - - - - org.quartz.jobStore.class - org.quartz.impl.jdbcjobstore.JobStoreTX - - - - org.quartz.jobStore.tablePrefix - QRTZ_ - - - - org.quartz.jobStore.isClustered - true - - boolean - - - - - org.quartz.jobStore.misfireThreshold - 60000 - - int - - - - - org.quartz.jobStore.clusterCheckinInterval - 5000 - - int - - - - - org.quartz.jobStore.acquireTriggersWithinLock - true - - boolean - - - - - org.quartz.jobStore.dataSource - myDs - - - - org.quartz.dataSource.myDs.connectionProvider.class - org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml deleted file mode 100644 index aaa5463ad4c96f55720c85921d6920c8eef7f93f..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml +++ /dev/null @@ -1,78 
+0,0 @@ - - - - worker.exec.threads - 100 - - int - - worker execute thread num - - - - worker.heartbeat.interval - 10 - - int - - worker heartbeat interval - - - - worker.max.cpuload.avg - 100 - - int - - only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 - - - - - worker.reserved.memory - 0.3 - only larger than reserved memory, worker server can work. default value : physical memory * 1/10, - unit is G. - - - - - worker.listen.port - 1234 - - int - - worker listen port - - - - worker.groups - default - default worker group - - - - worker.host.weigth - 100 - - int - - worker host weight - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml deleted file mode 100644 index d3a82d700ff0c231cd3e143f3143030c9c3affbc..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml +++ /dev/null @@ -1,137 +0,0 @@ - - - - 2.0 - - - DOLPHIN - Dolphin Scheduler - 分布式易扩展的可视化DAG工作流任务调度系统 - 1.3.3 - - - DOLPHIN_MASTER - DS Master - MASTER - 1+ - - - PYTHON - 600 - - - - - DOLPHIN_LOGGER - DS Logger - SLAVE - 1+ - - - PYTHON - 600 - - - - - DOLPHIN_WORKER - DS Worker - SLAVE - 1+ - - - DOLPHIN/DOLPHIN_LOGGER - host - - true - - - - - - PYTHON - 600 - - - - - DOLPHIN_ALERT - DS Alert - SLAVE - 1 - - - PYTHON - 600 - - - - - DOLPHIN_API - DS_Api - SLAVE - 1 - - - PYTHON - 600 - - - - - - ZOOKEEPER - - - - - any - - - apache-dolphinscheduler* - - - - - - - dolphin-alert - dolphin-app-api - dolphin-app-dao - dolphin-common - dolphin-env - dolphin-quartz - - - - - theme.json - true - - - - quicklinks - - - quicklinks.json - true - - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py deleted file mode 100644 index 
87cc7b453b9aaff148387ff454c73e0178336196..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py +++ /dev/null @@ -1,124 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - -import socket -import urllib2 -import os -import logging -import ambari_simplejson as json -from resource_management.libraries.script.script import Script -import sys -reload(sys) -sys.setdefaultencoding('utf-8') - -logger = logging.getLogger('ambari_alerts') - -config = Script.get_config() - - -def get_tokens(): - """ - Returns a tuple of tokens in the format {{site/property}} that will be used - to build the dictionary passed into execute - - :rtype tuple - """ - -def get_info(url, connection_timeout): - response = None - - try: - response = urllib2.urlopen(url, timeout=connection_timeout) - json_data = response.read() - return json_data - finally: - if response is not None: - try: - response.close() - except: - pass - - -def execute(configurations={}, parameters={}, host_name=None): - """ - Returns a tuple containing the result code and a pre-formatted result label - - Keyword arguments: - configurations : a mapping of configuration key to value - parameters : a mapping of script parameter 
key to value - host_name : the name of this host where the alert is running - - :type configurations dict - :type parameters dict - :type host_name str - """ - - alert_name = parameters['alertName'] - - dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" - - pid = "0" - - - from resource_management.core import sudo - - is_running = True - pid_file_path = "" - if alert_name == 'DOLPHIN_MASTER': - pid_file_path = dolphin_pidfile_dir + "/master-server.pid" - elif alert_name == 'DOLPHIN_WORKER': - pid_file_path = dolphin_pidfile_dir + "/worker-server.pid" - elif alert_name == 'DOLPHIN_ALERT': - pid_file_path = dolphin_pidfile_dir + "/alert-server.pid" - elif alert_name == 'DOLPHIN_LOGGER': - pid_file_path = dolphin_pidfile_dir + "/logger-server.pid" - elif alert_name == 'DOLPHIN_API': - pid_file_path = dolphin_pidfile_dir + "/api-server.pid" - - if not pid_file_path or not os.path.isfile(pid_file_path): - is_running = False - - try: - pid = int(sudo.read_file(pid_file_path)) - except: - is_running = False - - try: - # Kill will not actually kill the process - # From the doc: - # If sig is 0, then no signal is sent, but error checking is still - # performed; this can be used to check for the existence of a - # process ID or process group ID. 
- sudo.kill(pid, 0) - except OSError: - is_running = False - - if host_name is None: - host_name = socket.getfqdn() - - if not is_running: - result_code = "CRITICAL" - else: - result_code = "OK" - - label = "The comment {0} of DOLPHIN_SCHEDULER on {1} is {2}".format(alert_name, host_name, result_code) - - return ((result_code, [label])) - -if __name__ == "__main__": - pass diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py deleted file mode 100644 index e78c38d272f67bffb5e71cefddb10a21f489787b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinAlertService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/alert-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/alert-server.pid` | grep `cat {dolphin_pidfile_dir}/alert-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start alert-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop alert-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "alert-server.pid") - - -if __name__ == "__main__": - DolphinAlertService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py deleted file mode 100644 index 5a28924a9a7531a9c1091b3a136fd68d69815486..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinApiService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - - #init - init_cmd=format("sh " + params.dolphin_home + "/script/create-dolphinscheduler.sh") - Execute(init_cmd, user=params.dolphin_user) - - #upgrade - upgrade_cmd=format("sh " + params.dolphin_home + "/script/upgrade-dolphinscheduler.sh") - Execute(upgrade_cmd, user=params.dolphin_user) - - no_op_test = format("ls {dolphin_pidfile_dir}/api-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/api-server.pid` | grep `cat {dolphin_pidfile_dir}/api-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start api-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh 
" + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop api-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "api-server.pid") - - -if __name__ == "__main__": - DolphinApiService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py deleted file mode 100644 index 1661d76c753e09ff402dbebeac1afd41fa548632..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
- -""" - -from resource_management import * - - -def dolphin_env(): - import params - - Directory(params.dolphin_pidfile_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - Directory(params.dolphin_log_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - Directory(params.dolphin_conf_dir, - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - - Directory(params.dolphin_common_map['data.basedir.path'], - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - - - File(format(params.dolphin_env_path), - mode=0777, - content=InlineTemplate(params.dolphin_env_content), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - - File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), - mode=0755, - content=Template("dolphin-daemon.sh.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/master.properties"), - mode=0755, - content=Template("master.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/worker.properties"), - mode=0755, - content=Template("worker.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - - File(format(params.dolphin_conf_dir + "/alert.properties"), - mode=0755, - content=Template("alert.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/datasource.properties"), - mode=0755, - content=Template("datasource.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/application-api.properties"), - mode=0755, - content=Template("application-api.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - 
File(format(params.dolphin_conf_dir + "/common.properties"), - mode=0755, - content=Template("common.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/quartz.properties"), - mode=0755, - content=Template("quartz.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) - - File(format(params.dolphin_conf_dir + "/zookeeper.properties"), - mode=0755, - content=Template("zookeeper.properties.j2"), - owner=params.dolphin_user, - group=params.dolphin_group - ) diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py deleted file mode 100644 index fb47e132e17cccbcfc22cabc13d3b5fb7d93d52e..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinLoggerService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/logger-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/logger-server.pid` | grep `cat {dolphin_pidfile_dir}/logger-server.pid` >/dev/null 2>&1") - - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start logger-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop logger-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "logger-server.pid") - - -if __name__ == "__main__": - DolphinLoggerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py deleted file mode 100644 index 8d64935d26e5cb50ebe27129f56c7ef114c5b833..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinMasterService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/master-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/master-server.pid` | grep `cat {dolphin_pidfile_dir}/master-server.pid` >/dev/null 2>&1") - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start master-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop master-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + 
"master-server.pid") - - -if __name__ == "__main__": - DolphinMasterService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py deleted file mode 100644 index 1f542c06c26e787aaf584877355fc458bb23ee47..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" -import time -from resource_management import * - -from dolphin_env import dolphin_env - - -class DolphinWorkerService(Script): - def install(self, env): - import params - env.set_params(params) - self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home)) - Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) - - def configure(self, env): - import params - params.pika_slave = True - env.set_params(params) - - dolphin_env() - - def start(self, env): - import params - env.set_params(params) - self.configure(env) - no_op_test = format("ls {dolphin_pidfile_dir}/worker-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/worker-server.pid` | grep `cat {dolphin_pidfile_dir}/worker-server.pid` >/dev/null 2>&1") - start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start worker-server") - Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) - - def stop(self, env): - import params - env.set_params(params) - stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop worker-server") - Execute(stop_cmd, user=params.dolphin_user) - time.sleep(5) - - def status(self, env): - import status_params - env.set_params(status_params) - check_process_status(status_params.dolphin_run_dir + "worker-server.pid") - - -if __name__ == "__main__": - DolphinWorkerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py deleted file mode 100644 index 5a9994f55955be44a284162dcc8fa4a0c4e9fa93..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py +++ /dev/null @@ -1,155 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. 
See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - - -import sys -from resource_management import * -from resource_management.core.logger import Logger -from resource_management.libraries.functions import default - -Logger.initialize_logger() -reload(sys) -sys.setdefaultencoding('utf-8') - -# server configurations -config = Script.get_config() - -# conf_dir = "/etc/" -dolphin_home = "/opt/soft/dolphinscheduler" -dolphin_conf_dir = dolphin_home + "/conf" -dolphin_log_dir = dolphin_home + "/logs" -dolphin_bin_dir = dolphin_home + "/bin" -dolphin_lib_jars = dolphin_home + "/lib/*" -dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" - -rmHosts = default("/clusterHostInfo/rm_host", []) - -# dolphin-env -dolphin_env_map = {} -dolphin_env_map.update(config['configurations']['dolphin-env']) - -# which user to install and admin dolphin scheduler -dolphin_user = dolphin_env_map['dolphin.user'] -dolphin_group = dolphin_env_map['dolphin.group'] - -# .dolphinscheduler_env.sh -dolphin_env_path = dolphin_conf_dir + '/env/dolphinscheduler_env.sh' -dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] - -# database config -dolphin_database_config = {} -dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] -dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] 
-dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] -if 'mysql' == dolphin_database_config['dolphin_database_type']: - dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' - dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' - dolphin_database_config['dolphin_database_url'] = 'jdbc:mysql://' + dolphin_env_map['dolphin.database.host'] \ - + ':' + dolphin_env_map['dolphin.database.port'] \ - + '/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8' -else: - dolphin_database_config['dolphin_database_driver'] = 'org.postgresql.Driver' - dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.PostgreSQLDelegate' - dolphin_database_config['dolphin_database_url'] = 'jdbc:postgresql://' + dolphin_env_map['dolphin.database.host'] \ - + ':' + dolphin_env_map['dolphin.database.port'] \ - + '/dolphinscheduler' - - - - - -# application-alert.properties -dolphin_alert_map = {} -wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' -wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret' -wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}' -wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}' - -dolphin_alert_config_map = config['configurations']['dolphin-alert'] - -if dolphin_alert_config_map['enterprise.wechat.enable']: - dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url - dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url - dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg - dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg - -dolphin_alert_map.update(dolphin_alert_config_map) - - - -# 
application-api.properties -dolphin_app_api_map = {} -dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) - - -# common.properties -dolphin_common_map = {} - -if 'yarn-site' in config['configurations'] and \ - 'yarn.resourcemanager.webapp.address' in config['configurations']['yarn-site']: - yarn_resourcemanager_webapp_address = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'] - yarn_application_status_address = 'http://' + yarn_resourcemanager_webapp_address + '/ws/v1/cluster/apps/%s' - dolphin_common_map['yarn.application.status.address'] = yarn_application_status_address - -rmHosts = default("/clusterHostInfo/rm_host", []) -if len(rmHosts) > 1: - dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = ','.join(rmHosts) -else: - dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = '' - -dolphin_common_map_tmp = config['configurations']['dolphin-common'] -data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] -dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path -dolphin_common_map.update(config['configurations']['dolphin-common']) - -# datasource.properties -dolphin_datasource_map = {} -dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' -dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] -dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] -dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] -dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] -dolphin_datasource_map.update(config['configurations']['dolphin-datasource']) - -# master.properties -dolphin_master_map = config['configurations']['dolphin-master'] - -# quartz.properties -dolphin_quartz_map = {} -dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = 
dolphin_database_config['driverDelegateClass'] -dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) - -# worker.properties -dolphin_worker_map = config['configurations']['dolphin-worker'] - -# zookeeper.properties -dolphin_zookeeper_map={} -zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) -if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: - clientPort = config['configurations']['zoo.cfg']['clientPort'] - zookeeperPort = ":" + clientPort + "," - dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort -dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper']) -if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map: - file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size'] - dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB" -if 'spring.servlet.multipart.max-request-size' in dolphin_app_api_map: - request_size = dolphin_app_api_map['spring.servlet.multipart.max-request-size'] - dolphin_app_api_map['spring.servlet.multipart.max-request-size'] = request_size + "MB" - - diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py deleted file mode 100644 index 0e12f699324e943de71a18e255e854a13c9bb5ed..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -""" - -from resource_management import * -from resource_management.libraries.functions import get_unique_id_and_date - -class ServiceCheck(Script): - def service_check(self, env): - import params - #env.set_params(params) - - # Execute(format("which pika_server")) - -if __name__ == "__main__": - ServiceCheck().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py deleted file mode 100644 index 24b2c8b1bcb87bb03b1bba71f569d190eab0843b..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-""" - -from resource_management import * - -config = Script.get_config() - -dolphin_run_dir = "/opt/soft/run/dolphinscheduler/" diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2 deleted file mode 100644 index 73840b8c18109e00fa348c2c3113189eec810a95..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -{% for key, value in dolphin_alert_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2 deleted file mode 100644 index 70118003b9234a461047d5c4b5edf7cae6bb52c9..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -{% for key, value in dolphin_app_api_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2 deleted file mode 100644 index 2220c4effaa8daad2cdf4d46b54cded4e8b44844..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_common_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 deleted file mode 100644 index 40aed83543e7db639051101c6ec6ce0d4af2b42a..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_datasource_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 deleted file mode 100644 index c5cc11fb6271dadd197fea3f7a4f04b8210a2eb5..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/sh -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -usage="Usage: dolphinscheduler-daemon.sh (start|stop) " - -# if no args specified, show usage -if [ $# -le 1 ]; then - echo $usage - exit 1 -fi - -startStop=$1 -shift -command=$1 -shift - -echo "Begin $startStop $command......" - -BIN_DIR=`dirname $0` -BIN_DIR=`cd "$BIN_DIR"; pwd` -DOLPHINSCHEDULER_HOME=$BIN_DIR/.. - -export HOSTNAME=`hostname` - -DOLPHINSCHEDULER_LIB_JARS={{dolphin_lib_jars}} - -DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms1g -Xss512k -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=10m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" -STOP_TIMEOUT=5 - -log={{dolphin_log_dir}}/dolphinscheduler-$command-$HOSTNAME.out -pid={{dolphin_pidfile_dir}}/$command.pid - -cd $DOLPHINSCHEDULER_HOME - -if [ "$command" = "api-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api" - CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer -elif [ "$command" = "master-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false" - CLASS=org.apache.dolphinscheduler.server.master.MasterServer -elif [ "$command" = "worker-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false" - CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer -elif [ "$command" = "alert-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml" - CLASS=org.apache.dolphinscheduler.alert.AlertServer -elif [ "$command" = "logger-server" ]; then - CLASS=org.apache.dolphinscheduler.server.log.LoggerServer -else - echo "Error: No command named \`$command' was found." 
- exit 1 -fi - -case $startStop in - (start) - - if [ -f $pid ]; then - if kill -0 `cat $pid` > /dev/null 2>&1; then - echo $command running as process `cat $pid`. Stop it first. - exit 1 - fi - fi - - echo starting $command, logging to $log - - exec_command="$LOG_FILE $DOLPHINSCHEDULER_OPTS -classpath {{dolphin_conf_dir}}:{{dolphin_lib_jars}} $CLASS" - - echo "nohup java $exec_command > $log 2>&1 < /dev/null &" - nohup java $exec_command > $log 2>&1 < /dev/null & - echo $! > $pid - ;; - - (stop) - - if [ -f $pid ]; then - TARGET_PID=`cat $pid` - if kill -0 $TARGET_PID > /dev/null 2>&1; then - echo stopping $command - kill $TARGET_PID - sleep $STOP_TIMEOUT - if kill -0 $TARGET_PID > /dev/null 2>&1; then - echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9" - kill -9 $TARGET_PID - fi - else - echo no $command to stop - fi - rm -f $pid - else - echo no $command to stop - fi - ;; - - (*) - echo $usage - exit 1 - ;; - -esac - -echo "End $startStop $command." \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 deleted file mode 100644 index d9b85e14cf78ecca1d34f425cde02b408739ced7..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_master_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2 deleted file mode 100644 index e027a263b5bc0810e2e67d5a6386fd2bbb17ee43..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -{% for key, value in dolphin_quartz_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 deleted file mode 100644 index a008b74084ebf8cf63e975d06b6f1bee76a4e2a8..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_worker_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 deleted file mode 100644 index 9eb14eaef319d5d56324cccb47f5c3b4b2878681..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -{% for key, value in dolphin_zookeeper_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json deleted file mode 100644 index 8753004fefa3b6605d261a20756c6019b28695cb..0000000000000000000000000000000000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "default", - "description": "default quick links configuration", - "configuration": { - "protocol": - { - "type":"http" - }, - - "links": [ - { - "name": "dolphin-application-ui", - "label": "DolphinApplication UI", - "requires_user_name": "false", - "component_name": "DOLPHIN_API", - "url": "%@://%@:%@/dolphinscheduler/ui/view/login/index.html", - "port":{ - "http_property": "server.port", - "http_default_port": "12345", - "regex": "^(\\d+)$", - "site": "dolphin-application-api" - } - } - ] - } -} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json deleted file mode 100644 index 953e2323f8c84fe430b790e514284cac5e022f2c..0000000000000000000000000000000000000000 --- 
a/ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json +++ /dev/null @@ -1,661 +0,0 @@ -{ - "name": "default", - "description": "Default theme for Dolphin Scheduler service", - "configuration": { - "layouts": [ - { - "name": "default", - "tabs": [ - { - "name": "settings", - "display-name": "Settings", - "layout": { - "tab-rows": "3", - "tab-columns": "3", - "sections": [ - { - "name": "dolphin-env-config", - "display-name": "Dolphin Env Config", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - "section-columns": "2", - "subsections": [ - { - "name": "env-row1-col1", - "display-name": "Deploy User Info", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "env-row1-col2", - "display-name": "System Env Optimization", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - } - ] - }, - { - "name": "dolphin-database-config", - "display-name": "Database Config", - "row-index": "1", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - "section-columns": "3", - "subsections": [ - { - "name": "database-row1-col1", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "database-row1-col2", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - }, - { - "name": "database-row1-col3", - "row-index": "0", - "column-index": "2", - "row-span": "1", - "column-span": "1" - } - ] - }, - { - "name": "dynamic-config", - "row-index": "2", - "column-index": "0", - "row-span": "1", - "column-span": "2", - "section-rows": "1", - "section-columns": "3", - "subsections": [ - { - "name": "dynamic-row1-col1", - "display-name": "Resource FS Config", - "row-index": "0", - "column-index": "0", - "row-span": "1", - "column-span": "1" - }, - { - "name": "dynamic-row1-col2", - "display-name": "Kerberos Info", - "row-index": "0", - 
"column-index": "1", - "row-span": "1", - "column-span": "1" - }, - { - "name": "dynamic-row1-col3", - "display-name": "Wechat Info", - "row-index": "0", - "column-index": "1", - "row-span": "1", - "column-span": "1" - } - ] - } - ] - } - } - ] - } - ], - "placement": { - "configuration-layout": "default", - "configs": [ - { - "config": "dolphin-env/dolphin.database.type", - "subsection-name": "database-row1-col1" - }, - { - "config": "dolphin-env/dolphin.database.host", - "subsection-name": "database-row1-col2" - }, - { - "config": "dolphin-env/dolphin.database.port", - "subsection-name": "database-row1-col2" - }, - { - "config": "dolphin-env/dolphin.database.username", - "subsection-name": "database-row1-col3" - }, - { - "config": "dolphin-env/dolphin.database.password", - "subsection-name": "database-row1-col3" - }, - { - "config": "dolphin-env/dolphin.user", - "subsection-name": "env-row1-col1" - }, - { - "config": "dolphin-env/dolphin.group", - "subsection-name": "env-row1-col1" - }, - { - "config": "dolphin-env/dolphinscheduler-env-content", - "subsection-name": "env-row1-col2" - }, - { - "config": "dolphin-common/resource.storage.type", - "subsection-name": "dynamic-row1-col1" - }, - { - "config": "dolphin-common/resource.upload.path", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/hdfs.root.user", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - 
"property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/data.store2hdfs.basepath", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.defaultFS", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === HDFS", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.endpoint", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.access.key", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/fs.s3a.secret.key", - "subsection-name": "dynamic-row1-col1", - "depends-on": [ - { - "configs":[ - "dolphin-common/resource.storage.type" - ], - "if": "${dolphin-common/resource.storage.type} === S3", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - 
"visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/hadoop.security.authentication.startup.state", - "subsection-name": "dynamic-row1-col2" - }, - { - "config": "dolphin-common/java.security.krb5.conf.path", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/login.user.keytab.username", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/login.user.keytab.path", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-common/kerberos.expire.time", - "subsection-name": "dynamic-row1-col2", - "depends-on": [ - { - "configs":[ - "dolphin-common/hadoop.security.authentication.startup.state" - ], - "if": "${dolphin-common/hadoop.security.authentication.startup.state}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.enable", - "subsection-name": 
"dynamic-row1-col3" - }, - { - "config": "dolphin-alert/enterprise.wechat.corp.id", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.secret", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.agent.id", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - }, - { - "config": "dolphin-alert/enterprise.wechat.users", - "subsection-name": "dynamic-row1-col3", - "depends-on": [ - { - "configs":[ - "dolphin-alert/enterprise.wechat.enable" - ], - "if": "${dolphin-alert/enterprise.wechat.enable}", - "then": { - "property_value_attributes": { - "visible": true - } - }, - "else": { - "property_value_attributes": { - "visible": false - } - } - } - ] - } - ] - }, - "widgets": [ - { - "config": "dolphin-env/dolphin.database.type", - "widget": { - "type": "combo" - } - }, - { - "config": "dolphin-env/dolphin.database.host", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.database.port", - "widget": { - "type": "text-field", - "units": [ - { - "unit-name": "int" - } - ] - } - }, - { - "config": 
"dolphin-env/dolphin.database.username", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.database.password", - "widget": { - "type": "password" - } - }, - { - "config": "dolphin-env/dolphin.user", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphin.group", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-env/dolphinscheduler-env-content", - "widget": { - "type": "text-area" - } - }, - { - "config": "dolphin-common/resource.storage.type", - "widget": { - "type": "combo" - } - }, - { - "config": "dolphin-common/resource.upload.path", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/hdfs.root.user", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/data.store2hdfs.basepath", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.defaultFS", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.endpoint", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.access.key", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/fs.s3a.secret.key", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/hadoop.security.authentication.startup.state", - "widget": { - "type": "toggle" - } - }, - { - "config": "dolphin-common/java.security.krb5.conf.path", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/login.user.keytab.username", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/login.user.keytab.path", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-common/kerberos.expire.time", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.enable", - "widget": { - "type": "toggle" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.corp.id", - "widget": { - "type": "text-field" - } - 
}, - { - "config": "dolphin-alert/enterprise.wechat.secret", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.agent.id", - "widget": { - "type": "text-field" - } - }, - { - "config": "dolphin-alert/enterprise.wechat.users", - "widget": { - "type": "text-field" - } - } - ] - } -} diff --git a/ambari_plugin/statcks/DOLPHIN/metainfo.xml b/ambari_plugin/statcks/DOLPHIN/metainfo.xml deleted file mode 100755 index ea40cd304d244993ed98d661595411e70458436a..0000000000000000000000000000000000000000 --- a/ambari_plugin/statcks/DOLPHIN/metainfo.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - 2.0 - - - DOLPHIN - common-services/DOLPHIN/1.3.0 - - - \ No newline at end of file diff --git a/docker/README.md b/docker/README.md index dfa6198c04eab04e11524126b137e8782fb7cab2..3765e70582ae939aaf9291e19df964a90e61588e 100644 --- a/docker/README.md +++ b/docker/README.md @@ -2,10 +2,10 @@ ### QuickStart in Docker -[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/docker-deployment.html) -[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](https://dolphinscheduler.apache.org/zh-cn/docs/latest/user_doc/docker-deployment.html) +[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](https://dolphinscheduler.apache.org/en-us/docs/2.0.7/user_doc/guide/installation/docker.html) +[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](https://dolphinscheduler.apache.org/zh-cn/docs/2.0.7/user_doc/guide/installation/docker.html) ### QuickStart in Kubernetes -[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/kubernetes-deployment.html) -[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](https://dolphinscheduler.apache.org/zh-cn/docs/latest/user_doc/kubernetes-deployment.html) +[![EN 
doc](https://img.shields.io/badge/document-English-blue.svg)](https://dolphinscheduler.apache.org/en-us/docs/2.0.7/user_doc/guide/installation/kubernetes.html) +[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](https://dolphinscheduler.apache.org/zh-cn/docs/2.0.7/user_doc/guide/installation/kubernetes.html) diff --git a/docker/build/conf/dolphinscheduler/application-api.properties.tpl b/docker/build/conf/dolphinscheduler/application-api.properties.tpl index d78db2d63101c03889a05a681765f097c69e54b9..393a33ce0cb05849eae452cabf8227bdab0ebc0f 100644 --- a/docker/build/conf/dolphinscheduler/application-api.properties.tpl +++ b/docker/build/conf/dolphinscheduler/application-api.properties.tpl @@ -38,6 +38,9 @@ server.compression.mime-types=text/html,text/xml,text/plain,text/css,text/javasc # max http post size server.jetty.max-http-form-post-size=5000000 +# max http header size +server.max-http-header-size=81920 + # messages encoding spring.messages.encoding=UTF-8 diff --git a/docker/build/conf/dolphinscheduler/datasource.properties.tpl b/docker/build/conf/dolphinscheduler/datasource.properties.tpl index 9177732ba69a28cbe7f6b00a4c7040b6e035a181..c135752e1563bcfaafe312713194e1b258002945 100644 --- a/docker/build/conf/dolphinscheduler/datasource.properties.tpl +++ b/docker/build/conf/dolphinscheduler/datasource.properties.tpl @@ -21,6 +21,9 @@ spring.datasource.url=jdbc:${DATABASE_TYPE}://${DATABASE_HOST}:${DATABASE_PORT}/ spring.datasource.username=${DATABASE_USERNAME} spring.datasource.password=${DATABASE_PASSWORD} +# hikari configuration +spring.datasource.hikari.maximum-pool-size=${HIKARI_MAXIMUM_POOL_SIZE} + # mysql example #spring.datasource.driver-class-name=com.mysql.jdbc.Driver #spring.datasource.url=jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 diff --git a/docker/build/conf/dolphinscheduler/logback/logback-master.xml b/docker/build/conf/dolphinscheduler/logback/logback-master.xml index 
f0d2c81df22a784504b590849e3a0cd39074668c..0d61968451de28bd5494623d1bf73969fc65dea9 100644 --- a/docker/build/conf/dolphinscheduler/logback/logback-master.xml +++ b/docker/build/conf/dolphinscheduler/logback/logback-master.xml @@ -45,7 +45,7 @@ ${log.base}/${taskAppId}.log - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} [%thread] - %messsage%n UTF-8 @@ -66,7 +66,7 @@ - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %logger{96}:[%line] - %msg%n UTF-8 diff --git a/docker/build/conf/dolphinscheduler/logback/logback-worker.xml b/docker/build/conf/dolphinscheduler/logback/logback-worker.xml index 7127219873476cd5fb9869ba3b28f58bac679e71..4aca8b8bc356648ad266e2b168cc49ccdaf6d341 100644 --- a/docker/build/conf/dolphinscheduler/logback/logback-worker.xml +++ b/docker/build/conf/dolphinscheduler/logback/logback-worker.xml @@ -46,7 +46,7 @@ ${log.base}/${taskAppId}.log - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} [%thread] - %messsage%n UTF-8 @@ -66,7 +66,7 @@ - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %logger{96}:[%line] - %messsage%n UTF-8 diff --git a/docker/build/conf/dolphinscheduler/master.properties.tpl b/docker/build/conf/dolphinscheduler/master.properties.tpl index 046d5c15ffacd8190e8590c0ada939c585864b7c..5d130fa41114263a97cd351016f19f5dacf2e81f 100644 --- a/docker/build/conf/dolphinscheduler/master.properties.tpl +++ b/docker/build/conf/dolphinscheduler/master.properties.tpl @@ -44,3 +44,9 @@ master.max.cpuload.avg=${MASTER_MAX_CPULOAD_AVG} # master reserved memory, only lower than system available memory, master server can schedule. 
default value 0.3, the unit is G master.reserved.memory=${MASTER_RESERVED_MEMORY} +# master failover interval minutes +master.failover.interval=${MASTER_FAILOVER_INTERVAL} +# master kill yarn job when handle failover +master.kill.yarn.job.when.handle.failover=${MASTER_KILL_YARN_JOB_WHEN_HANDLE_FAILOVER} +# master.persist.event.state.threads +master.persist.event.state.threads=${MASTER_PERSIST_EVENT_STATE_THREADS} \ No newline at end of file diff --git a/docker/build/conf/dolphinscheduler/quartz.properties.tpl b/docker/build/conf/dolphinscheduler/quartz.properties.tpl index 10f18122bff460563060ef9289c58e1ad1ca1889..5f011f91513f673e4a6b8f1e2d2fa1bb7b9da103 100644 --- a/docker/build/conf/dolphinscheduler/quartz.properties.tpl +++ b/docker/build/conf/dolphinscheduler/quartz.properties.tpl @@ -32,7 +32,8 @@ #org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool #org.quartz.threadPool.makeThreadsDaemons = true -#org.quartz.threadPool.threadCount = 25 +org.quartz.threadPool.threadCount = ${ORG_QUARTZ_THREADPOOL_THREADCOUNT} +org.quartz.scheduler.batchTriggerAcquisitionMaxCount = ${ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT} #org.quartz.threadPool.threadPriority = 5 #============================================================================ @@ -51,4 +52,4 @@ #============================================================================ # Configure Datasources #============================================================================ -#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider +#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.HikariConnectionProvider diff --git a/docker/build/conf/dolphinscheduler/registry.properties.tpl b/docker/build/conf/dolphinscheduler/registry.properties.tpl index 40836f5a120263361d7eb2dee8b2d07b88b944c1..e1ac10434a53be6acb0985805d199c35b4ae1113 100644 --- 
a/docker/build/conf/dolphinscheduler/registry.properties.tpl +++ b/docker/build/conf/dolphinscheduler/registry.properties.tpl @@ -15,13 +15,6 @@ # limitations under the License. # -#registry.plugin.dir config the Registry Plugin dir. -registry.plugin.dir=${REGISTRY_PLUGIN_DIR} - registry.plugin.name=${REGISTRY_PLUGIN_NAME} registry.servers=${REGISTRY_SERVERS} - -#maven.local.repository=/usr/local/localRepository - -#registry.plugin.binding config the Registry Plugin need be load when development and run in IDE -#registry.plugin.binding=./dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml +session.timeout.ms=${SESSION_TIMEOUT_MS} \ No newline at end of file diff --git a/docker/build/conf/dolphinscheduler/supervisor/supervisor.ini b/docker/build/conf/dolphinscheduler/supervisor/supervisor.ini index 19166f48d97696910335f96df2f09ea4dce77d49..e5314c19f420ecb308151c8847ad26d76f405e38 100644 --- a/docker/build/conf/dolphinscheduler/supervisor/supervisor.ini +++ b/docker/build/conf/dolphinscheduler/supervisor/supervisor.ini @@ -105,3 +105,18 @@ killasgroup=true redirect_stderr=true stdout_logfile=/dev/fd/1 stdout_logfile_maxbytes=0 + +[program:python-gateway] +command=%(ENV_DOLPHINSCHEDULER_BIN)s/dolphinscheduler-daemon.sh start python-gateway +directory=%(ENV_DOLPHINSCHEDULER_HOME)s +priority=999 +autostart=%(ENV_STANDALONE_START_ENABLED)s +autorestart=true +startsecs=5 +stopwaitsecs=3 +exitcodes=0 +stopasgroup=true +killasgroup=true +redirect_stderr=true +stdout_logfile=/dev/fd/1 +stdout_logfile_maxbytes=0 diff --git a/docker/build/conf/dolphinscheduler/worker.properties.tpl b/docker/build/conf/dolphinscheduler/worker.properties.tpl index 94a33526116b5fc9c3c5bf9ee15eb0d5bd49cad6..e1f157456175c15e8c82fab4b7249a56492d414b 100644 --- a/docker/build/conf/dolphinscheduler/worker.properties.tpl +++ b/docker/build/conf/dolphinscheduler/worker.properties.tpl @@ -41,3 +41,6 @@ worker.groups=${WORKER_GROUPS} # alert server listen host 
alert.listen.host=${ALERT_LISTEN_HOST} + +# worker retry report task statues interval seconds +worker.retry.report.task.statues.interval=${WORKER_RETRY_REPORT_TASK_STATUS_INTERVAL} \ No newline at end of file diff --git a/docker/build/hooks/build b/docker/build/hooks/build index 70ea260dea28049ae4f249fdb5996d80f8a0068d..1e6a96549b4757d7edc2a44da43397da51dd5eb6 100755 --- a/docker/build/hooks/build +++ b/docker/build/hooks/build @@ -18,41 +18,17 @@ set -e -echo "------ dolphinscheduler start - build -------" -printenv +ROOT_DIR=$(dirname "$0")/../../.. +MVN="$ROOT_DIR"/mvnw +VERSION=$("$MVN" -q -DforceStdout -N org.apache.maven.plugins:maven-help-plugin:3.2.0:evaluate -Dexpression=project.version) -if [ -z "${VERSION}" ] -then - echo "set default environment variable [VERSION]" - export VERSION=$(cat $(pwd)/pom.xml | grep '' -m 1 | awk '{print $1}' | sed 's///' | sed 's/<\/version>//') -fi +DOCKER_REPO=${DOCKER_REPO:-"apache/dolphinscheduler"} +TAG=${TAG:-"$VERSION"} -if [ "${DOCKER_REPO}x" = "x" ] -then - echo "set default environment variable [DOCKER_REPO]" - export DOCKER_REPO='apache/dolphinscheduler' -fi +echo "Building Docker image as: $DOCKER_REPO:$TAG" -echo "Version: $VERSION" -echo "Repo: $DOCKER_REPO" +"$MVN" -B clean package -Prelease -Dmaven.test.skip=true -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 -echo -e "Current Directory is $(pwd)\n" +cp "$ROOT_DIR"/dolphinscheduler-dist/target/apache-dolphinscheduler-$VERSION-bin.tar.gz "$ROOT_DIR"/docker/build/ -# maven package(Project Directory) -echo -e "./mvnw -B clean package -Prelease -Dmaven.test.skip=true -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" -./mvnw -B clean package -Prelease -Dmaven.test.skip=true -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 - -# mv dolphinscheduler-bin.tar.gz file to docker/build directory -echo -e "mv 
$(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-${VERSION}-bin.tar.gz $(pwd)/docker/build/\n" -mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-${VERSION}-bin.tar.gz $(pwd)/docker/build/ - -# docker build -BUILD_COMMAND="docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/docker/build/" -echo -e "$BUILD_COMMAND\n" -if (docker info 2> /dev/null | grep -i "ERROR"); then - sudo $BUILD_COMMAND -else - $BUILD_COMMAND -fi - -echo "------ dolphinscheduler end - build -------" +docker build --build-arg VERSION=$VERSION -t $DOCKER_REPO:$TAG "$ROOT_DIR"/docker/build/ diff --git a/docker/build/startup-init-conf.sh b/docker/build/startup-init-conf.sh index 35f70bbdf04886b426958a08f50c3258471146af..ada19848852f25a17e16babe55c90c3c53515d51 100755 --- a/docker/build/startup-init-conf.sh +++ b/docker/build/startup-init-conf.sh @@ -25,22 +25,33 @@ echo "init env variables" #============================================================================ # Database #============================================================================ -export DATABASE_TYPE=${DATABASE_TYPE:-"postgresql"} -export DATABASE_DRIVER=${DATABASE_DRIVER:-"org.postgresql.Driver"} -export DATABASE_HOST=${DATABASE_HOST:-"127.0.0.1"} -export DATABASE_PORT=${DATABASE_PORT:-"5432"} -export DATABASE_USERNAME=${DATABASE_USERNAME:-"root"} -export DATABASE_PASSWORD=${DATABASE_PASSWORD:-"root"} -export DATABASE_DATABASE=${DATABASE_DATABASE:-"dolphinscheduler"} -export DATABASE_PARAMS=${DATABASE_PARAMS:-"characterEncoding=utf8"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_TYPE=${DATABASE_TYPE:-"postgresql"} +# export DATABASE_DRIVER=${DATABASE_DRIVER:-"org.postgresql.Driver"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_HOST=${DATABASE_HOST:-"127.0.0.1"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_PORT=${DATABASE_PORT:-"5432"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_USERNAME=${DATABASE_USERNAME:-"root"} +[ "h2" == ${DATABASE_TYPE} ] || export 
DATABASE_PASSWORD=${DATABASE_PASSWORD:-"root"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_DATABASE=${DATABASE_DATABASE:-"dolphinscheduler"} +[ "h2" == ${DATABASE_TYPE} ] || export DATABASE_PARAMS=${DATABASE_PARAMS:-"characterEncoding=utf8"} +# transform params to SPRING_DATASOURCE_XXX +[ "h2" == ${DATABASE_TYPE} ] || export SPRING_DATASOURCE_URL="jdbc:${DATABASE_TYPE}://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DATABASE}?${DATABASE_PARAMS}" +[ "h2" == ${DATABASE_TYPE} ] || export SPRING_DATASOURCE_USERNAME=${DATABASE_USERNAME} +[ "h2" == ${DATABASE_TYPE} ] || export SPRING_DATASOURCE_PASSWORD=${DATABASE_PASSWORD} +# export SPRING_DATASOURCE_DRIVER_CLASS_NAME=${DATABASE_DRIVER} +export SPRING_DATASOURCE_HIKARI_MAXIMUM_POOL_SIZE=${HIKARI_MAXIMUM_POOL_SIZE:-"50"} + +#============================================================================ +# Quartz +#============================================================================ +export ORG_QUARTZ_THREADPOOL_THREADCOUNT=${ORG_QUARTZ_THREADPOOL_THREADCOUNT:-"25"} +export ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT=${ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT:-"1"} #============================================================================ # Registry #============================================================================ -export REGISTRY_PLUGIN_DIR=${REGISTRY_PLUGIN_DIR:-"lib/plugin/registry"} export REGISTRY_PLUGIN_NAME=${REGISTRY_PLUGIN_NAME:-"zookeeper"} export REGISTRY_SERVERS=${REGISTRY_SERVERS:-"127.0.0.1:2181"} - +export SESSION_TIMEOUT_MS=${SESSION_TIMEOUT_MS:-"30000"} #============================================================================ # Common #============================================================================ @@ -108,7 +119,6 @@ export ALERT_LISTEN_HOST=${ALERT_LISTEN_HOST:-"localhost"} # Alert Server #============================================================================ export ALERT_SERVER_OPTS=${ALERT_SERVER_OPTS:-"-Xms512m -Xmx512m -Xmn256m"} 
-export ALERT_PLUGIN_DIR=${ALERT_PLUGIN_DIR:-"lib/plugin/alert"} #============================================================================ # Api Server diff --git a/docker/build/startup.sh b/docker/build/startup.sh index 7f3b7d0d2081ef4b6b1418de0ae6d233fe94814e..8838b802f5e68575f86047c3011083fc96068c4b 100755 --- a/docker/build/startup.sh +++ b/docker/build/startup.sh @@ -25,6 +25,7 @@ export API_START_ENABLED=false export ALERT_START_ENABLED=false export LOGGER_START_ENABLED=false export STANDALONE_START_ENABLED=false +export PYTHON_GATEWAY_START_ENABLED=false # wait database waitDatabase() { @@ -68,13 +69,14 @@ waitZK() { printUsage() { echo -e "Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system," echo -e "dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.\n" - echo -e "Usage: [ all | master-server | worker-server | api-server | alert-server | standalone-server ]\n" - printf "%-13s: %s\n" "all" "Run master-server, worker-server, api-server and alert-server" - printf "%-13s: %s\n" "master-server" "MasterServer is mainly responsible for DAG task split, task submission monitoring." - printf "%-13s: %s\n" "worker-server" "WorkerServer is mainly responsible for task execution and providing log services." - printf "%-13s: %s\n" "api-server" "ApiServer is mainly responsible for processing requests and providing the front-end UI layer." - printf "%-13s: %s\n" "alert-server" "AlertServer mainly include Alarms." - printf "%-13s: %s\n" "standalone-server" "Standalone server that uses embedded zookeeper and database, only for testing and demostration." 
+ echo -e "Usage: [ all | master-server | worker-server | api-server | alert-server | standalone-server | python-gateway-server]\n" + printf "%-13s: %s\n" "all" "Run master-server, worker-server, api-server, alert-server, python-gateway-server" + printf "%-13s: %s\n" "master-server" "MasterServer is mainly responsible for DAG task split, task submission monitoring." + printf "%-13s: %s\n" "worker-server" "WorkerServer is mainly responsible for task execution and providing log services." + printf "%-13s: %s\n" "api-server" "ApiServer is mainly responsible for processing requests and providing the front-end UI layer." + printf "%-13s: %s\n" "alert-server" "AlertServer mainly include Alarms." + printf "%-13s: %s\n" "standalone-server" "Standalone server that uses embedded zookeeper and database, only for testing and demostration." + printf "%-13s: %s\n" "python-gateway-server" "Python gateway is a backend server for python API." } # init config file @@ -95,6 +97,7 @@ case "$1" in waitZK waitDatabase export MASTER_START_ENABLED=true + export LOGGER_START_ENABLED=true ;; (worker-server) waitZK @@ -115,6 +118,9 @@ case "$1" in (standalone-server) export STANDALONE_START_ENABLED=true ;; + (python-gateway-server) + export PYTHON_GATEWAY_START_ENABLED=true + ;; (help) printUsage exit 1 diff --git a/docker/docker-swarm/config.env.sh b/docker/docker-swarm/config.env.sh index afc09b0f11156e599ae5b20487190ebb22472b94..3b368f410229d00af9273cec1c42dfa7773e4ca2 100755 --- a/docker/docker-swarm/config.env.sh +++ b/docker/docker-swarm/config.env.sh @@ -39,7 +39,6 @@ DATABASE_PARAMS=characterEncoding=utf8 #============================================================================ # Registry #============================================================================ -REGISTRY_PLUGIN_DIR=lib/plugin/registry REGISTRY_PLUGIN_NAME=zookeeper REGISTRY_SERVERS=dolphinscheduler-zookeeper:2181 @@ -93,6 +92,9 @@ MASTER_TASK_COMMIT_RETRYTIMES=5 MASTER_TASK_COMMIT_INTERVAL=1000 
MASTER_MAX_CPULOAD_AVG=-1 MASTER_RESERVED_MEMORY=0.3 +MASTER_FAILOVER_INTERVAL=10 +MASTER_KILL_YARN_JOB_WHEN_HANDLE_FAILOVER=true +MASTER_PERSIST_EVENT_STATE_THREADS=10 #============================================================================ # Worker Server @@ -104,13 +106,13 @@ WORKER_HOST_WEIGHT=100 WORKER_MAX_CPULOAD_AVG=-1 WORKER_RESERVED_MEMORY=0.3 WORKER_GROUPS=default +WORKER_RETRY_REPORT_TASK_STATUES_INTERVAL=10 ALERT_LISTEN_HOST=dolphinscheduler-alert #============================================================================ # Alert Server #============================================================================ ALERT_SERVER_OPTS=-Xms512m -Xmx512m -Xmn256m -ALERT_PLUGIN_DIR=lib/plugin/alert #============================================================================ # Api Server diff --git a/docker/docker-swarm/docker-compose.yml b/docker/docker-swarm/docker-compose.yml index 3f63e79074e1730048ffca8fa224e093af5a9c91..b4f986b39b85553bf63774c4e8116f1598e60236 100644 --- a/docker/docker-swarm/docker-compose.yml +++ b/docker/docker-swarm/docker-compose.yml @@ -44,7 +44,7 @@ services: - dolphinscheduler dolphinscheduler-api: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: api-server ports: - 12345:12345 @@ -68,7 +68,7 @@ services: - dolphinscheduler dolphinscheduler-alert: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: alert-server environment: TZ: Asia/Shanghai @@ -87,7 +87,7 @@ services: - dolphinscheduler dolphinscheduler-master: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: master-server environment: TZ: Asia/Shanghai @@ -108,7 +108,7 @@ services: - dolphinscheduler dolphinscheduler-worker: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: worker-server environment: TZ: Asia/Shanghai @@ -140,4 +140,4 @@ volumes: dolphinscheduler-worker-data: 
dolphinscheduler-logs: dolphinscheduler-shared-local: - dolphinscheduler-resource-local: \ No newline at end of file + dolphinscheduler-resource-local: diff --git a/docker/docker-swarm/docker-stack.yml b/docker/docker-swarm/docker-stack.yml index d8939fd30cc2962885a0c6013aef401a1055d49e..f617e28b02d5e579337f34687aab594bc2d97871 100644 --- a/docker/docker-swarm/docker-stack.yml +++ b/docker/docker-swarm/docker-stack.yml @@ -48,7 +48,7 @@ services: replicas: 1 dolphinscheduler-api: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: api-server ports: - 12345:12345 @@ -71,7 +71,7 @@ services: replicas: 1 dolphinscheduler-alert: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: alert-server environment: TZ: Asia/Shanghai @@ -90,7 +90,7 @@ services: replicas: 1 dolphinscheduler-master: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: master-server environment: TZ: Asia/Shanghai @@ -110,7 +110,7 @@ services: replicas: 1 dolphinscheduler-worker: - image: apache/dolphinscheduler:2.0.0-SNAPSHOT + image: apache/dolphinscheduler:2.0.9 command: worker-server environment: TZ: Asia/Shanghai @@ -141,4 +141,4 @@ volumes: dolphinscheduler-worker-data: dolphinscheduler-logs: dolphinscheduler-shared-local: - dolphinscheduler-resource-local: \ No newline at end of file + dolphinscheduler-resource-local: diff --git a/docker/kubernetes/dolphinscheduler/Chart.yaml b/docker/kubernetes/dolphinscheduler/Chart.yaml index 5056392854456ad164076c9b6084ff236a6dc040..44a9ffee66a8948d0caaeb55d475243946c80f9a 100644 --- a/docker/kubernetes/dolphinscheduler/Chart.yaml +++ b/docker/kubernetes/dolphinscheduler/Chart.yaml @@ -35,18 +35,24 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. 
-version: 2.0.0 +version: 2.0.3 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 2.0.0-SNAPSHOT +appVersion: 2.0.9 dependencies: - name: postgresql version: 10.3.18 - repository: https://charts.bitnami.com/bitnami + # Due to a change in the Bitnami repo, https://charts.bitnami.com/bitnami was truncated only + # containing entries for the latest 6 months (from January 2022 on). + # This URL: https://raw.githubusercontent.com/bitnami/charts/archive-full-index/bitnami + # contains the full 'index.yaml'. + # See detail here: https://github.com/bitnami/charts/issues/10833 + repository: https://raw.githubusercontent.com/bitnami/charts/archive-full-index/bitnami condition: postgresql.enabled - name: zookeeper version: 6.5.3 - repository: https://charts.bitnami.com/bitnami + # Same as above. + repository: https://raw.githubusercontent.com/bitnami/charts/archive-full-index/bitnami condition: zookeeper.enabled diff --git a/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl b/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl index 2b0786fb31e7e5b39ac8d197ff65b485c752a3ee..5ef83a72d9b133e9c5246b45f0c325e879c60d58 100644 --- a/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl +++ b/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl @@ -166,12 +166,6 @@ Create a database environment variables. Create a registry environment variables. */}} {{- define "dolphinscheduler.registry.env_vars" -}} -- name: REGISTRY_PLUGIN_DIR - {{- if .Values.zookeeper.enabled }} - value: "lib/plugin/registry" - {{- else }} - value: {{ .Values.externalRegistry.registryPluginDir }} - {{- end }} - name: REGISTRY_PLUGIN_NAME {{- if .Values.zookeeper.enabled }} value: "zookeeper" @@ -239,4 +233,4 @@ Create a fsFileResourcePersistence volumeMount. 
- mountPath: {{ default "/dolphinscheduler" .Values.common.configmap.RESOURCE_UPLOAD_PATH | quote }} name: {{ include "dolphinscheduler.fullname" . }}-fs-file {{- end -}} -{{- end -}} \ No newline at end of file +{{- end -}} diff --git a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml index 65b5dea810943cfc1eb741c7c8675fd0c13a26d4..b2838dff4792f406a179fc3ea9134824ded2e7da 100644 --- a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml @@ -68,7 +68,7 @@ spec: - name: TZ value: {{ .Values.timezone }} {{- include "dolphinscheduler.database.env_vars" . | nindent 12 }} - {{- include "dolphinscheduler.zookeeper.env_vars" . | nindent 12 }} + {{- include "dolphinscheduler.registry.env_vars" . | nindent 12 }} {{- include "dolphinscheduler.fs_s3a.env_vars" . | nindent 12 }} envFrom: - configMapRef: diff --git a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml index 735d3cd08c2c734754892bd7080e54c154dae517..686d2609580e2dd90ded5247d123046b70538c5a 100644 --- a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml @@ -65,7 +65,7 @@ spec: - name: TZ value: {{ .Values.timezone }} {{- include "dolphinscheduler.database.env_vars" . | nindent 12 }} - {{- include "dolphinscheduler.zookeeper.env_vars" . | nindent 12 }} + {{- include "dolphinscheduler.registry.env_vars" . | nindent 12 }} {{- include "dolphinscheduler.fs_s3a.env_vars" . 
| nindent 12 }} envFrom: - configMapRef: diff --git a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml index 60b67538bed7501369230e05a5362fd51311a7f0..392ae0773bca0c96c379c1e4ee5c06bb50b9301d 100644 --- a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml @@ -69,7 +69,7 @@ spec: - name: ALERT_LISTEN_HOST value: {{ include "dolphinscheduler.fullname" . }}-alert {{- include "dolphinscheduler.database.env_vars" . | nindent 12 }} - {{- include "dolphinscheduler.zookeeper.env_vars" . | nindent 12 }} + {{- include "dolphinscheduler.registry.env_vars" . | nindent 12 }} {{- include "dolphinscheduler.fs_s3a.env_vars" . | nindent 12 }} envFrom: - configMapRef: diff --git a/docker/kubernetes/dolphinscheduler/values.yaml b/docker/kubernetes/dolphinscheduler/values.yaml index ef28b8edfbb56c771ab5db733fd8c115d6646c94..2dabecb2f96f82c8f45086333f4f0486f685d711 100644 --- a/docker/kubernetes/dolphinscheduler/values.yaml +++ b/docker/kubernetes/dolphinscheduler/values.yaml @@ -23,7 +23,7 @@ timezone: "Asia/Shanghai" image: repository: "apache/dolphinscheduler" - tag: "2.0.0-SNAPSHOT" + tag: "2.0.9" pullPolicy: "IfNotPresent" pullSecret: "" @@ -53,6 +53,10 @@ externalDatabase: ## If not exists external zookeeper, by default, Dolphinscheduler's zookeeper will use it. 
zookeeper: enabled: true + tickTime: 3000 + maxSessionTimeout: 60000 + initLimit: 300 + maxClientCnxns: 2000 fourlwCommandsWhitelist: "srvr,ruok,wchs,cons" persistence: enabled: false @@ -100,6 +104,9 @@ common: HIVE_HOME: "/opt/soft/hive" FLINK_HOME: "/opt/soft/flink" DATAX_HOME: "/opt/soft/datax" + SESSION_TIMEOUT_MS: 60000 + ORG_QUARTZ_THREADPOOL_THREADCOUNT: "25" + ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT: "1" ## Shared storage persistence mounted into api, master and worker, such as Hadoop, Spark, Flink and DataX binary package sharedStoragePersistence: enabled: false @@ -148,6 +155,7 @@ master: # cpu: "500m" ## Configmap configmap: + LOGGER_SERVER_OPTS: "-Xms512m -Xmx512m -Xmn256m" MASTER_SERVER_OPTS: "-Xms1g -Xmx1g -Xmn512m" MASTER_EXEC_THREADS: "100" MASTER_EXEC_TASK_NUM: "20" @@ -158,6 +166,11 @@ master: MASTER_TASK_COMMIT_INTERVAL: "1000" MASTER_MAX_CPULOAD_AVG: "-1" MASTER_RESERVED_MEMORY: "0.3" + MASTER_FAILOVER_INTERVAL: 10 + MASTER_KILL_YARN_JOB_WHEN_HANDLE_FAILOVER: "true" + ORG_QUARTZ_THREADPOOL_THREADCOUNT: "25" + ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT: "1" + MASTER_PERSIST_EVENT_STATE_THREADS: 10 ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes livenessProbe: @@ -225,6 +238,7 @@ worker: WORKER_MAX_CPULOAD_AVG: "-1" WORKER_RESERVED_MEMORY: "0.3" WORKER_GROUPS: "default" + WORKER_RETRY_REPORT_TASK_STATUS_INTERVAL: 600 ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes livenessProbe: @@ -299,7 +313,6 @@ alert: ## Configmap configmap: ALERT_SERVER_OPTS: "-Xms512m -Xmx512m -Xmn256m" - ALERT_PLUGIN_DIR: "lib/plugin/alert" ## Periodic probe of container liveness. Container will be restarted if the probe fails. 
Cannot be updated. ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes livenessProbe: @@ -411,4 +424,4 @@ ingress: path: "/dolphinscheduler" tls: enabled: false - secretName: "dolphinscheduler-tls" \ No newline at end of file + secretName: "dolphinscheduler-tls" diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java deleted file mode 100644 index 61518b6cbfc50260a8c1458bd518e95ec7ac0f06..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.alert.dingtalk; - -import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE; -import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; -import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; -import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; - -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.params.input.InputParam; -import org.apache.dolphinscheduler.spi.params.PasswordParam; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; -import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; -import org.apache.dolphinscheduler.spi.params.base.Validate; - -import java.util.Arrays; -import java.util.List; - -/** - * DingTalkAlertChannelFactory - */ -public class DingTalkAlertChannelFactory implements AlertChannelFactory { - @Override - public String getName() { - return "DingTalk"; - } - - @Override - public List getParams() { - InputParam webHookParam = InputParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, DingTalkParamsConstants.DING_TALK_WEB_HOOK) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); - InputParam keywordParam = InputParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, DingTalkParamsConstants.DING_TALK_KEYWORD) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); - RadioParam isEnableProxy = - RadioParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE) - .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) - .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) - .setValue(STRING_TRUE) - .addValidate(Validate.newBuilder() - .setRequired(false) - 
.build()) - .build(); - InputParam proxyParam = - InputParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY, DingTalkParamsConstants.DING_TALK_PROXY) - .addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); - - InputParam portParam = InputParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PORT, DingTalkParamsConstants.DING_TALK_PORT) - .addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); - - InputParam userParam = - InputParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_USER, DingTalkParamsConstants.DING_TALK_USER) - .addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); - PasswordParam passwordParam = PasswordParam.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, DingTalkParamsConstants.DING_TALK_PASSWORD) - .setPlaceholder("if enable use authentication, you need input password") - .build(); - - return Arrays.asList(webHookParam, keywordParam, isEnableProxy, proxyParam, portParam, userParam, passwordParam); - } - - @Override - public AlertChannel create() { - return new DingTalkAlertChannel(); - } -} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml deleted file mode 100644 index 2b0d3b666e43228403ca2675b73f5f5430c244ce..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml +++ /dev/null @@ -1,79 +0,0 @@ - - - - - dolphinscheduler-alert-plugin - org.apache.dolphinscheduler - 2.0.0-SNAPSHOT - - 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler-alert-wechat - dolphinscheduler-plugin - - - - com.google.guava - guava - - - - ch.qos.logback - logback-classic - - - - org.apache.httpcomponents - httpclient - - - - com.fasterxml.jackson.core - jackson-databind - provided - - - - junit - junit - test - - - - org.mockito - mockito-core - jar - test - - - - org.jacoco - org.jacoco.agent - runtime - test - - - 
- - - dolphinscheduler-alert-wechat-${project.version} - - \ No newline at end of file diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/exception/WeChatAlertException.java b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/exception/WeChatAlertException.java deleted file mode 100644 index f36bc2ff8a31b90bc507d727cc06128f049435bf..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/exception/WeChatAlertException.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.alert.wechat.exception; - -public class WeChatAlertException extends RuntimeException { - - /** - * Create Runtime Exception - * - * @param errMsg - Error message - */ - public WeChatAlertException(String errMsg) { - super(errMsg); - } -} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-api/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..4bf9466dedd4e3a80cfc6e42f2a3a9787f787b9b --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/pom.xml @@ -0,0 +1,36 @@ + + + + + + dolphinscheduler-alert + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-alert-api + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + + + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..fceff92d49b1dcba660c9fe2e7c3b76a4863f3a7 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannel.java @@ -0,0 +1,24 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.alert.api; + +public interface AlertChannel { + AlertResult process(AlertInfo info); +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..c889d62f32764376c8df1e8abd166900214c5463 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertChannelFactory.java @@ -0,0 +1,35 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.api; + +import org.apache.dolphinscheduler.spi.params.base.PluginParams; + +import java.util.List; + +public interface AlertChannelFactory { + String name(); + + AlertChannel create(); + /** + * Returns the configurable parameters that this plugin needs to display on the web ui + */ + List params(); + +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertConstants.java new file mode 100644 index 0000000000000000000000000000000000000000..3fae801f4fde6d435f2597d9aaf956e28aa10192 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertConstants.java @@ -0,0 +1,33 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.api; + +public final class AlertConstants { + /** + * the field name of alert show type + **/ + public static final String SHOW_TYPE = "show_type"; + + public static final String NAME_SHOW_TYPE = "showType"; + + private AlertConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java new file mode 100644 index 0000000000000000000000000000000000000000..9d1db84779c11a08cebeb0813ddbe9969c5d46d8 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertData.java @@ -0,0 +1,168 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.alert.api; + +public class AlertData { + private int id; + private String title; + private String content; + private String log; + + public AlertData(int id, String title, String content, String log) { + this.id = id; + this.title = title; + this.content = content; + this.log = log; + } + + public AlertData() { + } + + public static AlertDataBuilder builder() { + return new AlertDataBuilder(); + } + + public int getId() { + return this.id; + } + + public AlertData setId(int id) { + this.id = id; + return this; + } + + public String getTitle() { + return this.title; + } + + public AlertData setTitle(String title) { + this.title = title; + return this; + } + + public String getContent() { + return this.content; + } + + public AlertData setContent(String content) { + this.content = content; + return this; + } + + public String getLog() { + return this.log; + } + + public AlertData setLog(String log) { + this.log = log; + return this; + } + + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof AlertData)) { + return false; + } + final AlertData other = (AlertData) o; + if (!other.canEqual((Object) this)) { + return false; + } + if (this.getId() != other.getId()) { + return false; + } + final Object this$title = this.getTitle(); + final Object other$title = other.getTitle(); + if (this$title == null ? other$title != null : !this$title.equals(other$title)) { + return false; + } + final Object this$content = this.getContent(); + final Object other$content = other.getContent(); + if (this$content == null ? other$content != null : !this$content.equals(other$content)) { + return false; + } + final Object this$log = this.getLog(); + final Object other$log = other.getLog(); + if (this$log == null ? 
other$log != null : !this$log.equals(other$log)) { + return false; + } + return true; + } + + protected boolean canEqual(final Object other) { + return other instanceof AlertData; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + result = result * PRIME + this.getId(); + final Object $title = this.getTitle(); + result = result * PRIME + ($title == null ? 43 : $title.hashCode()); + final Object $content = this.getContent(); + result = result * PRIME + ($content == null ? 43 : $content.hashCode()); + final Object $log = this.getLog(); + result = result * PRIME + ($log == null ? 43 : $log.hashCode()); + return result; + } + + public String toString() { + return "AlertData(id=" + this.getId() + ", title=" + this.getTitle() + ", content=" + this.getContent() + ", log=" + this.getLog() + ")"; + } + + public static class AlertDataBuilder { + private int id; + private String title; + private String content; + private String log; + + AlertDataBuilder() { + } + + public AlertDataBuilder id(int id) { + this.id = id; + return this; + } + + public AlertDataBuilder title(String title) { + this.title = title; + return this; + } + + public AlertDataBuilder content(String content) { + this.content = content; + return this; + } + + public AlertDataBuilder log(String log) { + this.log = log; + return this; + } + + public AlertData build() { + return new AlertData(id, title, content, log); + } + + public String toString() { + return "AlertData.AlertDataBuilder(id=" + this.id + ", title=" + this.title + ", content=" + this.content + ", log=" + this.log + ")"; + } + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertInfo.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertInfo.java new file mode 100644 index 0000000000000000000000000000000000000000..6dbc8205e6ec462fa4984ff19f74a1ca93a43aed --- /dev/null +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertInfo.java @@ -0,0 +1,125 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.alert.api; + +import java.util.Map; + +public class AlertInfo { + private Map alertParams; + private AlertData alertData; + + public AlertInfo(Map alertParams, AlertData alertData) { + this.alertParams = alertParams; + this.alertData = alertData; + } + + public AlertInfo() { + } + + public static AlertInfoBuilder builder() { + return new AlertInfoBuilder(); + } + + public Map getAlertParams() { + return this.alertParams; + } + + public AlertInfo setAlertParams(Map alertParams) { + this.alertParams = alertParams; + return this; + } + + public AlertData getAlertData() { + return this.alertData; + } + + public AlertInfo setAlertData(AlertData alertData) { + this.alertData = alertData; + return this; + } + + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof AlertInfo)) { + return false; + } + final AlertInfo other = (AlertInfo) o; + if (!other.canEqual((Object) this)) { + return false; + } + final Object this$alertParams = 
this.getAlertParams(); + final Object other$alertParams = other.getAlertParams(); + if (this$alertParams == null ? other$alertParams != null : !this$alertParams.equals(other$alertParams)) { + return false; + } + final Object this$alertData = this.getAlertData(); + final Object other$alertData = other.getAlertData(); + if (this$alertData == null ? other$alertData != null : !this$alertData.equals(other$alertData)) { + return false; + } + return true; + } + + protected boolean canEqual(final Object other) { + return other instanceof AlertInfo; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $alertParams = this.getAlertParams(); + result = result * PRIME + ($alertParams == null ? 43 : $alertParams.hashCode()); + final Object $alertData = this.getAlertData(); + result = result * PRIME + ($alertData == null ? 43 : $alertData.hashCode()); + return result; + } + + public String toString() { + return "AlertInfo(alertParams=" + this.getAlertParams() + ", alertData=" + this.getAlertData() + ")"; + } + + public static class AlertInfoBuilder { + private Map alertParams; + private AlertData alertData; + + AlertInfoBuilder() { + } + + public AlertInfoBuilder alertParams(Map alertParams) { + this.alertParams = alertParams; + return this; + } + + public AlertInfoBuilder alertData(AlertData alertData) { + this.alertData = alertData; + return this; + } + + public AlertInfo build() { + return new AlertInfo(alertParams, alertData); + } + + public String toString() { + return "AlertInfo.AlertInfoBuilder(alertParams=" + this.alertParams + ", alertData=" + this.alertData + ")"; + } + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java new file mode 100644 index 0000000000000000000000000000000000000000..4343c13408f460f3dad2530bbb68d6a195a0a9bb --- 
/dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/AlertResult.java @@ -0,0 +1,123 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.alert.api; + +public class AlertResult { + private String status; + private String message; + + public AlertResult(String status, String message) { + this.status = status; + this.message = message; + } + + public AlertResult() { + } + + public static AlertResultBuilder builder() { + return new AlertResultBuilder(); + } + + public String getStatus() { + return this.status; + } + + public AlertResult setStatus(String status) { + this.status = status; + return this; + } + + public String getMessage() { + return this.message; + } + + public AlertResult setMessage(String message) { + this.message = message; + return this; + } + + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof AlertResult)) { + return false; + } + final AlertResult other = (AlertResult) o; + if (!other.canEqual((Object) this)) { + return false; + } + final Object this$status = this.getStatus(); + final Object other$status = 
other.getStatus(); + if (this$status == null ? other$status != null : !this$status.equals(other$status)) { + return false; + } + final Object this$message = this.getMessage(); + final Object other$message = other.getMessage(); + if (this$message == null ? other$message != null : !this$message.equals(other$message)) { + return false; + } + return true; + } + + protected boolean canEqual(final Object other) { + return other instanceof AlertResult; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $status = this.getStatus(); + result = result * PRIME + ($status == null ? 43 : $status.hashCode()); + final Object $message = this.getMessage(); + result = result * PRIME + ($message == null ? 43 : $message.hashCode()); + return result; + } + + public String toString() { + return "AlertResult(status=" + this.getStatus() + ", message=" + this.getMessage() + ")"; + } + + public static class AlertResultBuilder { + private String status; + private String message; + + AlertResultBuilder() { + } + + public AlertResultBuilder status(String status) { + this.status = status; + return this; + } + + public AlertResultBuilder message(String message) { + this.message = message; + return this; + } + + public AlertResult build() { + return new AlertResult(status, message); + } + + public String toString() { + return "AlertResult.AlertResultBuilder(status=" + this.status + ", message=" + this.message + ")"; + } + } +} diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/ShowType.java b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/ShowType.java new file mode 100644 index 0000000000000000000000000000000000000000..48f987752853a318185a8e3658510c7c504b2437 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-api/src/main/java/org/apache/dolphinscheduler/alert/api/ShowType.java @@ -0,0 +1,49 @@ +/* + * Licensed to Apache 
Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.alert.api; + +public enum ShowType { + /** + * 0 TABLE; + * 1 TEXT; + * 2 attachment; + * 3 TABLE+attachment; + */ + TABLE(0, "table"), + TEXT(1, "text"), + ATTACHMENT(2, "attachment"), + TABLEATTACHMENT(3, "table attachment"); + + private final int code; + private final String descp; + + ShowType(int code, String descp) { + this.code = code; + this.descp = descp; + } + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/pom.xml similarity index 46% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/pom.xml index 646d5feb75a5c60529fe06c8b1616575dd4bf459..16cda677d04f63be5a668964178fc9d69c362744 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/pom.xml @@ -15,70 +15,25 
@@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + - dolphinscheduler-alert-plugin + dolphinscheduler-alert-plugins org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - - org.apache.dolphinscheduler dolphinscheduler-alert-dingtalk - dolphinscheduler-plugin + jar - org.apache.httpcomponents httpclient + com.google.guava guava - - - ch.qos.logback - logback-classic - - - - org.slf4j - slf4j-api - - - - com.fasterxml.jackson.core - jackson-annotations - provided - - - - junit - junit - test - - - - org.mockito - mockito-core - jar - test - - - - org.jacoco - org.jacoco.agent - runtime - test - - - - dolphinscheduler-alert-dingtalk-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java similarity index 80% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java index 6b13f24d8680e8b492d70667224d5e00c4459bd8..74c440fe76c57c174e24330f97988fa67f194fd3 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannel.java @@ -17,21 +17,17 @@ package 
org.apache.dolphinscheduler.plugin.alert.dingtalk; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -/** - * DingTalkAlertChannel - */ -public class DingTalkAlertChannel implements AlertChannel { +public final class DingTalkAlertChannel implements AlertChannel { @Override public AlertResult process(AlertInfo alertInfo) { - AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..2f7afee07e6e116a221027b7cba82d477b54f118 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactory.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.alert.dingtalk; + +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_FALSE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; + +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; +import org.apache.dolphinscheduler.spi.params.PasswordParam; +import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; + +import java.util.Arrays; +import java.util.List; + +import com.google.auto.service.AutoService; + +@AutoService(AlertChannelFactory.class) +public final class DingTalkAlertChannelFactory implements AlertChannelFactory { + @Override + public String name() { + return "DingTalk"; + } + + @Override + public AlertChannel create() { + return new DingTalkAlertChannel(); + } + + @Override + public List params() { + + InputParam webHookParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, DingTalkParamsConstants.DING_TALK_WEB_HOOK) + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); + + InputParam keywordParam = InputParam + 
.newBuilder(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, DingTalkParamsConstants.DING_TALK_KEYWORD) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + InputParam secretParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_SECRET, DingTalkParamsConstants.DING_TALK_SECRET) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + RadioParam msgTypeParam = RadioParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_MSG_TYPE, DingTalkParamsConstants.DING_TALK_MSG_TYPE) + .addParamsOptions(new ParamsOptions(DingTalkParamsConstants.DING_TALK_MSG_TYPE_TEXT, DingTalkParamsConstants.DING_TALK_MSG_TYPE_TEXT, false)) + .addParamsOptions(new ParamsOptions(DingTalkParamsConstants.DING_TALK_MSG_TYPE_MARKDOWN, DingTalkParamsConstants.DING_TALK_MSG_TYPE_MARKDOWN, false)) + .setValue(DingTalkParamsConstants.DING_TALK_MSG_TYPE_TEXT) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + InputParam atMobilesParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_MOBILES, DingTalkParamsConstants.DING_TALK_AT_MOBILES) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + InputParam atUserIdsParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_USERIDS, DingTalkParamsConstants.DING_TALK_AT_USERIDS) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + RadioParam isAtAll = RadioParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_AT_ALL, DingTalkParamsConstants.DING_TALK_AT_ALL) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + RadioParam isEnableProxy = RadioParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, 
DingTalkParamsConstants.DING_TALK_PROXY_ENABLE) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + InputParam proxyParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PROXY, DingTalkParamsConstants.DING_TALK_PROXY) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + InputParam portParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PORT, DingTalkParamsConstants.DING_TALK_PORT) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + + InputParam userParam = InputParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_USER, DingTalkParamsConstants.DING_TALK_USER) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); + PasswordParam passwordParam = PasswordParam + .newBuilder(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, DingTalkParamsConstants.DING_TALK_PASSWORD) + .setPlaceholder("if enable use authentication, you need input password") + .build(); + + return Arrays.asList(webHookParam, keywordParam, secretParam, msgTypeParam, atMobilesParam, atUserIdsParam, isAtAll, isEnableProxy, proxyParam, portParam, userParam, passwordParam); + } + +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java similarity index 50% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java index e94da80f2032d49bbb17f632742945e7cdc5ce01..e5b667ae30dcb5510a17dee53984977bf6a30bec 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkParamsConstants.java @@ -17,29 +17,47 @@ package org.apache.dolphinscheduler.plugin.alert.dingtalk; -/** - * DingTalkParamsConstants - */ -public class DingTalkParamsConstants { - +public final class DingTalkParamsConstants { + static final String DING_TALK_PROXY_ENABLE = "$t('isEnableProxy')"; + static final String NAME_DING_TALK_PROXY_ENABLE = "IsEnableProxy"; - static final String DING_TALK_PROXY_ENABLE = "isEnableProxy"; - static final String DING_TALK_WEB_HOOK = "webhook"; + static final String DING_TALK_WEB_HOOK = "$t('webhook')"; static final String NAME_DING_TALK_WEB_HOOK = "WebHook"; - static final String DING_TALK_KEYWORD = "keyword"; + + static final String DING_TALK_KEYWORD = "$t('keyword')"; static final String NAME_DING_TALK_KEYWORD = "Keyword"; - static final String NAME_DING_TALK_PROXY_ENABLE = "IsEnableProxy"; - static final String DING_TALK_PROXY = "proxy"; + + static final String DING_TALK_SECRET = "$t('secret')"; + static final String NAME_DING_TALK_SECRET = "Secret"; + + static final String DING_TALK_MSG_TYPE = "$t('msgType')"; + static final String NAME_DING_TALK_MSG_TYPE = "MsgType"; + + static final String DING_TALK_MSG_TYPE_TEXT = "text"; + static final String DING_TALK_MSG_TYPE_MARKDOWN = "markdown"; + + static final String DING_TALK_AT_MOBILES = "$t('atMobiles')"; + static final String NAME_DING_TALK_AT_MOBILES = "AtMobiles"; + + static final String 
DING_TALK_AT_USERIDS = "$t('atUserIds')"; + static final String NAME_DING_TALK_AT_USERIDS = "AtUserIds"; + + static final String DING_TALK_AT_ALL = "$t('isAtAll')"; + static final String NAME_DING_TALK_AT_ALL = "IsAtAll"; + + static final String DING_TALK_PROXY = "$t('proxy')"; static final String NAME_DING_TALK_PROXY = "Proxy"; - static final String DING_TALK_PORT = "port"; + + static final String DING_TALK_PORT = "$t('port')"; static final String NAME_DING_TALK_PORT = "Port"; - static final String DING_TALK_USER = "user"; + + static final String DING_TALK_USER = "$t('user')"; static final String NAME_DING_TALK_USER = "User"; - static final String DING_TALK_PASSWORD = "password"; + + static final String DING_TALK_PASSWORD = "$t('password')"; static final String NAME_DING_TALK_PASSWORD = "Password"; private DingTalkParamsConstants() { - throw new IllegalStateException("Utility class"); + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } - } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java similarity index 46% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java index 0d87e4779a2bb38c4bc89cccc9c78dc08e4a0d7b..8519e5468c7b7687f2baa077aac461d9d9aad477 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java @@ -17,9 +17,10 @@ package org.apache.dolphinscheduler.plugin.alert.dingtalk; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.StringUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; @@ -36,23 +37,39 @@ import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import java.io.IOException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; +import java.util.Objects; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Ding Talk Sender + *

+ * https://open.dingtalk.com/document/robots/custom-robot-access + * https://open.dingtalk.com/document/robots/customize-robot-security-settings + *

*/ -public class DingTalkSender { +public final class DingTalkSender { + private static final Logger logger = LoggerFactory.getLogger(DingTalkSender.class); - private String url; + private final String url; + private final String keyword; + private final String secret; + private String msgType; - private String keyword; + private final String atMobiles; + private final String atUserIds; + private final Boolean atAll; - private Boolean enableProxy; + private final Boolean enableProxy; private String proxy; @@ -65,19 +82,25 @@ public class DingTalkSender { DingTalkSender(Map config) { url = config.get(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK); keyword = config.get(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD); + secret = config.get(DingTalkParamsConstants.NAME_DING_TALK_SECRET); + msgType = config.get(DingTalkParamsConstants.NAME_DING_TALK_MSG_TYPE); + + atMobiles = config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_MOBILES); + atUserIds = config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_USERIDS); + atAll = Boolean.valueOf(config.get(DingTalkParamsConstants.NAME_DING_TALK_AT_ALL)); + enableProxy = Boolean.valueOf(config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE)); if (Boolean.TRUE.equals(enableProxy)) { port = Integer.parseInt(config.get(DingTalkParamsConstants.NAME_DING_TALK_PORT)); proxy = config.get(DingTalkParamsConstants.NAME_DING_TALK_PROXY); - user = config.get(DingTalkParamsConstants.DING_TALK_USER); + user = config.get(DingTalkParamsConstants.NAME_DING_TALK_USER); password = config.get(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD); } - } - private static HttpPost constructHttpPost(String url, String msg, String charset) { + private static HttpPost constructHttpPost(String url, String msg) { HttpPost post = new HttpPost(url); - StringEntity entity = new StringEntity(msg, charset); + StringEntity entity = new StringEntity(msg, StandardCharsets.UTF_8); post.setEntity(entity); post.addHeader("Content-Type", "application/json; 
charset=utf-8"); return post; @@ -99,18 +122,7 @@ public class DingTalkSender { return RequestConfig.custom().setProxy(httpProxy).build(); } - private static String textToJsonString(String text) { - Map items = new HashMap<>(); - items.put("msgtype", "text"); - Map textContent = new HashMap<>(); - byte[] byt = StringUtils.getBytesUtf8(text); - String txt = StringUtils.newStringUtf8(byt); - textContent.put("content", txt); - items.put("text", textContent); - return JSONUtils.toJsonString(items); - } - - private static AlertResult checkSendDingTalkSendMsgResult(String result) { + private AlertResult checkSendDingTalkSendMsgResult(String result) { AlertResult alertResult = new AlertResult(); alertResult.setStatus("false"); @@ -135,6 +147,13 @@ public class DingTalkSender { return alertResult; } + /** + * send dingtalk msg handler + * + * @param title title + * @param content content + * @return + */ public AlertResult sendDingTalkMsg(String title, String content) { AlertResult alertResult; try { @@ -151,8 +170,9 @@ public class DingTalkSender { private String sendMsg(String title, String content) throws IOException { - String msgToJson = textToJsonString(title + content + "#" + keyword); - HttpPost httpPost = constructHttpPost(url, msgToJson, "UTF-8"); + String msg = generateMsgJson(title, content); + + HttpPost httpPost = constructHttpPost(org.apache.dolphinscheduler.spi.utils.StringUtils.isBlank(secret) ? 
url : generateSignedUrl(), msg); CloseableHttpClient httpClient; if (Boolean.TRUE.equals(enableProxy)) { @@ -173,19 +193,143 @@ public class DingTalkSender { } finally { response.close(); } - logger.info("Ding Talk send title :{},content : {}, resp: {}", title, content, resp); + logger.info("Ding Talk send msg :{}, resp: {}", msg, resp); return resp; } finally { httpClient.close(); } } - public static class DingTalkSendMsgResponse { + /** + * generate msg json + * + * @param title title + * @param content content + * @return msg + */ + private String generateMsgJson(String title, String content) { + if (org.apache.dolphinscheduler.spi.utils.StringUtils.isBlank(msgType)) { + msgType = DingTalkParamsConstants.DING_TALK_MSG_TYPE_TEXT; + } + Map items = new HashMap<>(); + items.put("msgtype", msgType); + Map text = new HashMap<>(); + items.put(msgType, text); + + if (DingTalkParamsConstants.DING_TALK_MSG_TYPE_MARKDOWN.equals(msgType)) { + generateMarkdownMsg(title, content, text); + } else { + generateTextMsg(title, content, text); + } + + setMsgAt(items); + return JSONUtils.toJsonString(items); + + } + + /** + * generate text msg + * + * @param title title + * @param content content + * @param text text + */ + private void generateTextMsg(String title, String content, Map text) { + StringBuilder builder = new StringBuilder(title); + builder.append("\n"); + builder.append(content); + if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(keyword)) { + builder.append(" "); + builder.append(keyword); + } + byte[] byt = StringUtils.getBytesUtf8(builder.toString()); + String txt = StringUtils.newStringUtf8(byt); + text.put("content", txt); + } + + /** + * generate markdown msg + * + * @param title title + * @param content content + * @param text text + */ + private void generateMarkdownMsg(String title, String content, Map text) { + StringBuilder builder = new StringBuilder(content); + if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(keyword)) { + 
builder.append(" "); + builder.append(keyword); + } + builder.append("\n\n"); + if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atMobiles)) { + Arrays.stream(atMobiles.split(",")).forEach(value -> { + builder.append("@"); + builder.append(value); + builder.append(" "); + }); + } + if (org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atUserIds)) { + Arrays.stream(atUserIds.split(",")).forEach(value -> { + builder.append("@"); + builder.append(value); + builder.append(" "); + }); + } + + byte[] byt = StringUtils.getBytesUtf8(builder.toString()); + String txt = StringUtils.newStringUtf8(byt); + text.put("title", title); + text.put("text", txt); + } + + /** + * configure msg @person + * + * @param items items + */ + private void setMsgAt(Map items) { + Map at = new HashMap<>(); + + String[] atMobileArray = org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atMobiles) ? atMobiles.split(",") : new String[0]; + String[] atUserArray = org.apache.dolphinscheduler.spi.utils.StringUtils.isNotBlank(atUserIds) ? atUserIds.split(",") : new String[0]; + boolean isAtAll = Objects.isNull(atAll) ? 
false : atAll; + + at.put("atMobiles", atMobileArray); + at.put("atUserIds", atUserArray); + at.put("isAtAll", isAtAll); + + items.put("at", at); + } + + /** + * generate sign url + * + * @return sign url + */ + private String generateSignedUrl() { + Long timestamp = System.currentTimeMillis(); + String stringToSign = timestamp + "\n" + secret; + String sign = org.apache.dolphinscheduler.spi.utils.StringUtils.EMPTY; + try { + Mac mac = Mac.getInstance("HmacSHA256"); + mac.init(new SecretKeySpec(secret.getBytes("UTF-8"), "HmacSHA256")); + byte[] signData = mac.doFinal(stringToSign.getBytes("UTF-8")); + sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)),"UTF-8"); + } catch (Exception e) { + logger.error("generate sign error, message:{}", e); + } + return url + "×tamp=" + timestamp + "&sign=" + sign; + } + + static final class DingTalkSendMsgResponse { private Integer errcode; private String errmsg; + public DingTalkSendMsgResponse() { + } + public Integer getErrcode() { - return errcode; + return this.errcode; } public void setErrcode(Integer errcode) { @@ -193,12 +337,49 @@ public class DingTalkSender { } public String getErrmsg() { - return errmsg; + return this.errmsg; } public void setErrmsg(String errmsg) { this.errmsg = errmsg; } - } + @Override + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof DingTalkSendMsgResponse)) { + return false; + } + final DingTalkSendMsgResponse other = (DingTalkSendMsgResponse) o; + final Object this$errcode = this.getErrcode(); + final Object other$errcode = other.getErrcode(); + if (this$errcode == null ? other$errcode != null : !this$errcode.equals(other$errcode)) { + return false; + } + final Object this$errmsg = this.getErrmsg(); + final Object other$errmsg = other.getErrmsg(); + if (this$errmsg == null ? 
other$errmsg != null : !this$errmsg.equals(other$errmsg)) { + return false; + } + return true; + } + + @Override + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $errcode = this.getErrcode(); + result = result * PRIME + ($errcode == null ? 43 : $errcode.hashCode()); + final Object $errmsg = this.getErrmsg(); + result = result * PRIME + ($errmsg == null ? 43 : $errmsg.hashCode()); + return result; + } + + @Override + public String toString() { + return "DingTalkSender.DingTalkSendMsgResponse(errcode=" + this.getErrcode() + ", errmsg=" + this.getErrmsg() + ")"; + } + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java similarity index 89% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java index 7c25f1ebf02e31feb18bace5b8c76513beb572af..ea1cd1a0b978ff58653cc4ab5c13e8f2eed37dc5 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java @@ -17,28 +17,22 @@ package org.apache.dolphinscheduler.plugin.alert.dingtalk; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import 
org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.util.List; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; -/** - * DingTalkAlertChannelFactoryTest - */ -@Ignore public class DingTalkAlertChannelFactoryTest { - @Test public void testGetParams() { DingTalkAlertChannelFactory dingTalkAlertChannelFactory = new DingTalkAlertChannelFactory(); - List params = dingTalkAlertChannelFactory.getParams(); + List params = dingTalkAlertChannelFactory.params(); JSONUtils.toJsonString(params); - Assert.assertEquals(7, params.size()); + Assert.assertEquals(12, params.size()); } @Test diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java similarity index 85% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java index bc17d4185b31f02d5dd3bba30413eca63aa271f1..791a96f2f545b184f9ca9bc04aa168cf120e3bcb 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/test/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSenderTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.dingtalk; -import 
org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.HashMap; import java.util.Map; @@ -26,18 +26,17 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -/** - * DingTalkSenderTest - */ public class DingTalkSenderTest { - private static Map dingTalkConfig = new HashMap<>(); + private static final Map dingTalkConfig = new HashMap<>(); @Before public void initDingTalkConfig() { - dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, "keyWord"); + dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_KEYWORD, "keyword"); dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_WEB_HOOK, "url"); + dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_MSG_TYPE, DingTalkParamsConstants.DING_TALK_MSG_TYPE_MARKDOWN); + dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, "false"); dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PASSWORD, "password"); dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PORT, "9988"); @@ -51,7 +50,7 @@ public class DingTalkSenderTest { dingTalkConfig.put(DingTalkParamsConstants.NAME_DING_TALK_PROXY_ENABLE, "true"); dingTalkSender = new DingTalkSender(dingTalkConfig); AlertResult alertResult = dingTalkSender.sendDingTalkMsg("title", "content test"); - Assert.assertEquals("false",alertResult.getStatus()); + Assert.assertEquals("false", alertResult.getStatus()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..226c0acc8907e3d5f7fdebe682f1edbff9054823 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/pom.xml @@ -0,0 +1,48 @@ + + + + + dolphinscheduler-alert-plugins + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + 
dolphinscheduler-alert-email + jar + + + + org.apache.poi + poi + + + org.apache.poi + poi-ooxml + + + + com.google.guava + guava + + + + org.apache.commons + commons-email + + + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java similarity index 71% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java index 3dbf0b8fb9c4723c9af46e9b04d5cb60f01eb70b..c5cdc333fd982ee774fbcac736ddf1237837769a 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java @@ -17,21 +17,17 @@ package org.apache.dolphinscheduler.plugin.alert.email; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** - * email alert channel . 
use email to seed the alertInfo - */ -public class EmailAlertChannel implements AlertChannel { - private static final Logger logger = LoggerFactory.getLogger(EmailAlertChannel.class); +public final class EmailAlertChannel implements AlertChannel { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(EmailAlertChannel.class); @Override public AlertResult process(AlertInfo info) { @@ -44,25 +40,24 @@ public class EmailAlertChannel implements AlertChannel { MailSender mailSender = new MailSender(paramsMap); AlertResult alertResult = mailSender.sendMails(alert.getTitle(), alert.getContent()); - //send flag - boolean flag = false; + boolean flag; if (alertResult == null) { alertResult = new AlertResult(); alertResult.setStatus("false"); alertResult.setMessage("alert send error."); - logger.info("alert send error : {}", alertResult.getMessage()); + log.info("alert send error : {}", alertResult.getMessage()); return alertResult; } flag = Boolean.parseBoolean(String.valueOf(alertResult.getStatus())); if (flag) { - logger.info("alert send success"); + log.info("alert send success"); alertResult.setMessage("email send success."); } else { alertResult.setMessage("alert send error."); - logger.info("alert send error : {}", alertResult.getMessage()); + log.info("alert send error : {}", alertResult.getMessage()); } return alertResult; diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java similarity index 47% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java index 2eefb4932f09ad225a9facae6278d9152b972c40..a2c52618774a2abed5e48a17d96398fee179539d 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactory.java @@ -22,100 +22,99 @@ import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; -import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.spi.params.PasswordParam; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import java.util.ArrayList; import java.util.List; -/** - * email alert factory - */ -public class EmailAlertChannelFactory implements 
AlertChannelFactory { +import com.google.auto.service.AutoService; + +@AutoService(AlertChannelFactory.class) +public final class EmailAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "Email"; } @Override - public List getParams() { - + public List params() { List paramsList = new ArrayList<>(); InputParam receivesParam = InputParam.newBuilder(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS, MailParamsConstants.PLUGIN_DEFAULT_EMAIL_RECEIVERS) - .setPlaceholder("please input receives") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setPlaceholder("please input receives") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam receiveCcsParam = InputParam.newBuilder(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERCCS, MailParamsConstants.PLUGIN_DEFAULT_EMAIL_RECEIVERCCS) - .build(); + .build(); InputParam mailSmtpHost = InputParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_HOST, MailParamsConstants.MAIL_SMTP_HOST) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam mailSmtpPort = InputParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_PORT, MailParamsConstants.MAIL_SMTP_PORT) - .setValue("25") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setValue("25") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam mailSender = InputParam.newBuilder(MailParamsConstants.NAME_MAIL_SENDER, MailParamsConstants.MAIL_SENDER) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); RadioParam enableSmtpAuth = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_AUTH, MailParamsConstants.MAIL_SMTP_AUTH) - .addParamsOptions(new 
ParamsOptions(STRING_YES, STRING_TRUE, false)) - .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) - .setValue(STRING_TRUE) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_TRUE) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam mailUser = InputParam.newBuilder(MailParamsConstants.NAME_MAIL_USER, MailParamsConstants.MAIL_USER) - .setPlaceholder("if enable use authentication, you need input user") - .build(); + .setPlaceholder("if enable use authentication, you need input user") + .build(); PasswordParam mailPassword = PasswordParam.newBuilder(MailParamsConstants.NAME_MAIL_PASSWD, MailParamsConstants.MAIL_PASSWD) - .setPlaceholder("if enable use authentication, you need input password") - .build(); + .setPlaceholder("if enable use authentication, you need input password") + .build(); RadioParam enableTls = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE, MailParamsConstants.MAIL_SMTP_STARTTLS_ENABLE) - .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) - .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) - .setValue(STRING_FALSE) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); RadioParam enableSsl = RadioParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE, MailParamsConstants.MAIL_SMTP_SSL_ENABLE) - .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) - .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) - .setValue(STRING_FALSE) - 
.addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_FALSE) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam sslTrust = InputParam.newBuilder(MailParamsConstants.NAME_MAIL_SMTP_SSL_TRUST, MailParamsConstants.MAIL_SMTP_SSL_TRUST) - .setValue("*") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); - - RadioParam showType = RadioParam.newBuilder(AlertConstants.SHOW_TYPE, AlertConstants.SHOW_TYPE) - .addParamsOptions(new ParamsOptions(ShowType.TABLE.getDescp(), ShowType.TABLE.getDescp(), false)) - .addParamsOptions(new ParamsOptions(ShowType.TEXT.getDescp(), ShowType.TEXT.getDescp(), false)) - .addParamsOptions(new ParamsOptions(ShowType.ATTACHMENT.getDescp(), ShowType.ATTACHMENT.getDescp(), false)) - .addParamsOptions(new ParamsOptions(ShowType.TABLEATTACHMENT.getDescp(), ShowType.TABLEATTACHMENT.getDescp(), false)) - .setValue(ShowType.TABLE.getDescp()) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .setValue("*") + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); + + RadioParam showType = RadioParam.newBuilder(AlertConstants.NAME_SHOW_TYPE, AlertConstants.SHOW_TYPE) + .addParamsOptions(new ParamsOptions(ShowType.TABLE.getDescp(), ShowType.TABLE.getDescp(), false)) + .addParamsOptions(new ParamsOptions(ShowType.TEXT.getDescp(), ShowType.TEXT.getDescp(), false)) + .addParamsOptions(new ParamsOptions(ShowType.ATTACHMENT.getDescp(), ShowType.ATTACHMENT.getDescp(), false)) + .addParamsOptions(new ParamsOptions(ShowType.TABLEATTACHMENT.getDescp(), ShowType.TABLEATTACHMENT.getDescp(), false)) + .setValue(ShowType.TABLE.getDescp()) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); paramsList.add(receivesParam); paramsList.add(receiveCcsParam); diff 
--git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java similarity index 52% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java index 5eecaa07a73cfdd9d13605fe2cc07900e61d9804..4c6e1d37eea2af1787dbbb2439226c4451783581 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailConstants.java @@ -17,19 +17,11 @@ package org.apache.dolphinscheduler.plugin.alert.email; -public class EmailConstants { - - private EmailConstants() { - throw new IllegalStateException(EmailConstants.class.getName()); - } - - +public final class EmailConstants { public static final String XLS_FILE_PATH = "xls.file.path"; public static final String MAIL_TRANSPORT_PROTOCOL = "mail.transport.protocol"; - public static final String DEFAULT_SMTP_PORT = "25"; - public static final String TEXT_HTML_CHARSET_UTF_8 = "text/html;charset=utf-8"; public static final int NUMBER_1000 = 1000; @@ -42,32 +34,23 @@ public class EmailConstants { public static final String TR_END = ""; - public static final String TITLE = "title"; - - public static final String CONTENT = "content"; - public static final String TH = ""; public static final String TH_END = ""; - public static final String MARKDOWN_QUOTE = ">"; - - public static final String 
MARKDOWN_ENTER = "\n"; - - public static final String HTML_HEADER_PREFIX = new StringBuilder("") - .append("") - .append("") - .append("dolphinscheduler") - .append("") - .append("") - .append("") - .append("") - .append(" ") - .toString(); + public static final String HTML_HEADER_PREFIX = "" + + "" + + "" + + "dolphinscheduler" + + "" + + "" + + "" + + "" + + "
"; public static final String TABLE_BODY_HTML_TAIL = "
"; @@ -76,4 +59,8 @@ public class EmailConstants { public static final String EXCEL_SUFFIX_XLSX = ".xlsx"; public static final String SINGLE_SLASH = "/"; + + private EmailConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java similarity index 92% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java index 90069c6e07faf3e62f5ef766994c1e6e554e5a5e..1986f1c08fd01ef497b576bf088c2e648c9e932d 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java @@ -36,21 +36,15 @@ import java.util.List; import java.util.Map; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** - * excel utils - */ -public class ExcelUtils { +public final class ExcelUtils { + private static final int XLSX_WINDOW_ROW = 10000; + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ExcelUtils.class); private ExcelUtils() { - throw new IllegalStateException("Utility class"); + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } - private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class); - - private static final int XLSX_WINDOW_ROW = 
10000; - /** * generate excel file * @@ -61,14 +55,14 @@ public class ExcelUtils { public static void genExcelFile(String content, String title, String xlsFilePath) { File file = new File(xlsFilePath); if (!file.exists() && !file.mkdirs()) { - logger.error("Create xlsx directory error, path:{}", xlsFilePath); + log.error("Create xlsx directory error, path:{}", xlsFilePath); throw new AlertEmailException("Create xlsx directory error"); } List itemsList = JSONUtils.toList(content, LinkedHashMap.class); if (CollectionUtils.isEmpty(itemsList)) { - logger.error("itemsList is null"); + log.error("itemsList is null"); throw new AlertEmailException("itemsList is null"); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java similarity index 94% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java index 9b49b4705b7227001ff04bff960e90663773d721..e7b51432889120dd0380a2b433aa9b03f8deb3b3 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java @@ -17,14 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.email; -/** - * mail plugin params json use - */ -public class MailParamsConstants { - - private MailParamsConstants() { - throw new 
IllegalStateException("Utility class"); - } +public final class MailParamsConstants { public static final String PLUGIN_DEFAULT_EMAIL_RECEIVERS = "$t('receivers')"; public static final String NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS = "receivers"; @@ -62,4 +55,7 @@ public class MailParamsConstants { public static final String MAIL_SMTP_SSL_TRUST = "mail.smtp.ssl.trust"; public static final String NAME_MAIL_SMTP_SSL_TRUST = "smtpSslTrust"; + private MailParamsConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java similarity index 79% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java index 33701de7bd0f9c7db1ea403e7e97798f262468d7..ba8acab010898fe19c2a6cfb61f789b01c113bbf 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java @@ -17,76 +17,53 @@ package org.apache.dolphinscheduler.plugin.alert.email; -import static java.util.Objects.requireNonNull; - +import com.sun.mail.smtp.SMTPProvider; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.mail.EmailException; +import org.apache.commons.mail.HtmlEmail; +import 
org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.plugin.alert.email.exception.AlertEmailException; import org.apache.dolphinscheduler.plugin.alert.email.template.AlertTemplate; import org.apache.dolphinscheduler.plugin.alert.email.template.DefaultHTMLTemplate; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.AlertResult; -import org.apache.dolphinscheduler.spi.alert.ShowType; import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.mail.EmailException; -import org.apache.commons.mail.HtmlEmail; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Properties; +import org.slf4j.Logger; import javax.activation.CommandMap; import javax.activation.MailcapCommandMap; -import javax.mail.Authenticator; -import javax.mail.Message; -import javax.mail.MessagingException; -import javax.mail.PasswordAuthentication; -import javax.mail.Session; -import javax.mail.Transport; -import javax.mail.internet.InternetAddress; -import javax.mail.internet.MimeBodyPart; -import javax.mail.internet.MimeMessage; -import javax.mail.internet.MimeMultipart; -import javax.mail.internet.MimeUtility; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import javax.mail.*; +import javax.mail.internet.*; +import java.io.File; +import java.io.IOException; +import java.util.*; -import com.sun.mail.smtp.SMTPProvider; +import static java.util.Objects.requireNonNull; -/** - * mail utils - */ -public class MailSender { - - public static final Logger logger = LoggerFactory.getLogger(MailSender.class); - - private List receivers; - private List receiverCcs; - private String 
mailProtocol = "SMTP"; - private String mailSmtpHost; - private String mailSmtpPort; - private String mailSenderEmail; - private String enableSmtpAuth; - private String mailUser; - private String mailPasswd; - private String mailUseStartTLS; - private String mailUseSSL; +public final class MailSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(MailSender.class); + + private final List receivers; + private final List receiverCcs; + private final String mailProtocol = "SMTP"; + private final String mailSmtpHost; + private final String mailSmtpPort; + private final String mailSenderEmail; + private final String enableSmtpAuth; + private final String mailUser; + private final String mailPasswd; + private final String mailUseStartTLS; + private final String mailUseSSL; + private final String sslTrust; + private final String showType; + private final AlertTemplate alertTemplate; + private final String mustNotNull = " must not be null"; private String xlsFilePath; - private String sslTrust; - private String showType; - private AlertTemplate alertTemplate; - private String mustNotNull = " must not be null"; public MailSender(Map config) { - String receiversConfig = config.get(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS); if (receiversConfig == null || "".equals(receiversConfig)) { - throw new AlertEmailException(MailParamsConstants.PLUGIN_DEFAULT_EMAIL_RECEIVERS + mustNotNull); + throw new AlertEmailException(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS + mustNotNull); } receivers = Arrays.asList(receiversConfig.split(",")); @@ -95,37 +72,37 @@ public class MailSender { receiverCcs = new ArrayList<>(); if (receiverCcsConfig != null && !"".equals(receiverCcsConfig)) { - receiverCcs = Arrays.asList(receiverCcsConfig.split(",")); + receiverCcs.addAll(Arrays.asList(receiverCcsConfig.split(","))); } mailSmtpHost = config.get(MailParamsConstants.NAME_MAIL_SMTP_HOST); - requireNonNull(mailSmtpHost, 
MailParamsConstants.MAIL_SMTP_HOST + mustNotNull); + requireNonNull(mailSmtpHost, MailParamsConstants.NAME_MAIL_SMTP_HOST + mustNotNull); mailSmtpPort = config.get(MailParamsConstants.NAME_MAIL_SMTP_PORT); - requireNonNull(mailSmtpPort, MailParamsConstants.MAIL_SMTP_PORT + mustNotNull); + requireNonNull(mailSmtpPort, MailParamsConstants.NAME_MAIL_SMTP_PORT + mustNotNull); mailSenderEmail = config.get(MailParamsConstants.NAME_MAIL_SENDER); - requireNonNull(mailSenderEmail, MailParamsConstants.MAIL_SENDER + mustNotNull); + requireNonNull(mailSenderEmail, MailParamsConstants.NAME_MAIL_SENDER + mustNotNull); enableSmtpAuth = config.get(MailParamsConstants.NAME_MAIL_SMTP_AUTH); mailUser = config.get(MailParamsConstants.NAME_MAIL_USER); - requireNonNull(mailUser, MailParamsConstants.MAIL_USER + mustNotNull); + requireNonNull(mailUser, MailParamsConstants.NAME_MAIL_USER + mustNotNull); mailPasswd = config.get(MailParamsConstants.NAME_MAIL_PASSWD); - requireNonNull(mailPasswd, MailParamsConstants.MAIL_PASSWD + mustNotNull); + requireNonNull(mailPasswd, MailParamsConstants.NAME_MAIL_PASSWD + mustNotNull); mailUseStartTLS = config.get(MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE); - requireNonNull(mailUseStartTLS, MailParamsConstants.MAIL_SMTP_STARTTLS_ENABLE + mustNotNull); + requireNonNull(mailUseStartTLS, MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE + mustNotNull); mailUseSSL = config.get(MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE); - requireNonNull(mailUseSSL, MailParamsConstants.MAIL_SMTP_SSL_ENABLE + mustNotNull); + requireNonNull(mailUseSSL, MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE + mustNotNull); sslTrust = config.get(MailParamsConstants.NAME_MAIL_SMTP_SSL_TRUST); - requireNonNull(sslTrust, MailParamsConstants.MAIL_SMTP_SSL_TRUST + mustNotNull); + requireNonNull(sslTrust, MailParamsConstants.NAME_MAIL_SMTP_SSL_TRUST + mustNotNull); - showType = config.get(AlertConstants.SHOW_TYPE); - requireNonNull(showType, AlertConstants.SHOW_TYPE + 
mustNotNull); + showType = config.get(AlertConstants.NAME_SHOW_TYPE); + requireNonNull(showType, AlertConstants.NAME_SHOW_TYPE + mustNotNull); xlsFilePath = config.get(EmailConstants.XLS_FILE_PATH); if (StringUtils.isBlank(xlsFilePath)) { @@ -145,16 +122,6 @@ public class MailSender { return sendMails(this.receivers, this.receiverCcs, title, content); } - /** - * send mail to receivers - * - * @param title email title - * @param content email content - */ - public AlertResult sendMailsToReceiverOnly(String title, String content) { - return sendMails(this.receivers, null, title, content); - } - /** * send mail * @@ -206,8 +173,8 @@ public class MailSender { try { String partContent = (showType.equals(ShowType.ATTACHMENT.getDescp()) - ? "Please see the attachment " + title + EmailConstants.EXCEL_SUFFIX_XLSX - : htmlTable(content, false)); + ? "Please see the attachment " + title + EmailConstants.EXCEL_SUFFIX_XLSX + : htmlTable(content, false)); attachment(title, content, partContent); @@ -396,12 +363,12 @@ public class MailSender { public void deleteFile(File file) { if (file.exists()) { if (file.delete()) { - logger.info("delete success: {}", file.getAbsolutePath()); + log.info("delete success: {}", file.getAbsolutePath()); } else { - logger.info("delete fail: {}", file.getAbsolutePath()); + log.info("delete fail: {}", file.getAbsolutePath()); } } else { - logger.info("file not exists: {}", file.getAbsolutePath()); + log.info("file not exists: {}", file.getAbsolutePath()); } } @@ -409,7 +376,7 @@ public class MailSender { * handle exception */ private void handleException(AlertResult alertResult, Exception e) { - logger.error("Send email to {} failed", receivers, e); + log.error("Send email to {} failed", receivers, e); alertResult.setMessage("Send email to {" + String.join(",", receivers) + "} failed," + e.toString()); } diff --git 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java similarity index 83% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java index 466ccc06de802a98955cd55c2f4c422dcaf9f279..5b64d8de1efca2622b3fc32403596afb42461e42 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/exception/AlertEmailException.java @@ -18,22 +18,10 @@ package org.apache.dolphinscheduler.plugin.alert.email.exception; public class AlertEmailException extends RuntimeException { - - /** - * Create Runtime exception - * - * @param errMsg - Error message - */ public AlertEmailException(String errMsg) { super(errMsg); } - /** - * Create Runtime exception - * - * @param errMsg - Error message - * @param cause - cause - */ public AlertEmailException(String errMsg, Throwable cause) { super(errMsg, cause); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java similarity index 86% rename from 
dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java index dec993d4d0d5e9710b8d80ff26496cc2a3a7f890..7f66e659b445b899597a36846dcb4402dc32efd6 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/AlertTemplate.java @@ -17,19 +17,16 @@ package org.apache.dolphinscheduler.plugin.alert.email.template; -import org.apache.dolphinscheduler.spi.alert.ShowType; +import org.apache.dolphinscheduler.alert.api.ShowType; -/** - * alert message template - */ public interface AlertTemplate { /** * get a message from a specified alert template * - * @param content alert message content + * @param content alert message content * @param showType show type - * @param showAll whether to show all + * @param showAll whether to show all * @return a message from a specified alert template */ String getMessageFromTemplate(String content, ShowType showType, boolean showAll); @@ -37,7 +34,7 @@ public interface AlertTemplate { /** * default showAll is true * - * @param content alert message content + * @param content alert message content * @param showType show type * @return a message from a specified alert template */ diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java 
similarity index 95% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java index 57187aa70cb98eb5dab8be6ec5e7bc4b1697e696..433cfda3f773632638afdfa8bb386f69a02596b7 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.plugin.alert.email.template; import static java.util.Objects.requireNonNull; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.plugin.alert.email.EmailConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; @@ -36,9 +36,6 @@ import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; -/** - * the default html alert message template - */ public class DefaultHTMLTemplate implements AlertTemplate { public static final Logger logger = LoggerFactory.getLogger(DefaultHTMLTemplate.class); @@ -89,7 +86,7 @@ public class DefaultHTMLTemplate implements AlertTemplate { Map.Entry entry = iterator.next(); t.append(EmailConstants.TH).append(entry.getKey()).append(EmailConstants.TH_END); - cs.append(EmailConstants.TD).append(String.valueOf(entry.getValue())).append(EmailConstants.TD_END); + cs.append(EmailConstants.TD).append(entry.getValue()).append(EmailConstants.TD_END); } 
t.append(EmailConstants.TR_END); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java similarity index 82% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java index 37a11e47fb598d5b1255a783cbd8e3d3e2210b60..5bb7c1d504ba4c30cc2cc5ba0e34994b4a65f5af 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.email; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import java.util.List; @@ -25,27 +25,14 @@ import java.util.List; import org.junit.Assert; import org.junit.Test; -/** - * EmailAlertChannelFactory Tester. - * - * @version 1.0 - * @since
Aug 20, 2020
- */ public class EmailAlertChannelFactoryTest { - - /** - * Method: getParams() - */ @Test public void testGetParams() { EmailAlertChannelFactory emailAlertChannelFactory = new EmailAlertChannelFactory(); - List params = emailAlertChannelFactory.getParams(); + List params = emailAlertChannelFactory.params(); Assert.assertEquals(12, params.size()); } - /** - * Method: create() - */ @Test public void testCreate() { EmailAlertChannelFactory emailAlertChannelFactory = new EmailAlertChannelFactory(); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java similarity index 95% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java index 9a87709e9a05f65babd161aa0458bb700e0ab944..a0237af50acb161b85db05c571f0fbeda3cef2c9 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannelTest.java @@ -17,18 +17,18 @@ package org.apache.dolphinscheduler.plugin.alert.email; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; -import org.apache.dolphinscheduler.spi.alert.ShowType; -import 
org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.spi.params.PasswordParam; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.util.ArrayList; @@ -39,9 +39,6 @@ import java.util.Map; import org.junit.Assert; import org.junit.Test; -/** - * EmailAlertChannel Tester. 
- */ public class EmailAlertChannelTest { /** diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java similarity index 100% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtilsTest.java diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java similarity index 80% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java index 5b0b084d72784e0fea5aff2fdf3822c6f395a728..540bcdee57558e79b4352337a3891a836803bb67 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java @@ -17,34 +17,24 @@ package org.apache.dolphinscheduler.plugin.alert.email; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.ShowType; import 
org.apache.dolphinscheduler.plugin.alert.email.template.AlertTemplate; import org.apache.dolphinscheduler.plugin.alert.email.template.DefaultHTMLTemplate; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; import org.apache.dolphinscheduler.spi.utils.JSONUtils; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - */ +import java.util.*; + public class MailUtilsTest { private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class); - + static MailSender mailSender; private static Map emailConfig = new HashMap<>(); - private static AlertTemplate alertTemplate; - static MailSender mailSender; - @BeforeClass public static void initEmailConfig() { emailConfig.put(MailParamsConstants.NAME_MAIL_PROTOCOL, "smtp"); @@ -59,7 +49,7 @@ public class MailUtilsTest { emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_SSL_TRUST, "false"); emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS, "347801120@qq.com"); emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERCCS, "347801120@qq.com"); - emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TEXT.getDescp()); + emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TEXT.getDescp()); alertTemplate = new DefaultHTMLTemplate(); mailSender = new MailSender(emailConfig); } @@ -68,19 +58,19 @@ public class MailUtilsTest { public void testSendMails() { String content = "[\"id:69\"," - + "\"name:UserBehavior-0--1193959466\"," - + "\"Job name: Start workflow\"," - + "\"State: SUCCESS\"," - + "\"Recovery:NO\"," - + "\"Run time: 1\"," - + "\"Start time: 2018-08-06 10:31:34.0\"," - + "\"End time: 2018-08-06 10:31:49.0\"," - + "\"Host: 192.168.xx.xx\"," - + "\"Notify group :4\"]"; + + "\"name:UserBehavior-0--1193959466\"," 
+ + "\"Job name: Start workflow\"," + + "\"State: SUCCESS\"," + + "\"Recovery:NO\"," + + "\"Run time: 1\"," + + "\"Start time: 2018-08-06 10:31:34.0\"," + + "\"End time: 2018-08-06 10:31:49.0\"," + + "\"Host: 192.168.xx.xx\"," + + "\"Notify group :4\"]"; mailSender.sendMails( - "Mysql Exception", - content); + "Mysql Exception", + content); } public String list2String() { @@ -113,7 +103,7 @@ public class MailUtilsTest { public void testSendTableMail() { String title = "Mysql Exception"; String content = list2String(); - emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TABLE.getDescp()); + emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLE.getDescp()); mailSender = new MailSender(emailConfig); mailSender.sendMails(title, content); } @@ -121,7 +111,7 @@ public class MailUtilsTest { @Test public void testAttachmentFile() throws Exception { String content = list2String(); - emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.ATTACHMENT.getDescp()); + emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.ATTACHMENT.getDescp()); mailSender = new MailSender(emailConfig); mailSender.sendMails("gaojing", content); } @@ -129,7 +119,7 @@ public class MailUtilsTest { @Test public void testTableAttachmentFile() throws Exception { String content = list2String(); - emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TABLEATTACHMENT.getDescp()); + emailConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLEATTACHMENT.getDescp()); mailSender = new MailSender(emailConfig); mailSender.sendMails("gaojing", content); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java similarity index 71% rename from 
dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java index 3d941962d91e57c745054b7002fe2630554f71b9..d53c95bfbd97e7255a180d233f613a569630557a 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplateTest.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.plugin.alert.email.template; import static org.junit.Assert.assertEquals; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.plugin.alert.email.EmailConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.util.ArrayList; @@ -31,19 +31,12 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * test class for DefaultHTMLTemplate - */ public class DefaultHTMLTemplateTest { private static final Logger logger = LoggerFactory.getLogger(DefaultHTMLTemplateTest.class); - /** - * only need test method GetMessageFromTemplate - */ @Test public void testGetMessageFromTemplate() { - DefaultHTMLTemplate template = new DefaultHTMLTemplate(); String tableTypeMessage = template.getMessageFromTemplate(list2String(), ShowType.TABLE, true); @@ -55,11 +48,7 @@ public class DefaultHTMLTemplateTest { assertEquals(textTypeMessage, generateMockTextTypeResultByHand()); } - /** - * generate some simulation data - */ private String list2String() { - LinkedHashMap map1 = new LinkedHashMap<>(); 
map1.put("mysql service name", "mysql200"); map1.put("mysql address", "192.168.xx.xx"); @@ -84,22 +73,20 @@ public class DefaultHTMLTemplateTest { } private String generateMockTableTypeResultByHand() { - return EmailConstants.HTML_HEADER_PREFIX - + "" - + "mysql service namemysql addressdatabase client connectionsportno index of number" - + "\n" - + "mysql200192.168.xx.xx190330680" - + "mysql210192.168.xx.xx90330610" - + EmailConstants.TABLE_BODY_HTML_TAIL; + + "" + + "mysql service namemysql addressdatabase client connectionsportno index of number" + + "\n" + + "mysql200192.168.xx.xx190330680" + + "mysql210192.168.xx.xx90330610" + + EmailConstants.TABLE_BODY_HTML_TAIL; } private String generateMockTextTypeResultByHand() { - return EmailConstants.HTML_HEADER_PREFIX - + "{\"mysql service name\":\"mysql200\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"190\",\"port\":\"3306\",\"no index of number\":\"80\"}" - + "{\"mysql service name\":\"mysql210\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"90\",\"port\":\"3306\",\"no index of number\":\"10\"}" - + EmailConstants.TABLE_BODY_HTML_TAIL; + + "{\"mysql service name\":\"mysql200\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"190\",\"port\":\"3306\",\"no index of number\":\"80\"}" + + "{\"mysql service name\":\"mysql210\",\"mysql address\":\"192.168.xx.xx\",\"database client connections\":\"90\",\"port\":\"3306\",\"no index of number\":\"10\"}" + + EmailConstants.TABLE_BODY_HTML_TAIL; } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..210bd734540524765292f5c1cc4fcd26c067a48b --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/pom.xml @@ -0,0 +1,38 @@ + + + + + 
dolphinscheduler-alert-plugins + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-alert-feishu + jar + + + + org.apache.httpcomponents + httpclient + + + com.google.guava + guava + + + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java similarity index 81% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java index 3bbdaa999756c36940f63f5f222d2d510310f8b1..509eebe6157cb1c164fbb0f258caf0006cfe6a64 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannel.java @@ -17,17 +17,16 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -public class FeiShuAlertChannel implements AlertChannel { +public final class FeiShuAlertChannel implements 
AlertChannel { @Override public AlertResult process(AlertInfo alertInfo) { - AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java similarity index 52% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java index 6eb3376a4d54edb12c0798fcf2243884b2055e36..da78b9f39b5f96910834b51d3b813ecde0dc9b57 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactory.java @@ -22,59 +22,62 @@ import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_NO; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_TRUE; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_YES; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.PasswordParam; -import 
org.apache.dolphinscheduler.spi.params.radio.RadioParam; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import java.util.Arrays; import java.util.List; -public class FeiShuAlertChannelFactory implements AlertChannelFactory { +import com.google.auto.service.AutoService; + +@AutoService(AlertChannelFactory.class) +public final class FeiShuAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "Feishu"; } @Override - public List getParams() { + public List params() { InputParam webHookParam = InputParam.newBuilder(FeiShuParamsConstants.NAME_WEB_HOOK, FeiShuParamsConstants.WEB_HOOK) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); RadioParam isEnableProxy = - RadioParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE, FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE) - .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) - .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) - .setValue(STRING_TRUE) - .addValidate(Validate.newBuilder() - .setRequired(false) - .build()) - .build(); + RadioParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY_ENABLE, FeiShuParamsConstants.FEI_SHU_PROXY_ENABLE) + .addParamsOptions(new ParamsOptions(STRING_YES, STRING_TRUE, false)) + .addParamsOptions(new ParamsOptions(STRING_NO, STRING_FALSE, false)) + .setValue(STRING_TRUE) + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .build(); InputParam proxyParam = - InputParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY, FeiShuParamsConstants.FEI_SHU_PROXY) - 
.addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); + InputParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PROXY, FeiShuParamsConstants.FEI_SHU_PROXY) + .addValidate(Validate.newBuilder() + .setRequired(false).build()) + .build(); InputParam portParam = InputParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PORT, FeiShuParamsConstants.FEI_SHU_PORT) - .addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(false).build()) + .build(); InputParam userParam = - InputParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_USER, FeiShuParamsConstants.FEI_SHU_USER) - .addValidate(Validate.newBuilder() - .setRequired(false).build()) - .build(); + InputParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_USER, FeiShuParamsConstants.FEI_SHU_USER) + .addValidate(Validate.newBuilder() + .setRequired(false).build()) + .build(); PasswordParam passwordParam = PasswordParam.newBuilder(FeiShuParamsConstants.NAME_FEI_SHU_PASSWORD, FeiShuParamsConstants.FEI_SHU_PASSWORD) - .setPlaceholder("if enable use authentication, you need input password") - .build(); + .setPlaceholder("if enable use authentication, you need input password") + .build(); return Arrays.asList(webHookParam, isEnableProxy, proxyParam, portParam, userParam, passwordParam); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java similarity index 91% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java index 0b3c329792140ef5403217230c3242f6a7c368a7..bcbad1786010a8ee423dd4d37d2abbe6a8beb357 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuParamsConstants.java @@ -17,12 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; -public class FeiShuParamsConstants { - - private FeiShuParamsConstants() { - throw new IllegalStateException("Utility class"); - } - +public final class FeiShuParamsConstants { static final String WEB_HOOK = "webhook"; static final String NAME_WEB_HOOK = "webHook"; @@ -46,4 +41,8 @@ public class FeiShuParamsConstants { static final String FEI_SHU_PASSWORD = "password"; static final String NAME_FEI_SHU_PASSWORD = "password"; + + private FeiShuParamsConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java similarity index 66% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java index 
8fdafe71829640db909c5fd681584aedfa217eba..dd40c755ba875879efb4b04d93a0a688232a8d45 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java @@ -17,8 +17,8 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.commons.codec.binary.StringUtils; @@ -31,23 +31,18 @@ import org.apache.http.util.EntityUtils; import java.io.IOException; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonProperty; -public class FeiShuSender { - - private static final Logger logger = LoggerFactory.getLogger(FeiShuSender.class); - - private String url; - - private Boolean enableProxy; +public final class FeiShuSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(FeiShuSender.class); + private final String url; + private final Boolean enableProxy; private String proxy; @@ -70,7 +65,6 @@ public class FeiShuSender { } private static String textToJsonString(AlertData alertData) { - Map items = new HashMap<>(2); items.put("msg_type", "text"); Map textContent = new HashMap<>(); @@ -87,14 +81,14 @@ public class FeiShuSender { if (org.apache.dolphinscheduler.spi.utils.StringUtils.isBlank(result)) { alertResult.setMessage("send fei shu msg error"); - logger.info("send fei shu msg error,fei shu server resp is null"); + log.info("send 
fei shu msg error,fei shu server resp is null"); return alertResult; } FeiShuSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, FeiShuSendMsgResponse.class); if (null == sendMsgResponse) { alertResult.setMessage("send fei shu msg fail"); - logger.info("send fei shu msg error,resp error"); + log.info("send fei shu msg error,resp error"); return alertResult; } if (sendMsgResponse.statusCode == 0) { @@ -103,7 +97,7 @@ public class FeiShuSender { return alertResult; } alertResult.setMessage(String.format("alert send fei shu msg error : %s", sendMsgResponse.getStatusMessage())); - logger.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(), sendMsgResponse.getExtra()); + log.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(), sendMsgResponse.getExtra()); return alertResult; } @@ -118,9 +112,7 @@ public class FeiShuSender { StringBuilder contents = new StringBuilder(100); contents.append(String.format("`%s`%n", alertData.getTitle())); for (Map map : list) { - Iterator> entries = map.entrySet().iterator(); - while (entries.hasNext()) { - Entry entry = entries.next(); + for (Entry entry : (Iterable>) map.entrySet()) { String key = entry.getKey(); String value = entry.getValue().toString(); contents.append(key + ":" + value); @@ -138,7 +130,7 @@ public class FeiShuSender { String resp = sendMsg(alertData); return checkSendFeiShuSendMsgResult(resp); } catch (Exception e) { - logger.info("send fei shu alert msg exception : {}", e.getMessage()); + log.info("send fei shu alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); alertResult.setStatus("false"); alertResult.setMessage("send fei shu alert fail."); @@ -161,7 +153,7 @@ public class FeiShuSender { int statusCode = response.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - logger.error("send feishu message error, return http status code: {} ", statusCode); + log.error("send feishu message 
error, return http status code: {} ", statusCode); } String resp; try { @@ -171,14 +163,14 @@ public class FeiShuSender { } finally { response.close(); } - logger.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(), resp); + log.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(), resp); return resp; } finally { httpClient.close(); } } - public static class FeiShuSendMsgResponse { + static final class FeiShuSendMsgResponse { @JsonProperty("Extra") private String extra; @JsonProperty("StatusCode") @@ -186,29 +178,76 @@ public class FeiShuSender { @JsonProperty("StatusMessage") private String statusMessage; + public FeiShuSendMsgResponse() { + } + public String getExtra() { - return extra; + return this.extra; } + @JsonProperty("Extra") public void setExtra(String extra) { this.extra = extra; } public Integer getStatusCode() { - return statusCode; + return this.statusCode; } + @JsonProperty("StatusCode") public void setStatusCode(Integer statusCode) { this.statusCode = statusCode; } public String getStatusMessage() { - return statusMessage; + return this.statusMessage; } + @JsonProperty("StatusMessage") public void setStatusMessage(String statusMessage) { this.statusMessage = statusMessage; } - } + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof FeiShuSendMsgResponse)) { + return false; + } + final FeiShuSendMsgResponse other = (FeiShuSendMsgResponse) o; + final Object this$extra = this.getExtra(); + final Object other$extra = other.getExtra(); + if (this$extra == null ? other$extra != null : !this$extra.equals(other$extra)) { + return false; + } + final Object this$statusCode = this.getStatusCode(); + final Object other$statusCode = other.getStatusCode(); + if (this$statusCode == null ? 
other$statusCode != null : !this$statusCode.equals(other$statusCode)) { + return false; + } + final Object this$statusMessage = this.getStatusMessage(); + final Object other$statusMessage = other.getStatusMessage(); + if (this$statusMessage == null ? other$statusMessage != null : !this$statusMessage.equals(other$statusMessage)) { + return false; + } + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $extra = this.getExtra(); + result = result * PRIME + ($extra == null ? 43 : $extra.hashCode()); + final Object $statusCode = this.getStatusCode(); + result = result * PRIME + ($statusCode == null ? 43 : $statusCode.hashCode()); + final Object $statusMessage = this.getStatusMessage(); + result = result * PRIME + ($statusMessage == null ? 43 : $statusMessage.hashCode()); + return result; + } + + public String toString() { + return "FeiShuSender.FeiShuSendMsgResponse(extra=" + this.getExtra() + ", statusCode=" + this.getStatusCode() + ", statusMessage=" + this.getStatusMessage() + ")"; + } + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java similarity index 92% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java index 3d143579d49a7fa08439e51625f0bd99a26ad2dc..eca9b07d15ba1e5e95ec640b705c1b878e50b2e4 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/HttpRequestUtil.java @@ -28,7 +28,10 @@ import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -public class HttpRequestUtil { +public final class HttpRequestUtil { + private HttpRequestUtil() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } public static CloseableHttpClient getHttpClient(boolean enableProxy, String proxy, Integer port, String user, String password) { if (enableProxy) { diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java similarity index 92% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java index d73355dbc07755d3356969bc1b821a63586cb31d..ce51e00474f4a74a7e1b82cbe6ee87e52f2fd56a 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; -import 
org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.utils.JSONUtils; @@ -31,7 +31,7 @@ public class FeiShuAlertChannelFactoryTest { @Test public void testGetParams() { FeiShuAlertChannelFactory feiShuAlertChannelFactory = new FeiShuAlertChannelFactory(); - List params = feiShuAlertChannelFactory.getParams(); + List params = feiShuAlertChannelFactory.params(); JSONUtils.toJsonString(params); Assert.assertEquals(6, params.size()); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java similarity index 74% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java index 05110d42fee76c2e857007700ee3d112d0708c24..8d5dfdbc6ab1814f1fc376c1f9d5bd1865f04a0f 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/test/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSenderTest.java @@ -17,8 +17,8 @@ package org.apache.dolphinscheduler.plugin.alert.feishu; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertData; +import 
org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.HashMap; import java.util.Map; @@ -50,23 +50,23 @@ public class FeiShuSenderTest { @Test public void testFormatContent() { String alertMsg = "[\n" - + " {\n" - + " \"owner\": \"dolphinscheduler\",\n" - + " \"processEndTime\": \"2021-01-29 19:01:11\",\n" - + " \"processHost\": \"10.81.129.4:5678\",\n" - + " \"processId\": 2926,\n" - + " \"processName\": \"3-20210129190038108\",\n" - + " \"processStartTime\": \"2021-01-29 19:00:38\",\n" - + " \"processState\": \"SUCCESS\",\n" - + " \"processType\": \"START_PROCESS\",\n" - + " \"projectId\": 2,\n" - + " \"projectName\": \"testdelproject\",\n" - + " \"recovery\": \"NO\",\n" - + " \"retryTimes\": 0,\n" - + " \"runTimes\": 1,\n" - + " \"taskId\": 0\n" - + " }\n" - + "]"; + + " {\n" + + " \"owner\": \"dolphinscheduler\",\n" + + " \"processEndTime\": \"2021-01-29 19:01:11\",\n" + + " \"processHost\": \"10.81.129.4:5678\",\n" + + " \"processId\": 2926,\n" + + " \"processName\": \"3-20210129190038108\",\n" + + " \"processStartTime\": \"2021-01-29 19:00:38\",\n" + + " \"processState\": \"SUCCESS\",\n" + + " \"processType\": \"START_PROCESS\",\n" + + " \"projectId\": 2,\n" + + " \"projectName\": \"testdelproject\",\n" + + " \"recovery\": \"NO\",\n" + + " \"retryTimes\": 0,\n" + + " \"runTimes\": 1,\n" + + " \"taskId\": 0\n" + + " }\n" + + "]"; AlertData alertData = new AlertData(); alertData.setTitle(""); alertData.setContent(alertMsg); @@ -90,7 +90,7 @@ public class FeiShuSenderTest { AlertResult alertResult = feiShuSender.checkSendFeiShuSendMsgResult(""); Assert.assertFalse(Boolean.valueOf(alertResult.getStatus())); AlertResult alertResult2 = feiShuSender.checkSendFeiShuSendMsgResult("123"); - Assert.assertEquals("send fei shu msg fail",alertResult2.getMessage()); + Assert.assertEquals("send fei shu msg fail", alertResult2.getMessage()); String response = "{\"StatusCode\":\"0\",\"extra\":\"extra\",\"StatusMessage\":\"StatusMessage\"}"; AlertResult 
alertResult3 = feiShuSender.checkSendFeiShuSendMsgResult(response); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..f87f23bd7cd75ca3e5c280f75caddda2662f38b6 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/pom.xml @@ -0,0 +1,45 @@ + + + + + dolphinscheduler-alert-plugins + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-alert-http + jar + + + + com.google.guava + guava + + + + org.apache.httpcomponents + httpclient + + + + com.fasterxml.jackson.core + jackson-databind + provided + + + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java similarity index 78% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java index cb550b75414494a7fce31c20ff1b8528b39fe413..14b416785dde7d184ff1dc39d31b7e2ee03a9d46 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannel.java @@ -17,20 +17,16 @@ package org.apache.dolphinscheduler.plugin.alert.http; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; 
-import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -/** - * http alert channel,use sms message to seed the alertInfo - */ -public class HttpAlertChannel implements AlertChannel { +public final class HttpAlertChannel implements AlertChannel { @Override public AlertResult process(AlertInfo alertInfo) { - AlertData alertData = alertInfo.getAlertData(); Map paramsMap = alertInfo.getAlertParams(); if (null == paramsMap) { diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java similarity index 52% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java index d54885eb7feda2c8f07b933af93178cb067bd085..28b4c16e72e70ec763538c9f840b8b4a219cc9ed 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactory.java @@ -17,56 +17,56 @@ package org.apache.dolphinscheduler.plugin.alert.http; 
-import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; import java.util.Arrays; import java.util.List; -/** - * http alert factory - */ -public class HttpAlertChannelFactory implements AlertChannelFactory { +import com.google.auto.service.AutoService; + +@AutoService(AlertChannelFactory.class) +public final class HttpAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "Http"; } @Override - public List getParams() { + public List params() { InputParam url = InputParam.newBuilder(HttpAlertConstants.URL, HttpAlertConstants.URL) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam headerParams = InputParam.newBuilder(HttpAlertConstants.HEADER_PARAMS, HttpAlertConstants.HEADER_PARAMS) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam bodyParams = InputParam.newBuilder(HttpAlertConstants.BODY_PARAMS, HttpAlertConstants.BODY_PARAMS) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam contentField = InputParam.newBuilder(HttpAlertConstants.CONTENT_FIELD, HttpAlertConstants.CONTENT_FIELD) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + 
.addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam requestType = InputParam.newBuilder(HttpAlertConstants.REQUEST_TYPE, HttpAlertConstants.REQUEST_TYPE) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); return Arrays.asList(url, requestType, headerParams, bodyParams, contentField); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java similarity index 89% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java index 965860d868a8e984d8df7900085ddaff7af1cb17..58cac57c3aefe1d76350136ccd8ae2a41edc46bb 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertConstants.java @@ -17,11 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.http; -public class HttpAlertConstants { - - private HttpAlertConstants() { - } - +public final class HttpAlertConstants { public static final String URL = "url"; public static final String HEADER_PARAMS = "headerParams"; @@ -32,4 +28,7 @@ public class HttpAlertConstants { public static final String REQUEST_TYPE = "requestType"; + private HttpAlertConstants() { + throw new 
UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java similarity index 89% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java index 7b9190494bc0738edf113ec3e0aa5327ea8b48ab..a04437bb8c4fe86b9c44c0fde1b17c2fabb62630 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java @@ -17,10 +17,10 @@ package org.apache.dolphinscheduler.plugin.alert.http; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; - import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; @@ -30,48 +30,29 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import 
com.fasterxml.jackson.databind.node.ObjectNode; - -/** - * http send message - */ -public class HttpSender { - - public static final Logger logger = LoggerFactory.getLogger(HttpSender.class); - - private String url; - - private final String headerParams; - - private final String bodyParams; - - private final String contentField; - - private final String requestType; - - private HttpRequestBase httpRequest; - - +public final class HttpSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(HttpSender.class); private static final String URL_SPLICE_CHAR = "?"; - /** * request type post */ private static final String REQUEST_TYPE_POST = "POST"; - /** * request type get */ private static final String REQUEST_TYPE_GET = "GET"; - private static final String DEFAULT_CHARSET = "utf-8"; + private final String headerParams; + private final String bodyParams; + private final String contentField; + private final String requestType; + private String url; + private HttpRequestBase httpRequest; public HttpSender(Map paramsMap) { @@ -102,7 +83,7 @@ public class HttpSender { alertResult.setStatus("true"); alertResult.setMessage(resp); } catch (Exception e) { - logger.error("send http alert msg exception : {}", e.getMessage()); + log.error("send http alert msg exception : {}", e.getMessage()); alertResult.setStatus("false"); alertResult.setMessage("send http request alert fail."); } @@ -157,16 +138,15 @@ public class HttpSender { /** * set body params */ - private void setMsgInRequestBody(String msg) { + private void setMsgInRequestBody(String msg) { ObjectNode objectNode = JSONUtils.parseObject(bodyParams); //set msg content field objectNode.put(contentField, msg); try { - StringEntity entity = new StringEntity(bodyParams, DEFAULT_CHARSET); - ((HttpPost)httpRequest).setEntity(entity); + StringEntity entity = new StringEntity(JSONUtils.toJsonString(objectNode), DEFAULT_CHARSET); + ((HttpPost) httpRequest).setEntity(entity); } catch (Exception e) { - 
logger.error("send http alert msg exception : {}", e.getMessage()); + log.error("send http alert msg exception : {}", e.getMessage()); } } - } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java similarity index 93% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java index 25181ebd2616c4e9278624318c31a9ae718b3c90..089f78f3af1afb38020ad45b01b8cb8664325b72 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.http; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import java.util.List; @@ -26,9 +26,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -/** - * HttpAlertChannelFactory UT - */ public class HttpAlertChannelFactoryTest { private HttpAlertChannelFactory httpAlertChannelFactory; @@ -41,7 +38,7 @@ public class HttpAlertChannelFactoryTest { @Test public void getParamsTest() { - List pluginParamsList = httpAlertChannelFactory.getParams(); + List pluginParamsList = 
httpAlertChannelFactory.params(); Assert.assertEquals(5, pluginParamsList.size()); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java similarity index 70% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java index 2d29407e71914292180d599dcbb43f373729a1fb..ca63902aae341fc4aaa25a00cdd9405bb0df0b6d 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertChannelTest.java @@ -17,13 +17,13 @@ package org.apache.dolphinscheduler.plugin.alert.http; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; -import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import 
java.util.ArrayList; @@ -33,9 +33,6 @@ import java.util.Map; import org.junit.Assert; import org.junit.Test; -/** - * HttpAlertChannel UT - */ public class HttpAlertChannelTest { @Test @@ -71,29 +68,29 @@ public class HttpAlertChannelTest { List paramsList = new ArrayList<>(); InputParam urlParam = InputParam.newBuilder("url", "url") - .setValue("http://www.baidu.com") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .setValue("http://www.baidu.com") + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam headerParams = InputParam.newBuilder("headerParams", "headerParams") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue("{\"Content-Type\":\"application/json\"}") - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue("{\"Content-Type\":\"application/json\"}") + .build(); InputParam bodyParams = InputParam.newBuilder("bodyParams", "bodyParams") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue("{\"number\":\"13457654323\"}") - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue("{\"number\":\"13457654323\"}") + .build(); InputParam content = InputParam.newBuilder("contentField", "contentField") - .setValue("content") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .setValue("content") + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam requestType = InputParam.newBuilder("requestType", "requestType") - .setValue("POST") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .setValue("POST") + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); paramsList.add(urlParam); paramsList.add(headerParams); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java similarity index 87% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java index d59c4d47bb06fe305f546338f754a94baef94a81..fa0dfe80f16b67a7142ed830cadf2e676f8a786c 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSenderTest.java @@ -17,30 +17,26 @@ package org.apache.dolphinscheduler.plugin.alert.http; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.junit.Assert; +import org.junit.Test; import java.util.HashMap; import java.util.Map; -import org.junit.Assert; -import org.junit.Test; - -/** - * HttpSender UT - */ public class HttpSenderTest { @Test public void sendTest() { - Map paramsMap = new HashMap<>(); - paramsMap.put(HttpAlertConstants.URL, "http://www.baidu.com"); + paramsMap.put(HttpAlertConstants.URL, "https://httpbin.org/post"); paramsMap.put(HttpAlertConstants.REQUEST_TYPE, "POST"); paramsMap.put(HttpAlertConstants.HEADER_PARAMS, "{\"Content-Type\":\"application/json\"}"); paramsMap.put(HttpAlertConstants.BODY_PARAMS, "{\"number\":\"13457654323\"}"); paramsMap.put(HttpAlertConstants.CONTENT_FIELD, "content"); HttpSender httpSender = new HttpSender(paramsMap); AlertResult alertResult = httpSender.send("Fault tolerance warning"); + Assert.assertTrue(alertResult.getMessage().contains("\"content\": \"Fault 
tolerance warning\"")); Assert.assertEquals("true", alertResult.getStatus()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..1cd731f839c55235ccbcd5ea2feba723056d7fea --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/pom.xml @@ -0,0 +1,34 @@ + + + + + dolphinscheduler-alert-plugins + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-alert-script + jar + + + + com.google.guava + guava + + + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java similarity index 88% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java index b8816724c7732a727191f9f19619e7adcdb3db4e..51e567b47e74697d9d7d59756482ba0f74bee293 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/OSUtils.java @@ -17,13 +17,9 @@ package org.apache.dolphinscheduler.plugin.alert.script; -/** - * OSUtils - */ -public class OSUtils { - +public final class OSUtils { private OSUtils() { - throw new UnsupportedOperationException("Construct OSUtils"); + throw new 
UnsupportedOperationException("This is a utility class and cannot be instantiated"); } static Boolean isWindows() { diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java similarity index 84% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java index d63a350051dc9d16ae977038f827e77a634ff76d..7008267877a3efa78c99cce15fbcb19a99d7c8b8 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java @@ -20,17 +20,12 @@ package org.apache.dolphinscheduler.plugin.alert.script; import java.io.IOException; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** - * ProcessUtils - */ -public class ProcessUtils { - - private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); +public final class ProcessUtils { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ProcessUtils.class); private ProcessUtils() { - throw new IllegalStateException("Utility class"); + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } /** @@ -53,7 +48,7 @@ public class ProcessUtils { errorStreamGobbler.start(); return process.waitFor(); } catch (IOException | InterruptedException e) { - logger.error("execute alert script error 
{}", e.getMessage()); + log.error("execute alert script error {}", e.getMessage()); Thread.currentThread().interrupt(); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java similarity index 77% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java index af0c27615febd4ce8f9b819584411caf78751467..bd52955b747b728ed14c4af020b94a8d1a934b17 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannel.java @@ -17,18 +17,14 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -/** - * ScriptAlertChannel - */ -public class ScriptAlertChannel implements AlertChannel { - +public final class ScriptAlertChannel implements AlertChannel { @Override public AlertResult 
process(AlertInfo alertinfo) { AlertData alertData = alertinfo.getAlertData(); @@ -36,6 +32,6 @@ public class ScriptAlertChannel implements AlertChannel { if (null == paramsMap) { return new AlertResult("false", "script params is null"); } - return new ScriptSender(paramsMap).sendScriptAlert(alertData.getTitle(),alertData.getContent()); + return new ScriptSender(paramsMap).sendScriptAlert(alertData.getTitle(), alertData.getContent()); } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java similarity index 52% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java index 67cfc3931f384ef2d5b11e11f9ca5f83cad77c34..aacde95ba11303777cd3f1617c809b9029e30ee7 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactory.java @@ -17,49 +17,49 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.params.input.InputParam; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; 
+import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import java.util.Arrays; import java.util.List; -/** - * ScriptAlertChannelFactory - */ -public class ScriptAlertChannelFactory implements AlertChannelFactory { +import com.google.auto.service.AutoService; +@AutoService(AlertChannelFactory.class) +public final class ScriptAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "Script"; } @Override - public List getParams() { + public List params() { InputParam scriptUserParam = InputParam.newBuilder(ScriptParamsConstants.NAME_SCRIPT_USER_PARAMS, ScriptParamsConstants.SCRIPT_USER_PARAMS) - .addValidate(Validate.newBuilder() - .setRequired(false) - .build()) - .setPlaceholder("please enter your custom parameters, which will be passed to you when calling your script") - .build(); + .addValidate(Validate.newBuilder() + .setRequired(false) + .build()) + .setPlaceholder("please enter your custom parameters, which will be passed to you when calling your script") + .build(); // need check file type and file exist InputParam scriptPathParam = InputParam.newBuilder(ScriptParamsConstants.NAME_SCRIPT_PATH, ScriptParamsConstants.SCRIPT_PATH) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .setPlaceholder("please upload the file to the disk directory of the alert server, and ensure that the path is absolute and has the corresponding access rights") - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .setPlaceholder("please upload the file to the disk directory of the alert server," + + " and ensure that the path is absolute 
and has the corresponding access rights") + .build(); RadioParam scriptTypeParams = RadioParam.newBuilder(ScriptParamsConstants.NAME_SCRIPT_TYPE, ScriptParamsConstants.SCRIPT_TYPE) - .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getDescp(), false)) - .setValue(ScriptType.SHELL.getDescp()) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .addParamsOptions(new ParamsOptions(ScriptType.SHELL.getDescp(), ScriptType.SHELL.getDescp(), false)) + .setValue(ScriptType.SHELL.getDescp()) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); return Arrays.asList(scriptUserParam, scriptPathParam, scriptTypeParams); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java similarity index 89% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java index 5b096d54ad4096a38d40dcbd603e11acae283a0a..1f1da4a368275477637dd94a0ecca98047b32144 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptParamsConstants.java @@ -17,14 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.script; -/** - * ScriptParamsConstants - */ -public class ScriptParamsConstants { - - private 
ScriptParamsConstants() { - throw new IllegalStateException("Utility class"); - } +public final class ScriptParamsConstants { static final String SCRIPT_TYPE = "type"; @@ -37,4 +30,8 @@ public class ScriptParamsConstants { static final String SCRIPT_USER_PARAMS = "user.params"; static final String NAME_SCRIPT_USER_PARAMS = "userParams"; + + private ScriptParamsConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java similarity index 71% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java index 8e33b792d8633d518bfd812caea5dcfe8b3641a0..2791fc1d96a6a6dadce278511e1c91db318121b4 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java @@ -17,31 +17,20 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import org.apache.dolphinscheduler.spi.alert.AlertResult; - -import java.util.Map; - +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * ScriptSender - */ -public class ScriptSender { - - private static final Logger logger = LoggerFactory.getLogger(ScriptSender.class); - - private 
String scriptPath; - - private String scriptType; - private String userParams; +import java.io.File; +import java.util.Map; +public final class ScriptSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ScriptSender.class); private static final String ALERT_TITLE_OPTION = " -t "; - private static final String ALERT_CONTENT_OPTION = " -c "; - private static final String ALERT_USER_PARAMS_OPTION = " -p "; + private final String scriptPath; + private final String scriptType; + private final String userParams; ScriptSender(Map config) { scriptPath = config.get(ScriptParamsConstants.NAME_SCRIPT_PATH); @@ -64,6 +53,21 @@ public class ScriptSender { alertResult.setMessage("shell script not support windows os"); return alertResult; } + //validate script path in case of injections + File shellScriptFile = new File(scriptPath); + //validate existence + if (!shellScriptFile.exists()) { + log.error("shell script not exist : {}", scriptPath); + alertResult.setMessage("shell script not exist : " + scriptPath); + return alertResult; + } + //validate is file + if (!shellScriptFile.isFile()) { + log.error("shell script is not a file : {}", scriptPath); + alertResult.setMessage("shell script is not a file : " + scriptPath); + return alertResult; + } + String[] cmd = {"/bin/sh", "-c", scriptPath + ALERT_TITLE_OPTION + "'" + title + "'" + ALERT_CONTENT_OPTION + "'" + content + "'" + ALERT_USER_PARAMS_OPTION + "'" + userParams + "'"}; int exitCode = ProcessUtils.executeScript(cmd); @@ -73,7 +77,7 @@ public class ScriptSender { return alertResult; } alertResult.setMessage("send script alert msg error,exitCode is " + exitCode); - logger.info("send script alert msg error,exitCode is {}", exitCode); + log.info("send script alert msg error,exitCode is {}", exitCode); return alertResult; } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java similarity index 94% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java index ff3b8096bb7e0eaadebc8d2d1ba3ad7d1fee9035..cbb3e11f28a63d3141312df68bbf45d1f977ee8b 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptType.java @@ -17,26 +17,17 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import java.util.HashMap; -import java.util.Map; - -/** - * ScriptType - */ public enum ScriptType { - - SHELL(0, "SHELL"), ; + private final int code; + private final String descp; ScriptType(int code, String descp) { this.code = code; this.descp = descp; } - private final int code; - private final String descp; - public int getCode() { return code; } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java similarity index 81% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java index 41aabfe13dcd1a8f1ba8ee918b579c4e4cace840..6b1d19a072b455b01ff11bfe0edcf7c7d3371380 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java @@ -23,16 +23,11 @@ import java.io.InputStream; import java.io.InputStreamReader; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** - * StreamGobbler - */ -public class StreamGobbler extends Thread { - - private static final Logger logger = LoggerFactory.getLogger(StreamGobbler.class); +public final class StreamGobbler extends Thread { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(StreamGobbler.class); - private InputStream inputStream; + private final InputStream inputStream; StreamGobbler(InputStream inputStream) { this.inputStream = inputStream; @@ -51,16 +46,16 @@ public class StreamGobbler extends Thread { output.append(System.getProperty("line.separator")); } if (output.length() > 0) { - logger.info("out put msg is{}", output); + log.info("out put msg is{}", output); } } catch (IOException e) { - logger.error("I/O error occurs {}", e.getMessage()); + log.error("I/O error occurs {}", e.getMessage()); } finally { try { inputBufferReader.close(); inputStreamReader.close(); } catch (IOException e) { - logger.error("I/O error occurs {}", e.getMessage()); + log.error("I/O error occurs {}", e.getMessage()); } } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java similarity index 93% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java index 1d847a06358e059cc4d760bf69809610dadacbbf..7ee473e9a7b2b358498a8b36003d4f4914f7ff84 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtilsTest.java @@ -26,7 +26,7 @@ public class ProcessUtilsTest { private static final String rootPath = System.getProperty("user.dir"); - private static final String shellFilPath = rootPath + "/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/script/shell/test.sh"; + private static final String shellFilPath = rootPath + "/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/script/shell/test.sh"; private String[] cmd = {"/bin/sh", "-c", shellFilPath + " -t 1"}; diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java similarity index 91% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java index 8cedc2c38ecc93df947a76970395f0a2dbd6a1f7..ca45cf7aed6968f035bb08600a72b6c50fbd3f8f 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import java.util.List; @@ -33,7 +33,7 @@ public class ScriptAlertChannelFactoryTest { @Test public void testGetParams() { ScriptAlertChannelFactory scriptAlertChannelFactory = new ScriptAlertChannelFactory(); - List params = scriptAlertChannelFactory.getParams(); + List params = scriptAlertChannelFactory.params(); Assert.assertEquals(3, params.size()); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java similarity index 97% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java index 
e022b9ebf7359b71d4905b38926c5409f612695d..445d0738b5df90d5eb2c67426df57355e59ef958 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSenderTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.script; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.HashMap; import java.util.Map; @@ -31,11 +31,9 @@ import org.junit.Test; */ public class ScriptSenderTest { - private static Map scriptConfig = new HashMap<>(); - private static final String rootPath = System.getProperty("user.dir"); - private static final String shellFilPath = rootPath + "/src/test/script/shell/scriptExample.sh"; + private static Map scriptConfig = new HashMap<>(); @Before public void initScriptConfig() { diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/script/shell/scriptExample.sh b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/script/shell/scriptExample.sh similarity index 100% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/script/shell/scriptExample.sh rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/script/shell/scriptExample.sh diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/script/shell/test.sh b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/script/shell/test.sh similarity index 100% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/test/script/shell/test.sh rename to 
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/test/script/shell/test.sh diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..2772e8ee78af7a5abbdeebd51d610cb1982ecc67 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/pom.xml @@ -0,0 +1,39 @@ + + + + + dolphinscheduler-alert-plugins + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-alert-slack + jar + + + + org.apache.httpcomponents + httpclient + + + + com.google.guava + guava + + + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java similarity index 82% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java index 6399d8bca9107c763af459ddbd92df6ed5859394..b63fed05740fdb7adebce43f9b65243e64cd9b1c 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannel.java @@ -17,18 +17,14 @@ package org.apache.dolphinscheduler.plugin.alert.slack; -import 
org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -/** - * SlackAlertChannel - */ -public class SlackAlertChannel implements AlertChannel { - +public final class SlackAlertChannel implements AlertChannel { @Override public AlertResult process(AlertInfo alertInfo) { AlertData alertData = alertInfo.getAlertData(); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java similarity index 61% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java index d56976400a3886da7877a1d1beb3b8732a362cb7..9819ed4b33fb295cffc4f4e404b095aacfc4fb50 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactory.java @@ -17,42 +17,41 @@ package org.apache.dolphinscheduler.plugin.alert.slack; -import 
org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; import java.util.LinkedList; import java.util.List; -/** - * Slack alert factory, see {@link AlertChannelFactory} - */ -public class SlackAlertChannelFactory implements AlertChannelFactory { +import com.google.auto.service.AutoService; +@AutoService(AlertChannelFactory.class) +public final class SlackAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "Slack"; } @Override - public List getParams() { + public List params() { List paramsList = new LinkedList<>(); InputParam webHookParam = InputParam.newBuilder(SlackParamsConstants.SLACK_WEN_HOOK_URL_NAME, SlackParamsConstants.SLACK_WEB_HOOK_URL) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .setPlaceholder("Input WebHook Url") - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .setPlaceholder("Input WebHook Url") + .build(); InputParam botName = InputParam.newBuilder(SlackParamsConstants.SLACK_BOT_NAME, SlackParamsConstants.SLACK_BOT) - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .setPlaceholder("Input the bot username") - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .setPlaceholder("Input the bot username") + .build(); paramsList.add(webHookParam); paramsList.add(botName); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java similarity index 77% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java index fd191c5d55af1fdd38752bfa086363bb3982193e..e43856ba3640d8dcaaa1a262e18c139b4a652fdf 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackParamsConstants.java @@ -17,18 +17,16 @@ package org.apache.dolphinscheduler.plugin.alert.slack; -public class SlackParamsConstants { - - private SlackParamsConstants() { - - } - - public static final String SLACK_WEB_HOOK_URL = "WebHook"; - public static final String SLACK_WEN_HOOK_URL_NAME = "webHook"; - public static final String SLACK_BOT = "Username"; +public final class SlackParamsConstants { + public static final String SLACK_WEB_HOOK_URL = "webhook"; + public static final String SLACK_WEN_HOOK_URL_NAME = "WebHook"; + public static final String SLACK_BOT = "username"; public static final String SLACK_BOT_NAME = "username"; public static final String TEXT = "text"; public static final String ATTACHMENT = "attachments"; - public static final Integer MAX_SHOW_NUMBER = 100; + + private SlackParamsConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java similarity index 88% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java index 21a82862116c0fadef10bf1825645b56378779c5..579fbaa05c0966598e365acbc49d896db3343cc2 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java @@ -39,17 +39,14 @@ import java.util.Objects; import java.util.stream.Collectors; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; -public class SlackSender { +public final class SlackSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(SlackSender.class); - private static final Logger logger = LoggerFactory.getLogger(SlackSender.class); - - private String webHookUrl; - - private String botName; + private final String webHookUrl; + private final String botName; public SlackSender(Map slackAlertParam) { webHookUrl = slackAlertParam.get(SlackParamsConstants.SLACK_WEN_HOOK_URL_NAME); @@ -86,7 +83,7 @@ public class SlackSender { HttpEntity entity = response.getEntity(); return EntityUtils.toString(entity, "UTF-8"); } catch (Exception e) { - logger.error("Send message to slack error.", e); + log.error("Send message to slack error.", e); return "System Exception"; 
} } @@ -122,17 +119,17 @@ public class SlackSender { final int elementLen = maxLen; StringBuilder stringBuilder = new StringBuilder(200); stringBuilder.append(headers.stream() - .map(header -> generateString(header, elementLen, " ")) - .collect(Collectors.joining("|"))); + .map(header -> generateString(header, elementLen, " ")) + .collect(Collectors.joining("|"))); stringBuilder.append("\n"); for (List element : elements) { stringBuilder.append(element.stream() - .map(lement -> generateString("", elementLen, "-")) - .collect(Collectors.joining("|"))); + .map(lement -> generateString("", elementLen, "-")) + .collect(Collectors.joining("|"))); stringBuilder.append("\n"); stringBuilder.append(element.stream() - .map(e -> generateString(e, elementLen, " ")) - .collect(Collectors.joining("|"))); + .map(e -> generateString(e, elementLen, " ")) + .collect(Collectors.joining("|"))); stringBuilder.append("\n"); } return String.format("```%s```", stringBuilder); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java similarity index 87% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java index e294365da76084ab2a6169502f3c79eb317d6208..21ba907ee797e66a6bb1066030ded67965705d26 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.slack; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import java.util.List; @@ -31,12 +31,12 @@ public class SlackAlertChannelFactoryTest { @Test public void testTestGetName() { - Assert.assertEquals("Slack", slackAlertChannelFactory.getName()); + Assert.assertEquals("Slack", slackAlertChannelFactory.name()); } @Test public void testGetParams() { - List params = slackAlertChannelFactory.getParams(); + List params = slackAlertChannelFactory.params(); Assert.assertEquals(2, params.size()); } @@ -45,4 +45,4 @@ public class SlackAlertChannelFactoryTest { AlertChannel alertChannel = slackAlertChannelFactory.create(); Assert.assertTrue(alertChannel instanceof SlackAlertChannel); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java similarity index 95% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java index a488026b42bbdbcbea2f4e4d7e220b20d569740d..c675aca0e8beced4774b144f120416f24df1bd1a 100644 --- 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSenderTest.java @@ -29,11 +29,11 @@ public class SlackSenderTest { public void testSendMessage() { Map alertparam = new HashMap<>(); alertparam.put(SlackParamsConstants.SLACK_WEN_HOOK_URL_NAME, - "https://hooks.slack.com/services/123456"); + "https://hooks.slack.com/services/123456"); alertparam.put(SlackParamsConstants.SLACK_BOT_NAME, "Dolphinscheduler"); SlackSender slackSender = new SlackSender(alertparam); String response = slackSender.sendMessage("test title", "test content"); Assert.assertNotEquals("ok", response); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/pom.xml similarity index 53% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/pom.xml rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/pom.xml index 7dce7127151df89672a7733de1d08e3fa73fa820..2adfcaee1754eef7f98230521a3d7093181db556 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/pom.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/pom.xml @@ -15,27 +15,17 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + - dolphinscheduler-alert-plugin + dolphinscheduler-alert-plugins org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler-alert-slack - dolphinscheduler-plugin + dolphinscheduler-alert-wechat + jar - - - org.apache.httpcomponents - httpclient - - com.google.guava guava @@ -47,39 +37,14 @@ - org.slf4j - slf4j-api + org.apache.httpcomponents + httpclient com.fasterxml.jackson.core - jackson-annotations + jackson-databind provided - - - junit - junit - test - - - - org.mockito - mockito-core - jar - test - - - - org.jacoco - org.jacoco.agent - runtime - test - - - - dolphinscheduler-alert-slack-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java similarity index 80% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java index 36cce09ff3e33f94a157f17529bebbba13cbedc5..94b43ea3006d729394aec98803da6d81b5e5a718 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannel.java @@ -17,18 +17,14 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import 
org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -/** - * WeChatAlertChannel - */ -public class WeChatAlertChannel implements AlertChannel { - +public final class WeChatAlertChannel implements AlertChannel { @Override public AlertResult process(AlertInfo info) { AlertData alertData = info.getAlertData(); diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java similarity index 42% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java index 25b1318f4256221da9cea87a05ea39d82244c250..ef5fc6f4cb5919bbbd7eba6e152e7dd7d01fd576 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactory.java @@ -17,74 +17,80 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import 
org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; -import org.apache.dolphinscheduler.spi.params.input.InputParam; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import java.util.Arrays; import java.util.List; -/** - * WeChatAlertChannelFactory - */ -public class WeChatAlertChannelFactory implements AlertChannelFactory { +import com.google.auto.service.AutoService; +@AutoService(AlertChannelFactory.class) +public final class WeChatAlertChannelFactory implements AlertChannelFactory { @Override - public String getName() { + public String name() { return "WeChat"; } @Override - public List getParams() { + public List params() { InputParam corpIdParam = InputParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_CORP_ID, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_CORP_ID) - .setPlaceholder("please input corp id ") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setPlaceholder("please input corp id ") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam secretParam = InputParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_SECRET, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_SECRET) - .setPlaceholder("please input secret ") - .addValidate(Validate.newBuilder() - 
.setRequired(true) - .build()) - .build(); + .setPlaceholder("please input secret ") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam usersParam = InputParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_USERS, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_USERS) - .setPlaceholder("please input users ") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setPlaceholder("please input users ") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam userSendMsgParam = InputParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_USER_SEND_MSG, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_USER_SEND_MSG) - .setPlaceholder("please input corp id ") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setPlaceholder("please input corp id ") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); InputParam agentIdParam = InputParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_AGENT_ID, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_AGENT_ID) - .setPlaceholder("please input agent id ") - .addValidate(Validate.newBuilder() - .setRequired(true) - .build()) - .build(); + .setPlaceholder("please input agent id ") + .addValidate(Validate.newBuilder() + .setRequired(true) + .build()) + .build(); - RadioParam showType = RadioParam.newBuilder(AlertConstants.SHOW_TYPE, AlertConstants.SHOW_TYPE) - .addParamsOptions(new ParamsOptions(ShowType.TABLE.getDescp(), ShowType.TABLE.getDescp(), false)) - .addParamsOptions(new ParamsOptions(ShowType.TEXT.getDescp(), ShowType.TEXT.getDescp(), false)) - .setValue(ShowType.TABLE.getDescp()) + RadioParam sendType = RadioParam.newBuilder(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_SEND_TYPE, WeChatAlertParamsConstants.ENTERPRISE_WE_CHAT_SEND_TYPE) + .addParamsOptions(new ParamsOptions(WeChatType.APP.getDescp(), 
WeChatType.APP.getDescp(), false)) + .addParamsOptions(new ParamsOptions(WeChatType.APPCHAT.getDescp(), WeChatType.APPCHAT.getDescp(), false)) + .setValue(WeChatType.APP.getDescp()) .addValidate(Validate.newBuilder().setRequired(true).build()) .build(); - return Arrays.asList(corpIdParam, secretParam, usersParam, userSendMsgParam, agentIdParam, showType); + RadioParam showType = RadioParam.newBuilder(AlertConstants.NAME_SHOW_TYPE, AlertConstants.SHOW_TYPE) + .addParamsOptions(new ParamsOptions(ShowType.TABLE.getDescp(), ShowType.TABLE.getDescp(), false)) + .addParamsOptions(new ParamsOptions(ShowType.TEXT.getDescp(), ShowType.TEXT.getDescp(), false)) + .setValue(ShowType.TABLE.getDescp()) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); + + return Arrays.asList(corpIdParam, secretParam, usersParam, userSendMsgParam, agentIdParam, sendType, showType); } @Override diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java similarity index 81% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java index 9aec21b9d934d860839eeffec0929b3f027f99c1..9c90d63e28feece8291ab3185eaeea90343471fd 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertConstants.java @@ -17,15 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -/** - * WeChatAlertConstants - */ -public class WeChatAlertConstants { - - private WeChatAlertConstants() { - throw new IllegalStateException(WeChatAlertConstants.class.getName()); - } - +public final class WeChatAlertConstants { static final String MARKDOWN_QUOTE = ">"; static final String MARKDOWN_ENTER = "\n"; @@ -34,5 +26,12 @@ public class WeChatAlertConstants { static final String WE_CHAT_PUSH_URL = "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token={token}"; + static final String WE_CHAT_APP_CHAT_PUSH_URL = "https://qyapi.weixin.qq.com/cgi-bin/appchat/send?access_token" + + "={token}"; + static final String WE_CHAT_TOKEN_URL = "https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid={corpId}&corpsecret={secret}"; + + private WeChatAlertConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java similarity index 85% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java index c254b27ebfd2bcb87b92d3d1a1de82aba5c73fd0..8e8f6b1ff259bf16fca630f1140b0d0b8fb9eb14 100644 --- 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertParamsConstants.java @@ -17,15 +17,7 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -/** - * WeChatAlertParamsConstants - */ -public class WeChatAlertParamsConstants { - - private WeChatAlertParamsConstants() { - throw new IllegalStateException(WeChatAlertParamsConstants.class.getName()); - } - +public final class WeChatAlertParamsConstants { static final String ENTERPRISE_WE_CHAT_CORP_ID = "corp.id"; static final String NAME_ENTERPRISE_WE_CHAT_CORP_ID = "corpId"; @@ -56,4 +48,12 @@ public class WeChatAlertParamsConstants { static final String NAME_ENTERPRISE_WE_CHAT_USERS = "users"; + static final String NAME_ENTERPRISE_WE_CHAT_SEND_TYPE = "sendType"; + + static final String ENTERPRISE_WE_CHAT_SEND_TYPE = "send.type"; + + + private WeChatAlertParamsConstants() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java similarity index 75% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java index 6944da66bb3480f0022d7460d637d67f1959388e..e38eea598b9ee60cd86b61058ca673f0d63078e0 100644 --- 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java @@ -17,15 +17,11 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -import static java.util.Objects.requireNonNull; - -import org.apache.dolphinscheduler.plugin.alert.wechat.exception.WeChatAlertException; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.AlertResult; -import org.apache.dolphinscheduler.spi.alert.ShowType; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; - import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; @@ -34,41 +30,16 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Set; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * WeChatSender - */ -public class WeChatSender { - - private static Logger logger = LoggerFactory.getLogger(WeChatSender.class); - - private String weChatAgentId; - - private String weChatUsers; - - private String weChatUserSendMsg; - - private String 
weChatTokenUrlReplace; - - private String weChatToken; - - private String showType; +import static java.util.Objects.requireNonNull; +public final class WeChatSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(WeChatSender.class); private static final String MUST_NOT_NULL = " must not null"; private static final String ALERT_STATUS = "false"; private static final String AGENT_ID_REG_EXP = "{agentId}"; @@ -77,6 +48,13 @@ public class WeChatSender { private static final String CORP_ID_REGEX = "{corpId}"; private static final String SECRET_REGEX = "{secret}"; private static final String TOKEN_REGEX = "{token}"; + private final String weChatAgentId; + private final String weChatUsers; + private final String weChatUserSendMsg; + private final String weChatTokenUrlReplace; + private final String weChatToken; + private final String sendType; + private final String showType; WeChatSender(Map config) { weChatAgentId = config.get(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_AGENT_ID); @@ -85,58 +63,15 @@ public class WeChatSender { String weChatSecret = config.get(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_SECRET); String weChatTokenUrl = WeChatAlertConstants.WE_CHAT_TOKEN_URL; weChatUserSendMsg = config.get(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_USER_SEND_MSG); - showType = config.get(AlertConstants.SHOW_TYPE); - requireNonNull(showType, AlertConstants.SHOW_TYPE + MUST_NOT_NULL); + sendType = config.get(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_SEND_TYPE); + showType = config.get(AlertConstants.NAME_SHOW_TYPE); + requireNonNull(showType, AlertConstants.NAME_SHOW_TYPE + MUST_NOT_NULL); weChatTokenUrlReplace = weChatTokenUrl .replace(CORP_ID_REGEX, weChatCorpId) .replace(SECRET_REGEX, weChatSecret); weChatToken = getToken(); } - /** - * make user multi user message - * - * @param toUser the toUser - * @param agentId the agentId - * @param msg the msg - * @return Enterprise WeChat send message - */ - private String 
makeUserSendMsg(Collection toUser, String agentId, String msg) { - String listUser = mkString(toUser); - return weChatUserSendMsg.replace(USER_REG_EXP, listUser) - .replace(AGENT_ID_REG_EXP, agentId) - .replace(MSG_REG_EXP, msg); - } - - /** - * send Enterprise WeChat - * - * @return Enterprise WeChat resp, demo: {"errcode":0,"errmsg":"ok","invaliduser":""} - */ - public AlertResult sendEnterpriseWeChat(String title, String content) { - AlertResult alertResult; - List userList = Arrays.asList(weChatUsers.split(",")); - String data = markdownByAlert(title, content); - String msg = makeUserSendMsg(userList, weChatAgentId, data); - if (null == weChatToken) { - alertResult = new AlertResult(); - alertResult.setMessage("send we chat alert fail,get weChat token error"); - alertResult.setStatus(ALERT_STATUS); - return alertResult; - } - String enterpriseWeChatPushUrlReplace = WeChatAlertConstants.WE_CHAT_PUSH_URL.replace(TOKEN_REGEX, weChatToken); - - try { - return checkWeChatSendMsgResult(post(enterpriseWeChatPushUrlReplace, msg)); - } catch (Exception e) { - logger.info("send we chat alert msg exception : {}", e.getMessage()); - alertResult = new AlertResult(); - alertResult.setMessage("send we chat alert fail"); - alertResult.setStatus(ALERT_STATUS); - } - return alertResult; - } - private static String post(String url, String data) throws IOException { try (CloseableHttpClient httpClient = HttpClients.createDefault()) { HttpPost httpPost = new HttpPost(url); @@ -150,7 +85,7 @@ public class WeChatSender { } finally { response.close(); } - logger.info("Enterprise WeChat send [{}], param:{}, resp:{}", + log.info("Enterprise WeChat send [{}], param:{}, resp:{}", url, data, resp); return resp; } @@ -166,8 +101,8 @@ public class WeChatSender { private static String markdownTable(String title, String content) { List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); if (null == mapItemsList || mapItemsList.isEmpty()) { - logger.error("itemsList is null"); - 
throw new WeChatAlertException("itemsList is null"); + log.error("itemsList is null"); + throw new RuntimeException("itemsList is null"); } StringBuilder contents = new StringBuilder(200); for (LinkedHashMap mapItems : mapItemsList) { @@ -199,7 +134,7 @@ public class WeChatSender { if (StringUtils.isNotEmpty(content)) { List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); if (null == mapItemsList || mapItemsList.isEmpty()) { - logger.error("itemsList is null"); + log.error("itemsList is null"); throw new RuntimeException("itemsList is null"); } @@ -220,31 +155,6 @@ public class WeChatSender { return null; } - /** - * Determine the mardown style based on the show type of the alert - * - * @return the markdown alert table/text - */ - private String markdownByAlert(String title, String content) { - String result = ""; - if (showType.equals(ShowType.TABLE.getDescp())) { - result = markdownTable(title, content); - } else if (showType.equals(ShowType.TEXT.getDescp())) { - result = markdownText(title, content); - } - return result; - - } - - private String getToken() { - try { - return get(weChatTokenUrlReplace); - } catch (IOException e) { - logger.info("we chat alert get token error{}", e.getMessage()); - } - return null; - } - private static String get(String url) throws IOException { String resp; @@ -266,7 +176,6 @@ public class WeChatSender { } private static String mkString(Iterable list) { - if (null == list || StringUtils.isEmpty("|")) { return null; } @@ -284,40 +193,19 @@ public class WeChatSender { return sb.toString(); } - public static class WeChatSendMsgResponse { - private Integer errcode; - private String errmsg; - - public Integer getErrcode() { - return errcode; - } - - public void setErrcode(Integer errcode) { - this.errcode = errcode; - } - - public String getErrmsg() { - return errmsg; - } - - public void setErrmsg(String errmsg) { - this.errmsg = errmsg; - } - } - private static AlertResult checkWeChatSendMsgResult(String result) { 
AlertResult alertResult = new AlertResult(); alertResult.setStatus(ALERT_STATUS); if (null == result) { alertResult.setMessage("we chat send fail"); - logger.info("send we chat msg error,resp is null"); + log.info("send we chat msg error,resp is null"); return alertResult; } WeChatSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, WeChatSendMsgResponse.class); if (null == sendMsgResponse) { alertResult.setMessage("we chat send fail"); - logger.info("send we chat msg error,resp error"); + log.info("send we chat msg error,resp error"); return alertResult; } if (sendMsgResponse.errcode == 0) { @@ -329,4 +217,137 @@ public class WeChatSender { alertResult.setMessage(sendMsgResponse.getErrmsg()); return alertResult; } + + /** + * make user multi user message + * + * @param toUser the toUser + * @param agentId the agentId + * @param msg the msg + * @return Enterprise WeChat send message + */ + private String makeUserSendMsg(Collection toUser, String agentId, String msg) { + String listUser = mkString(toUser); + return weChatUserSendMsg.replace(USER_REG_EXP, listUser) + .replace(AGENT_ID_REG_EXP, agentId) + .replace(MSG_REG_EXP, msg); + } + + /** + * send Enterprise WeChat + * + * @return Enterprise WeChat resp, demo: {"errcode":0,"errmsg":"ok","invaliduser":""} + */ + public AlertResult sendEnterpriseWeChat(String title, String content) { + AlertResult alertResult; + List userList = Arrays.asList(weChatUsers.split(",")); + String data = markdownByAlert(title, content); + String msg = makeUserSendMsg(userList, weChatAgentId, data); + if (null == weChatToken) { + alertResult = new AlertResult(); + alertResult.setMessage("send we chat alert fail,get weChat token error"); + alertResult.setStatus(ALERT_STATUS); + return alertResult; + } + String enterpriseWeChatPushUrlReplace = ""; + if (sendType.equals(WeChatType.APP.getDescp())) { + enterpriseWeChatPushUrlReplace = WeChatAlertConstants.WE_CHAT_PUSH_URL.replace(TOKEN_REGEX, weChatToken); + } else if 
(sendType.equals(WeChatType.APPCHAT.getDescp())) { + enterpriseWeChatPushUrlReplace = WeChatAlertConstants.WE_CHAT_APP_CHAT_PUSH_URL.replace(TOKEN_REGEX, weChatToken); + } + + try { + return checkWeChatSendMsgResult(post(enterpriseWeChatPushUrlReplace, msg)); + } catch (Exception e) { + log.info("send we chat alert msg exception : {}", e.getMessage()); + alertResult = new AlertResult(); + alertResult.setMessage("send we chat alert fail"); + alertResult.setStatus(ALERT_STATUS); + } + return alertResult; + } + + /** + * Determine the mardown style based on the show type of the alert + * + * @return the markdown alert table/text + */ + private String markdownByAlert(String title, String content) { + String result = ""; + if (showType.equals(ShowType.TABLE.getDescp())) { + result = markdownTable(title, content); + } else if (showType.equals(ShowType.TEXT.getDescp())) { + result = markdownText(title, content); + } + return result; + + } + + private String getToken() { + try { + return get(weChatTokenUrlReplace); + } catch (IOException e) { + log.info("we chat alert get token error{}", e.getMessage()); + } + return null; + } + + static final class WeChatSendMsgResponse { + private Integer errcode; + private String errmsg; + + public WeChatSendMsgResponse() { + } + + public Integer getErrcode() { + return this.errcode; + } + + public void setErrcode(Integer errcode) { + this.errcode = errcode; + } + + public String getErrmsg() { + return this.errmsg; + } + + public void setErrmsg(String errmsg) { + this.errmsg = errmsg; + } + + public boolean equals(final Object o) { + if (o == this) { + return true; + } + if (!(o instanceof WeChatSendMsgResponse)) { + return false; + } + final WeChatSendMsgResponse other = (WeChatSendMsgResponse) o; + final Object this$errcode = this.getErrcode(); + final Object other$errcode = other.getErrcode(); + if (this$errcode == null ? 
other$errcode != null : !this$errcode.equals(other$errcode)) { + return false; + } + final Object this$errmsg = this.getErrmsg(); + final Object other$errmsg = other.getErrmsg(); + if (this$errmsg == null ? other$errmsg != null : !this$errmsg.equals(other$errmsg)) { + return false; + } + return true; + } + + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $errcode = this.getErrcode(); + result = result * PRIME + ($errcode == null ? 43 : $errcode.hashCode()); + final Object $errmsg = this.getErrmsg(); + result = result * PRIME + ($errmsg == null ? 43 : $errmsg.hashCode()); + return result; + } + + public String toString() { + return "WeChatSender.WeChatSendMsgResponse(errcode=" + this.getErrcode() + ", errmsg=" + this.getErrmsg() + ")"; + } + } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/ShowType.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatType.java similarity index 71% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/ShowType.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatType.java index a95e73ff7f697d5ba841b729a48bf0eb4e8f38d6..eac208a97b9aac0d0ea9cdbe06dc83041c49805c 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/ShowType.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatType.java @@ -15,32 +15,22 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.spi.alert; -/** - * show type for email - * all alert plugin can use ShowType , so let it in spi package - */ -public enum ShowType { - /** - * 0 TABLE; - * 1 TEXT; - * 2 attachment; - * 3 TABLE+attachment; - */ - TABLE(0, "table"), - TEXT(1, "text"), - ATTACHMENT(2, "attachment"), - TABLEATTACHMENT(3, "table attachment"); +package org.apache.dolphinscheduler.plugin.alert.wechat; - ShowType(int code, String descp) { - this.code = code; - this.descp = descp; - } +public enum WeChatType { + APP(1, "应用"), + APPCHAT(2, "群聊"), + ; private final int code; private final String descp; + WeChatType(int code, String descp) { + this.code = code; + this.descp = descp; + } + public int getCode() { return code; } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java similarity index 89% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java index cc62d5aabf543aafcd734de1b1b3ea2ca8623785..648c206a33802eba331513a42a07a0adc9923ed2 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java @@ -17,7 +17,7 @@ package 
org.apache.dolphinscheduler.plugin.alert.wechat; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannel; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.utils.JSONUtils; @@ -34,9 +34,9 @@ public class WeChatAlertChannelFactoryTest { @Test public void testGetParams() { WeChatAlertChannelFactory weChatAlertChannelFactory = new WeChatAlertChannelFactory(); - List params = weChatAlertChannelFactory.getParams(); + List params = weChatAlertChannelFactory.params(); JSONUtils.toJsonString(params); - Assert.assertEquals(6, params.size()); + Assert.assertEquals(7, params.size()); } @Test diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java similarity index 72% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java index 61010ebe3ae35e3e5cb41192a656b989facb8b45..f4934bf9208b637f24f4c7bf6e7cf7cfd4650b6e 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/test/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSenderTest.java @@ -17,17 +17,16 @@ package org.apache.dolphinscheduler.plugin.alert.wechat; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import 
org.apache.dolphinscheduler.spi.alert.AlertResult; -import org.apache.dolphinscheduler.spi.alert.ShowType; - -import java.util.HashMap; -import java.util.Map; - +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.AlertResult; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.HashMap; +import java.util.Map; + /** * WeChatSenderTest */ @@ -36,24 +35,24 @@ public class WeChatSenderTest { private static Map weChatConfig = new HashMap<>(); private String content = "[{\"id\":\"69\"," - + - "\"name\":\"UserBehavior-0--1193959466\"," - + - "\"Job name\":\"Start workflow\"," - + - "\"State\":\"SUCCESS\"," - + - "\"Recovery\":\"NO\"," - + - "\"Run time\":\"1\"," - + - "\"Start time\": \"2018-08-06 10:31:34.0\"," - + - "\"End time\": \"2018-08-06 10:31:49.0\"," - + - "\"Host\": \"192.168.xx.xx\"," - + - "\"Notify group\" :\"4\"}]"; + + + "\"name\":\"UserBehavior-0--1193959466\"," + + + "\"Job name\":\"Start workflow\"," + + + "\"State\":\"SUCCESS\"," + + + "\"Recovery\":\"NO\"," + + + "\"Run time\":\"1\"," + + + "\"Start time\": \"2018-08-06 10:31:34.0\"," + + + "\"End time\": \"2018-08-06 10:31:49.0\"," + + + "\"Host\": \"192.168.xx.xx\"," + + + "\"Notify group\" :\"4\"}]"; @Before public void initDingTalkConfig() { @@ -62,12 +61,12 @@ public class WeChatSenderTest { weChatConfig.put(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_CORP_ID, "NAME_ENTERPRISE_WE_CHAT_CORP_ID"); weChatConfig.put(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_SECRET, "NAME_ENTERPRISE_WE_CHAT_SECRET"); weChatConfig.put(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_USER_SEND_MSG, "{\"touser\":\"{toUser}\",\"agentid\":{agentId}" - + - ",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}" + + + ",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}" ); 
weChatConfig.put(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_USERS, "Kris"); weChatConfig.put(WeChatAlertParamsConstants.NAME_ENTERPRISE_WE_CHAT_TEAM_SEND_MSG, "msg"); - weChatConfig.put(AlertConstants.SHOW_TYPE, ShowType.TABLE.getDescp()); + weChatConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLE.getDescp()); } @Test @@ -80,7 +79,7 @@ public class WeChatSenderTest { @Test public void testSendWeChatTextMsg() { - weChatConfig.put(AlertConstants.SHOW_TYPE, ShowType.TEXT.getDescp()); + weChatConfig.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TEXT.getDescp()); WeChatSender weChatSender = new WeChatSender(weChatConfig); AlertResult alertResult = weChatSender.sendEnterpriseWeChat("test", content); Assert.assertEquals("false", alertResult.getStatus()); diff --git a/dolphinscheduler-alert-plugin/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/pom.xml similarity index 73% rename from dolphinscheduler-alert-plugin/pom.xml rename to dolphinscheduler-alert/dolphinscheduler-alert-plugins/pom.xml index f5dc77b04f059ad72d20ea8a1e3513b55fe7c6f9..e8aae9f553a07c7dc9b7e58868ee0a6e2245a05d 100644 --- a/dolphinscheduler-alert-plugin/pom.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/pom.xml @@ -15,18 +15,14 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + - dolphinscheduler + dolphinscheduler-alert org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler-alert-plugin + dolphinscheduler-alert-plugins pom @@ -40,12 +36,10 @@ - org.apache.dolphinscheduler - dolphinscheduler-spi - provided + dolphinscheduler-alert-api - \ No newline at end of file + diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml b/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..3e0de686f185d8d645b57e34ae31ccb0a4bd8047 --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/pom.xml @@ -0,0 +1,129 @@ + + + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler-alert + 2.0.10-SNAPSHOT + + dolphinscheduler-alert-server + ${project.artifactId} + jar + + + + + org.apache.dolphinscheduler + dolphinscheduler-remote + + + org.apache.dolphinscheduler + dolphinscheduler-alert-dingtalk + + + org.apache.dolphinscheduler + dolphinscheduler-alert-email + + + org.apache.dolphinscheduler + dolphinscheduler-alert-feishu + + + org.apache.dolphinscheduler + dolphinscheduler-alert-http + + + org.apache.dolphinscheduler + dolphinscheduler-alert-script + + + org.apache.dolphinscheduler + dolphinscheduler-alert-slack + + + org.apache.dolphinscheduler + dolphinscheduler-alert-wechat + + + + org.apache.dolphinscheduler + dolphinscheduler-dao + + + + com.fasterxml.jackson.core + jackson-core + + + + com.fasterxml.jackson.core + jackson-databind + + + + com.google.guava + guava + + + jsr305 + com.google.code.findbugs + + + + + ch.qos.logback + logback-classic + + + + + org.apache.poi + poi + + + + org.mockito + mockito-core + test + + + + org.mockito + mockito-inline + test + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + *.yaml + *.yml + *.xml + + + + + + diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java new file mode 100644 index 0000000000000000000000000000000000000000..1ad06e261a515bcc6a44be3760517d356283469c --- /dev/null +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.alert; + +import static java.lang.String.format; + +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertChannelFactory; +import org.apache.dolphinscheduler.common.enums.PluginType; +import org.apache.dolphinscheduler.dao.PluginDao; +import org.apache.dolphinscheduler.dao.entity.PluginDefine; +import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.ServiceLoader; +import java.util.Set; + +import org.slf4j.Logger; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +@Component +public final class AlertPluginManager { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(AlertPluginManager.class); + + private final PluginDao pluginDao; + + private final Map channelKeyedById = new HashMap<>(); + + public AlertPluginManager(PluginDao pluginDao) { + this.pluginDao = pluginDao; + } + + @EventListener + public void installPlugin(ApplicationReadyEvent readyEvent) { + final Set names = new HashSet<>(); + + ServiceLoader.load(AlertChannelFactory.class).forEach(factory -> { + final String name = factory.name(); + + log.info("Registering alert plugin: {}", name); + + if (!names.add(name)) { + throw new IllegalStateException(format("Duplicate alert plugins named '%s'", name)); + } + + final AlertChannel alertChannel = factory.create(); + + log.info("Registered alert plugin: {}", name); + + final List params = factory.params(); + final String paramsJson = PluginParamsTransfer.transferParamsToJson(params); + + final PluginDefine pluginDefine = new PluginDefine(name, PluginType.ALERT.getDesc(), paramsJson); + final int 
id = pluginDao.addOrUpdatePluginDefine(pluginDefine); + + channelKeyedById.put(id, alertChannel); + }); + } + + public Optional getAlertChannel(int id) { + return Optional.ofNullable(channelKeyedById.get(id)); + } + + public int size() { + return channelKeyedById.size(); + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessor.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java similarity index 56% rename from dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessor.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java index 9421a975465fc1e7cd3142975045994d932eb285..0db7f70af395134e82f3bac5393388f23b98a17d 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessor.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.alert.processor; +package org.apache.dolphinscheduler.alert; + +import static com.google.common.base.Preconditions.checkArgument; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.alert.runner.AlertSender; -import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.alert.AlertSendRequestCommand; @@ -28,35 +27,33 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Preconditions; +import org.springframework.stereotype.Component; import io.netty.channel.Channel; -public class AlertRequestProcessor implements NettyRequestProcessor { +@Component +public final class AlertRequestProcessor implements NettyRequestProcessor { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(AlertRequestProcessor.class); - private final Logger logger = LoggerFactory.getLogger(AlertRequestProcessor.class); - private final AlertDao alertDao; - private final AlertPluginManager alertPluginManager; + private final AlertSender alertSender; - public AlertRequestProcessor(AlertDao alertDao, AlertPluginManager alertPluginManager) { - this.alertDao = alertDao; - this.alertPluginManager = alertPluginManager; + public AlertRequestProcessor(AlertSender alertSender) { + this.alertSender = alertSender; } @Override public void process(Channel channel, Command command) { - Preconditions.checkArgument(CommandType.ALERT_SEND_REQUEST == command.getType(), - String.format("invalid command type : %s", command.getType())); + checkArgument(CommandType.ALERT_SEND_REQUEST == command.getType(), "invalid command type: %s", command.getType()); AlertSendRequestCommand 
alertSendRequestCommand = JsonSerializer.deserialize( - command.getBody(), AlertSendRequestCommand.class); - logger.info("received command : {}", alertSendRequestCommand); + command.getBody(), AlertSendRequestCommand.class); - AlertSender alertSender = new AlertSender(alertDao, alertPluginManager); - AlertSendResponseCommand alertSendResponseCommand = alertSender.syncHandler(alertSendRequestCommand.getGroupId(), alertSendRequestCommand.getTitle(), alertSendRequestCommand.getContent()); - channel.writeAndFlush(alertSendResponseCommand.convert2Command(command.getOpaque())); + log.info("Received command : {}", alertSendRequestCommand); + AlertSendResponseCommand alertSendResponseCommand = alertSender.syncHandler( + alertSendRequestCommand.getGroupId(), + alertSendRequestCommand.getTitle(), + alertSendRequestCommand.getContent()); + channel.writeAndFlush(alertSendResponseCommand.convert2Command(command.getOpaque())); } } diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSender.java similarity index 69% rename from dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSender.java index d7bcc2c95fc789257c00047aca6a678db314cd79..ebc57e82bfd6ecd789ea8f2454f80223767f927b 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/runner/AlertSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSender.java @@ -15,76 +15,63 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.alert.runner; +package org.apache.dolphinscheduler.alert; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertData; +import org.apache.dolphinscheduler.alert.api.AlertInfo; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.common.enums.AlertStatus; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand; import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseResult; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertData; -import org.apache.dolphinscheduler.spi.alert.AlertInfo; -import org.apache.dolphinscheduler.spi.alert.AlertResult; + +import org.apache.commons.collections.CollectionUtils; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class AlertSender { +import org.springframework.stereotype.Component; - private static final Logger logger = LoggerFactory.getLogger(AlertSender.class); +@Component +public final class AlertSender { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(AlertSender.class); - private List alertList; - private AlertDao alertDao; + private final AlertDao alertDao; private final AlertPluginManager alertPluginManager; - public AlertSender(AlertPluginManager alertPluginManager) { - this.alertPluginManager = alertPluginManager; - } - public AlertSender(AlertDao alertDao, AlertPluginManager 
alertPluginManager) { - super(); this.alertDao = alertDao; this.alertPluginManager = alertPluginManager; } - public AlertSender(List alertList, AlertDao alertDao, AlertPluginManager alertPluginManager) { - super(); - this.alertList = alertList; - this.alertDao = alertDao; - this.alertPluginManager = alertPluginManager; - } - - public void run() { - for (Alert alert : alertList) { + public void send(List alerts) { + for (Alert alert : alerts) { //get alert group from alert int alertGroupId = alert.getAlertGroupId(); List alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); if (CollectionUtils.isEmpty(alertInstanceList)) { - logger.error("send alert msg fail,no bind plugin instance."); + log.error("send alert msg fail,no bind plugin instance."); alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, "no bind plugin instance", alert.getId()); continue; } AlertData alertData = new AlertData(); alertData.setId(alert.getId()) - .setContent(alert.getContent()) - .setLog(alert.getLog()) - .setTitle(alert.getTitle()); + .setContent(alert.getContent()) + .setLog(alert.getLog()) + .setTitle(alert.getTitle()); for (AlertPluginInstance instance : alertInstanceList) { AlertResult alertResult = this.alertResultHandler(instance, alertData); AlertStatus alertStatus = Boolean.parseBoolean(String.valueOf(alertResult.getStatus())) ? 
AlertStatus.EXECUTION_SUCCESS : AlertStatus.EXECUTION_FAILURE; alertDao.updateAlert(alertStatus, alertResult.getMessage(), alert.getId()); - } } @@ -99,30 +86,28 @@ public class AlertSender { * @return AlertSendResponseCommand */ public AlertSendResponseCommand syncHandler(int alertGroupId, String title, String content) { - List alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); AlertData alertData = new AlertData(); alertData.setContent(content) - .setTitle(title); + .setTitle(title); boolean sendResponseStatus = true; List sendResponseResults = new ArrayList<>(); if (CollectionUtils.isEmpty(alertInstanceList)) { - sendResponseStatus = false; AlertSendResponseResult alertSendResponseResult = new AlertSendResponseResult(); String message = String.format("Alert GroupId %s send error : not found alert instance", alertGroupId); - alertSendResponseResult.setStatus(sendResponseStatus); + alertSendResponseResult.setStatus(false); alertSendResponseResult.setMessage(message); sendResponseResults.add(alertSendResponseResult); - logger.error("Alert GroupId {} send error : not found alert instance", alertGroupId); - return new AlertSendResponseCommand(sendResponseStatus, sendResponseResults); + log.error("Alert GroupId {} send error : not found alert instance", alertGroupId); + return new AlertSendResponseCommand(false, sendResponseResults); } for (AlertPluginInstance instance : alertInstanceList) { AlertResult alertResult = this.alertResultHandler(instance, alertData); AlertSendResponseResult alertSendResponseResult = new AlertSendResponseResult( - Boolean.parseBoolean(String.valueOf(alertResult.getStatus())), alertResult.getMessage()); + Boolean.parseBoolean(String.valueOf(alertResult.getStatus())), alertResult.getMessage()); sendResponseStatus = sendResponseStatus && alertSendResponseResult.getStatus(); sendResponseResults.add(alertSendResponseResult); } @@ -138,15 +123,14 @@ public class AlertSender { * @return AlertResult */ private AlertResult 
alertResultHandler(AlertPluginInstance instance, AlertData alertData) { - String pluginName = alertPluginManager.getPluginNameById(instance.getPluginDefineId()); - AlertChannel alertChannel = alertPluginManager.getAlertChannelMap().get(pluginName); + Optional alertChannel = alertPluginManager.getAlertChannel(instance.getPluginDefineId()); AlertResult alertResultExtend = new AlertResult(); String pluginInstanceName = instance.getInstanceName(); - if (alertChannel == null) { + if (!alertChannel.isPresent()) { String message = String.format("Alert Plugin %s send error : return value is null", pluginInstanceName); alertResultExtend.setStatus(String.valueOf(false)); alertResultExtend.setMessage(message); - logger.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginName); + log.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, instance.getPluginDefineId()); return alertResultExtend; } @@ -156,29 +140,27 @@ public class AlertSender { alertInfo.setAlertParams(paramsMap); AlertResult alertResult; try { - alertResult = alertChannel.process(alertInfo); + alertResult = alertChannel.get().process(alertInfo); } catch (Exception e) { alertResult = new AlertResult("false", e.getMessage()); - logger.error("send alert error alert data id :{},", alertData.getId(), e); + log.error("send alert error alert data id :{},", alertData.getId(), e); } - if (alertResult == null) { String message = String.format("Alert Plugin %s send error : return alertResult value is null", pluginInstanceName); alertResultExtend.setStatus(String.valueOf(false)); alertResultExtend.setMessage(message); - logger.info("Alert Plugin {} send error : return alertResult value is null", pluginInstanceName); + log.info("Alert Plugin {} send error : return alertResult value is null", pluginInstanceName); } else if (!Boolean.parseBoolean(String.valueOf(alertResult.getStatus()))) { alertResultExtend.setStatus(String.valueOf(false)); 
alertResultExtend.setMessage(alertResult.getMessage()); - logger.info("Alert Plugin {} send error : {}", pluginInstanceName, alertResult.getMessage()); + log.info("Alert Plugin {} send error : {}", pluginInstanceName, alertResult.getMessage()); } else { String message = String.format("Alert Plugin %s send success", pluginInstanceName); alertResultExtend.setStatus(String.valueOf(true)); alertResultExtend.setMessage(message); - logger.info("Alert Plugin {} send success", pluginInstanceName); + log.info("Alert Plugin {} send success", pluginInstanceName); } return alertResultExtend; } - } diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java similarity index 31% rename from dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java index b0a8c0348db1c91a91d60fe0e51ac34054565a12..f1aa562d06beddd294fdb8dc44577df205ee9f52 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java @@ -17,140 +17,108 @@ package org.apache.dolphinscheduler.alert; -import static org.apache.dolphinscheduler.alert.utils.Constants.ALERT_PROPERTIES_PATH; import static org.apache.dolphinscheduler.common.Constants.ALERT_RPC_PORT; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.alert.processor.AlertRequestProcessor; -import org.apache.dolphinscheduler.alert.runner.AlertSender; -import org.apache.dolphinscheduler.alert.utils.Constants; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; import 
org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.DaoFactory; import org.apache.dolphinscheduler.dao.PluginDao; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyServerConfig; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.utils.StringUtils; +import java.io.Closeable; import java.util.List; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import javax.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -import com.google.common.collect.ImmutableList; - -public class AlertServer { - - private static final Logger logger = LoggerFactory.getLogger(AlertServer.class); - - private final PluginDao pluginDao = DaoFactory.getDaoInstance(PluginDao.class); - - private final AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - - private AlertPluginManager alertPluginManager; - - public static final String ALERT_PLUGIN_BINDING = "alert.plugin.binding"; - - public static final String ALERT_PLUGIN_DIR = "alert.plugin.dir"; - - public static final String MAVEN_LOCAL_REPOSITORY = "maven.local.repository"; +import org.springframework.boot.WebApplicationType; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.event.EventListener; + +@EnableAutoConfiguration +@ComponentScan(value = { + "org.apache.dolphinscheduler.alert", + "org.apache.dolphinscheduler.dao" +}) +public class AlertServer implements Closeable { + 
private static final Logger log = LoggerFactory.getLogger(AlertServer.class); + + private final PluginDao pluginDao; + private final AlertDao alertDao; + private final AlertPluginManager alertPluginManager; + private final AlertSender alertSender; + private final AlertRequestProcessor alertRequestProcessor; private NettyRemotingServer server; - private static class AlertServerHolder { - private static final AlertServer INSTANCE = new AlertServer(); + public AlertServer(PluginDao pluginDao, AlertDao alertDao, AlertPluginManager alertPluginManager, AlertSender alertSender, AlertRequestProcessor alertRequestProcessor) { + this.pluginDao = pluginDao; + this.alertDao = alertDao; + this.alertPluginManager = alertPluginManager; + this.alertSender = alertSender; + this.alertRequestProcessor = alertRequestProcessor; } - public static AlertServer getInstance() { - return AlertServerHolder.INSTANCE; + public static void main(String[] args) { + new SpringApplicationBuilder(AlertServer.class) + .web(WebApplicationType.NONE) + .run(args); } - private AlertServer() { + @EventListener + public void start(ApplicationReadyEvent readyEvent) { + log.info("Starting Alert server"); + + checkTable(); + startServer(); + + Executors.newScheduledThreadPool(1) + .scheduleAtFixedRate(new Sender(), 5, 5, TimeUnit.SECONDS); + } + @Override + @PreDestroy + public void close() { + server.close(); } private void checkTable() { if (!pluginDao.checkPluginDefineTableExist()) { - logger.error("Plugin Define Table t_ds_plugin_define Not Exist . Please Create it First !"); + log.error("Plugin Define Table t_ds_plugin_define Not Exist . 
Please Create it First !"); System.exit(1); } } - private void initPlugin() { - DolphinPluginManagerConfig alertPluginManagerConfig = new DolphinPluginManagerConfig(); - alertPluginManagerConfig.setPlugins(PropertyUtils.getString(ALERT_PLUGIN_BINDING)); - if (StringUtils.isNotBlank(PropertyUtils.getString(ALERT_PLUGIN_DIR))) { - alertPluginManagerConfig.setInstalledPluginsDir(PropertyUtils.getString(ALERT_PLUGIN_DIR, Constants.ALERT_PLUGIN_PATH).trim()); - } - - if (StringUtils.isNotBlank(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY))) { - alertPluginManagerConfig.setMavenLocalRepository(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY).trim()); - } - - alertPluginManager = new AlertPluginManager(); - DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(alertPluginManager)); - try { - alertPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("Load Alert Plugin Failed !", e); - } - } - - private void initRemoteServer() { + private void startServer() { NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(ALERT_RPC_PORT); - this.server = new NettyRemotingServer(serverConfig); - this.server.registerProcessor(CommandType.ALERT_SEND_REQUEST, new AlertRequestProcessor(alertDao, alertPluginManager)); - this.server.start(); - } - - private void runSender() { - new Thread(new Sender()).start(); - } - public void start() { - PropertyUtils.loadPropertyFile(ALERT_PROPERTIES_PATH); - checkTable(); - initPlugin(); - initRemoteServer(); - logger.info("alert server ready start "); - runSender(); - } - - public void stop() { - this.server.close(); - logger.info("alert server shut down"); + server = new NettyRemotingServer(serverConfig); + server.registerProcessor(CommandType.ALERT_SEND_REQUEST, alertRequestProcessor); + server.start(); } final class Sender implements Runnable { @Override public void run() { - while (Stopper.isRunning()) { - try { - 
Thread.sleep(Constants.ALERT_SCAN_INTERVAL); - } catch (InterruptedException e) { - logger.error(e.getMessage(), e); - Thread.currentThread().interrupt(); - } - if (alertPluginManager == null || alertPluginManager.getAlertChannelMap().size() == 0) { - logger.warn("No Alert Plugin . Cannot send alert info. "); - } else { - List alerts = alertDao.listWaitExecutionAlert(); - new AlertSender(alerts, alertDao, alertPluginManager).run(); - } + if (!Stopper.isRunning()) { + return; } - } - } - public static void main(String[] args) { - AlertServer alertServer = AlertServer.getInstance(); - alertServer.start(); - Runtime.getRuntime().addShutdownHook(new Thread(alertServer::stop)); + try { + final List alerts = alertDao.listPendingAlerts(); + alertSender.send(alerts); + } catch (Exception e) { + log.error("Failed to send alert", e); + } + } } - } diff --git a/dolphinscheduler-alert/src/main/resources/logback-alert.xml b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/logback-alert.xml similarity index 95% rename from dolphinscheduler-alert/src/main/resources/logback-alert.xml rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/logback-alert.xml index 1718947dd13a6eaaf650d8d837629026d65398fd..48b3c35bffcc3eee91dedbe6030dfb5925c27ba1 100644 --- a/dolphinscheduler-alert/src/main/resources/logback-alert.xml +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources/logback-alert.xml @@ -17,7 +17,7 @@ --> - + @@ -49,4 +49,4 @@ - \ No newline at end of file + diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java similarity index 57% rename from dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java rename to 
dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java index 5c31225edca95e72324da8ddfdc7902e88bacd24..b1f8e54240587a2c9656c44cae249284f9147e75 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/plugin/EmailAlertPluginTest.java @@ -17,54 +17,62 @@ package org.apache.dolphinscheduler.alert.plugin; +import org.apache.dolphinscheduler.alert.AlertPluginManager; +import org.apache.dolphinscheduler.alert.AlertSender; import org.apache.dolphinscheduler.alert.AlertServer; -import org.apache.dolphinscheduler.alert.runner.AlertSender; -import org.apache.dolphinscheduler.alert.utils.Constants; +import org.apache.dolphinscheduler.alert.api.AlertConstants; +import org.apache.dolphinscheduler.alert.api.ShowType; import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.ProfileType; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.DaoFactory; import org.apache.dolphinscheduler.dao.PluginDao; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.PluginDefine; -import org.apache.dolphinscheduler.spi.alert.AlertConstants; -import org.apache.dolphinscheduler.spi.alert.ShowType; -import org.apache.dolphinscheduler.spi.params.input.InputParam; import org.apache.dolphinscheduler.spi.params.PasswordParam; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; -import org.apache.dolphinscheduler.spi.params.radio.RadioParam; import 
org.apache.dolphinscheduler.spi.params.base.DataType; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; import org.apache.dolphinscheduler.spi.params.base.Validate; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.radio.RadioParam; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.junit4.SpringRunner; import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; -import org.junit.Assert; -import org.junit.Test; - -import com.google.common.collect.ImmutableList; - -/** - * test load and use alert plugin - */ +@ActiveProfiles(ProfileType.H2) +@RunWith(SpringRunner.class) +@SpringBootTest(classes = AlertServer.class) public class EmailAlertPluginTest { - - private AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); - private PluginDao pluginDao = DaoFactory.getDaoInstance(PluginDao.class); + @Autowired + private AlertDao alertDao; + @Autowired + private PluginDao pluginDao; + @Autowired + private AlertPluginManager manager; + @Autowired + private AlertSender alertSender; + + @BeforeClass + public static void setUpClass() { + System.setProperty("spring.profiles.active", "h2"); + } @Test public void testRunSend() { - //create alert group AlertGroup alertGroup = new AlertGroup(); alertGroup.setDescription("test alert group 1"); @@ -78,7 +86,7 @@ public class EmailAlertPluginTest { 
map1.put("mysql service name", "mysql200"); map1.put("mysql address", "192.168.xx.xx"); map1.put("port", "3306"); - map1.put(AlertConstants.SHOW_TYPE, ShowType.TEXT.getDescp()); + map1.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TEXT.getDescp()); map1.put("no index of number", "80"); map1.put("database client connections", "190"); @@ -87,7 +95,7 @@ public class EmailAlertPluginTest { map2.put("mysql address", "192.168.xx.xx"); map2.put("port", "3306"); map2.put("no index of number", "10"); - map1.put(AlertConstants.SHOW_TYPE, ShowType.TABLE.getDescp()); + map1.put(AlertConstants.NAME_SHOW_TYPE, ShowType.TABLE.getDescp()); map2.put("database client connections", "90"); List> maps = new ArrayList<>(); @@ -102,42 +110,20 @@ public class EmailAlertPluginTest { List alertList = new ArrayList<>(); alertList.add(alert1); - //load email alert plugin - AlertPluginManager alertPluginManager = new AlertPluginManager(); - DolphinPluginManagerConfig alertPluginManagerConfig = new DolphinPluginManagerConfig(); - String path = DolphinPluginLoader.class.getClassLoader().getResource("").getPath(); - alertPluginManagerConfig.setPlugins(path + "../../../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml"); - if (StringUtils.isNotBlank(PropertyUtils.getString(AlertServer.ALERT_PLUGIN_DIR))) { - alertPluginManagerConfig.setInstalledPluginsDir(PropertyUtils.getString(AlertServer.ALERT_PLUGIN_DIR, Constants.ALERT_PLUGIN_PATH).trim()); - } - - if (StringUtils.isNotBlank(PropertyUtils.getString(AlertServer.MAVEN_LOCAL_REPOSITORY))) { - alertPluginManagerConfig.setMavenLocalRepository(PropertyUtils.getString(AlertServer.MAVEN_LOCAL_REPOSITORY).trim()); - } - - DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(alertPluginManager)); - try { - alertPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("load Alert Plugin Failed !", e); - } - //create email alert plugin instance 
AlertPluginInstance alertPluginInstance = new AlertPluginInstance(); alertPluginInstance.setCreateTime(new Date()); alertPluginInstance.setInstanceName("test email alert"); - List pluginDefineList = pluginDao.getPluginDefineMapper().queryByNameAndType("Email", "alert"); - if (pluginDefineList == null || pluginDefineList.size() == 0) { + PluginDefine pluginDefine = pluginDao.getPluginDefineMapper().queryByNameAndType("Email", "alert"); + if (pluginDefine == null) { throw new RuntimeException("no alert plugin be load"); } - PluginDefine pluginDefine = pluginDefineList.get(0); alertPluginInstance.setPluginDefineId(pluginDefine.getId()); alertPluginInstance.setPluginInstanceParams(getEmailAlertParams()); alertDao.getAlertPluginInstanceMapper().insert(alertPluginInstance); - AlertSender alertSender = new AlertSender(alertList, alertDao, alertPluginManager); - alertSender.run(); + alertSender.send(alertList); Alert alertResult = alertDao.getAlertMapper().selectById(alert1.getId()); Assert.assertNotNull(alertResult); @@ -153,74 +139,74 @@ public class EmailAlertPluginTest { List paramsList = new ArrayList<>(); InputParam receivesParam = InputParam.newBuilder("receivers", "receivers") - .setValue("540957506@qq.com") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + .setValue("540957506@qq.com") + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); InputParam mailSmtpHost = InputParam.newBuilder("mailServerHost", "mail.smtp.host") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue("smtp.exmail.qq.com") - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue("smtp.exmail.qq.com") + .build(); InputParam mailSmtpPort = InputParam.newBuilder("mailServerPort", "mail.smtp.port") - .addValidate(Validate.newBuilder() - .setRequired(true) - .setType(DataType.NUMBER.getDataType()) - .build()) - .setValue(25) - .build(); + .addValidate(Validate.newBuilder() + .setRequired(true) + 
.setType(DataType.NUMBER.getDataType()) + .build()) + .setValue(25) + .build(); InputParam mailSender = InputParam.newBuilder("mailSender", "mail.sender") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue("easyscheduler@analysys.com.cn") - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue("easyscheduler@analysys.com.cn") + .build(); RadioParam enableSmtpAuth = RadioParam.newBuilder("enableSmtpAuth", "mail.smtp.auth") - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue(true) - .build(); + .addParamsOptions(new ParamsOptions("YES", true, false)) + .addParamsOptions(new ParamsOptions("NO", false, false)) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue(true) + .build(); InputParam mailUser = InputParam.newBuilder("mailUser", "mail.user") - .setPlaceholder("if enable use authentication, you need input user") - .setValue("easyscheduler@analysys.com.cn") - .build(); + .setPlaceholder("if enable use authentication, you need input user") + .setValue("easyscheduler@analysys.com.cn") + .build(); PasswordParam mailPassword = PasswordParam.newBuilder("mailPasswd", "mail.passwd") - .setPlaceholder("if enable use authentication, you need input password") - .setValue("xxxxxxx") - .build(); + .setPlaceholder("if enable use authentication, you need input password") + .setValue("xxxxxxx") + .build(); RadioParam enableTls = RadioParam.newBuilder("starttlsEnable", "mail.smtp.starttls.enable") - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue(true) - .build(); + .addParamsOptions(new ParamsOptions("YES", true, false)) + .addParamsOptions(new ParamsOptions("NO", false, false)) + 
.addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue(true) + .build(); RadioParam enableSsl = RadioParam.newBuilder("sslEnable", "mail.smtp.ssl.enable") - .addParamsOptions(new ParamsOptions("YES", true, false)) - .addParamsOptions(new ParamsOptions("NO", false, false)) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue(false) - .build(); + .addParamsOptions(new ParamsOptions("YES", true, false)) + .addParamsOptions(new ParamsOptions("NO", false, false)) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue(false) + .build(); InputParam sslTrust = InputParam.newBuilder("mailSmtpSslTrust", "mail.smtp.ssl.trust") - .addValidate(Validate.newBuilder().setRequired(true).build()) - .setValue("smtp.exmail.qq.com") - .build(); + .addValidate(Validate.newBuilder().setRequired(true).build()) + .setValue("smtp.exmail.qq.com") + .build(); List emailShowTypeList = new ArrayList<>(); emailShowTypeList.add(new ParamsOptions(ShowType.TABLE.getDescp(), ShowType.TABLE.getDescp(), false)); emailShowTypeList.add(new ParamsOptions(ShowType.TEXT.getDescp(), ShowType.TEXT.getDescp(), false)); emailShowTypeList.add(new ParamsOptions(ShowType.ATTACHMENT.getDescp(), ShowType.ATTACHMENT.getDescp(), false)); emailShowTypeList.add(new ParamsOptions(ShowType.TABLEATTACHMENT.getDescp(), ShowType.TABLEATTACHMENT.getDescp(), false)); - RadioParam showType = RadioParam.newBuilder(AlertConstants.SHOW_TYPE, "showType") - .setOptions(emailShowTypeList) - .setValue(ShowType.TABLE.getDescp()) - .addValidate(Validate.newBuilder().setRequired(true).build()) - .build(); + RadioParam showType = RadioParam.newBuilder(AlertConstants.NAME_SHOW_TYPE, "showType") + .setOptions(emailShowTypeList) + .setValue(ShowType.TABLE.getDescp()) + .addValidate(Validate.newBuilder().setRequired(true).build()) + .build(); paramsList.add(receivesParam); paramsList.add(mailSmtpHost); diff --git 
a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java similarity index 77% rename from dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java index 052d2f3d55fcfd931b84ed8100d35727580d0087..20e67687a4568baeffc902a6ceac8d39ee565533 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/processor/AlertRequestProcessorTest.java @@ -17,7 +17,10 @@ package org.apache.dolphinscheduler.alert.processor; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import static org.mockito.Mockito.mock; + +import org.apache.dolphinscheduler.alert.AlertRequestProcessor; +import org.apache.dolphinscheduler.alert.AlertSender; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; @@ -26,30 +29,21 @@ import org.apache.dolphinscheduler.remote.command.alert.AlertSendRequestCommand; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.powermock.api.mockito.PowerMockito; import io.netty.channel.Channel; -/** - * alert request processor test - */ public class AlertRequestProcessorTest { - - private AlertDao alertDao; - private AlertPluginManager alertPluginManager; - private AlertRequestProcessor alertRequestProcessor; @Before public void before() { - alertDao = PowerMockito.mock(AlertDao.class); - alertPluginManager = 
PowerMockito.mock(AlertPluginManager.class); - alertRequestProcessor = new AlertRequestProcessor(alertDao, alertPluginManager); + final AlertDao alertDao = mock(AlertDao.class); + alertRequestProcessor = new AlertRequestProcessor(new AlertSender(alertDao, null)); } @Test public void testProcess() { - Channel channel = PowerMockito.mock(Channel.class); + Channel channel = mock(Channel.class); AlertSendRequestCommand alertSendRequestCommand = new AlertSendRequestCommand(1, "title", "content"); Command reqCommand = alertSendRequestCommand.convert2Command(); Assert.assertEquals(CommandType.ALERT_SEND_REQUEST, reqCommand.getType()); diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java similarity index 60% rename from dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java rename to dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java index 3b84bdbe671f138f10ad8ada65d7ffa541c781c2..160afeb5d9c016154e9d88059391ba3e425df480 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/test/java/org/apache/dolphinscheduler/alert/runner/AlertSenderTest.java @@ -17,34 +17,32 @@ package org.apache.dolphinscheduler.alert.runner; -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.alert.AlertPluginManager; +import org.apache.dolphinscheduler.alert.AlertSender; +import org.apache.dolphinscheduler.alert.api.AlertChannel; +import org.apache.dolphinscheduler.alert.api.AlertResult; import org.apache.dolphinscheduler.dao.AlertDao; 
import org.apache.dolphinscheduler.dao.PluginDao; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.PluginDefine; import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertResult; import java.util.ArrayList; import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; +import java.util.Optional; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * alert sender test - */ public class AlertSenderTest { - private static final Logger logger = LoggerFactory.getLogger(AlertSenderTest.class); private AlertDao alertDao; @@ -55,10 +53,9 @@ public class AlertSenderTest { @Before public void before() { - alertDao = PowerMockito.mock(AlertDao.class); - pluginDao = PowerMockito.mock(PluginDao.class); - alertPluginManager = PowerMockito.mock(AlertPluginManager.class); - + alertDao = mock(AlertDao.class); + pluginDao = mock(PluginDao.class); + alertPluginManager = mock(AlertPluginManager.class); } @Test @@ -70,12 +67,12 @@ public class AlertSenderTest { alertSender = new AlertSender(alertDao, alertPluginManager); //1.alert instance does not exist - PowerMockito.when(alertDao.listInstanceByAlertGroupId(alertGroupId)).thenReturn(null); + when(alertDao.listInstanceByAlertGroupId(alertGroupId)).thenReturn(null); AlertSendResponseCommand alertSendResponseCommand = alertSender.syncHandler(alertGroupId, title, content); Assert.assertFalse(alertSendResponseCommand.getResStatus()); alertSendResponseCommand.getResResults().forEach(result -> - logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); + 
logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); //2.alert plugin does not exist int pluginDefineId = 1; @@ -83,59 +80,52 @@ public class AlertSenderTest { String pluginInstanceName = "alert-instance-mail"; List alertInstanceList = new ArrayList<>(); AlertPluginInstance alertPluginInstance = new AlertPluginInstance( - pluginDefineId, pluginInstanceParams, pluginInstanceName); + pluginDefineId, pluginInstanceParams, pluginInstanceName); alertInstanceList.add(alertPluginInstance); - PowerMockito.when(alertDao.listInstanceByAlertGroupId(1)).thenReturn(alertInstanceList); + when(alertDao.listInstanceByAlertGroupId(1)).thenReturn(alertInstanceList); String pluginName = "alert-plugin-mail"; PluginDefine pluginDefine = new PluginDefine(pluginName, "1", null); - PowerMockito.when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); + when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); alertSendResponseCommand = alertSender.syncHandler(alertGroupId, title, content); Assert.assertFalse(alertSendResponseCommand.getResStatus()); alertSendResponseCommand.getResResults().forEach(result -> - logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); + logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); //3.alert result value is null - AlertChannel alertChannelMock = PowerMockito.mock(AlertChannel.class); - PowerMockito.when(alertChannelMock.process(Mockito.any())).thenReturn(null); - Map alertChannelMap = new ConcurrentHashMap<>(); - alertChannelMap.put(pluginName, alertChannelMock); - PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap); - PowerMockito.when(alertPluginManager.getPluginNameById(Mockito.anyInt())).thenReturn("alert-plugin-mail"); + AlertChannel alertChannelMock = mock(AlertChannel.class); + 
when(alertChannelMock.process(Mockito.any())).thenReturn(null); + when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); alertSendResponseCommand = alertSender.syncHandler(alertGroupId, title, content); Assert.assertFalse(alertSendResponseCommand.getResStatus()); alertSendResponseCommand.getResResults().forEach(result -> - logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); + logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); //4.abnormal information inside the alert plug-in code AlertResult alertResult = new AlertResult(); alertResult.setStatus(String.valueOf(false)); alertResult.setMessage("Abnormal information inside the alert plug-in code"); - PowerMockito.when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); - alertChannelMap = new ConcurrentHashMap<>(); - alertChannelMap.put(pluginName, alertChannelMock); - PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap); + when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); + when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); alertSendResponseCommand = alertSender.syncHandler(alertGroupId, title, content); Assert.assertFalse(alertSendResponseCommand.getResStatus()); alertSendResponseCommand.getResResults().forEach(result -> - logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); + logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); //5.alert plugin send success alertResult = new AlertResult(); alertResult.setStatus(String.valueOf(true)); alertResult.setMessage(String.format("Alert Plugin %s send success", pluginInstanceName)); - PowerMockito.when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); - alertChannelMap = new ConcurrentHashMap<>(); - 
alertChannelMap.put(pluginName, alertChannelMock); - PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap); + when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); + when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); alertSendResponseCommand = alertSender.syncHandler(alertGroupId, title, content); Assert.assertTrue(alertSendResponseCommand.getResStatus()); alertSendResponseCommand.getResResults().forEach(result -> - logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); + logger.info("alert send response result, status:{}, message:{}", result.getStatus(), result.getMessage())); } @@ -151,33 +141,28 @@ public class AlertSenderTest { alert.setContent(content); alertList.add(alert); - alertSender = new AlertSender(alertList, alertDao, alertPluginManager); + alertSender = new AlertSender(alertDao, alertPluginManager); int pluginDefineId = 1; String pluginInstanceParams = "alert-instance-mail-params"; String pluginInstanceName = "alert-instance-mail"; List alertInstanceList = new ArrayList<>(); AlertPluginInstance alertPluginInstance = new AlertPluginInstance( - pluginDefineId, pluginInstanceParams, pluginInstanceName); + pluginDefineId, pluginInstanceParams, pluginInstanceName); alertInstanceList.add(alertPluginInstance); - PowerMockito.when(alertDao.listInstanceByAlertGroupId(alertGroupId)).thenReturn(alertInstanceList); + when(alertDao.listInstanceByAlertGroupId(alertGroupId)).thenReturn(alertInstanceList); String pluginName = "alert-plugin-mail"; PluginDefine pluginDefine = new PluginDefine(pluginName, "1", null); - PowerMockito.when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); - PowerMockito.when(alertPluginManager.getPluginNameById(1)).thenReturn("alert-instance-mail"); + when(pluginDao.getPluginDefineById(pluginDefineId)).thenReturn(pluginDefine); AlertResult alertResult = new AlertResult(); 
alertResult.setStatus(String.valueOf(true)); alertResult.setMessage(String.format("Alert Plugin %s send success", pluginInstanceName)); - AlertChannel alertChannelMock = PowerMockito.mock(AlertChannel.class); - PowerMockito.when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); - ConcurrentHashMap alertChannelMap = new ConcurrentHashMap<>(); - alertChannelMap.put(pluginName, alertChannelMock); - PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap); + AlertChannel alertChannelMock = mock(AlertChannel.class); + when(alertChannelMock.process(Mockito.any())).thenReturn(alertResult); + when(alertPluginManager.getAlertChannel(1)).thenReturn(Optional.of(alertChannelMock)); Assert.assertTrue(Boolean.parseBoolean(alertResult.getStatus())); - alertSender.run(); - + alertSender.send(alertList); } - } diff --git a/dolphinscheduler-alert/pom.xml b/dolphinscheduler-alert/pom.xml index 456c076e8f6a52a8c9e2b909f50bb23f578de5fe..80d46f4906f0cd588480de78f70ea6d7923e989d 100644 --- a/dolphinscheduler-alert/pom.xml +++ b/dolphinscheduler-alert/pom.xml @@ -1,125 +1,48 @@ - 4.0.0 - org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + 4.0.0 + pom dolphinscheduler-alert - ${project.artifactId} - jar - - - UTF-8 - - - - - org.apache.dolphinscheduler - dolphinscheduler-remote - - - - org.apache.dolphinscheduler - dolphinscheduler-dao - - - log4j-api - org.apache.logging.log4j - - - - - junit - junit - test - - - - com.fasterxml.jackson.core - jackson-core - - - - com.fasterxml.jackson.core - jackson-databind - + + dolphinscheduler-alert-api + dolphinscheduler-alert-plugins + dolphinscheduler-alert-server + + org.slf4j slf4j-api - - com.google.guava - guava - - - jsr305 - com.google.code.findbugs - - - - - ch.qos.logback - logback-classic - - - - - org.apache.poi - poi - - - - org.powermock - powermock-module-junit4 - test - - - - org.mockito - mockito-core - test - - - - 
org.powermock - powermock-api-mockito2 - test - - - org.mockito - mockito-core - - - - - - org.jacoco - org.jacoco.agent - runtime + org.springframework.boot + spring-boot-starter-test test - diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java deleted file mode 100644 index 02f4b0ff8a4e90b62a15569e805a60f09b45457a..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.alert.plugin; - -import static java.lang.String.format; -import static java.util.Objects.requireNonNull; - -import static com.google.common.base.Preconditions.checkState; - -import org.apache.dolphinscheduler.common.enums.PluginType; -import org.apache.dolphinscheduler.spi.plugin.AbstractDolphinPluginManager; -import org.apache.dolphinscheduler.dao.DaoFactory; -import org.apache.dolphinscheduler.dao.PluginDao; -import org.apache.dolphinscheduler.dao.entity.PluginDefine; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.classloader.ThreadContextClassLoader; -import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; -import org.apache.dolphinscheduler.spi.params.base.PluginParams; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * load the configured alert plugin and manager them - */ -public class AlertPluginManager extends AbstractDolphinPluginManager { - private static final Logger logger = LoggerFactory.getLogger(AlertPluginManager.class); - - private final Map alertChannelFactoryMap = new ConcurrentHashMap<>(); - private final Map alertChannelMap = new ConcurrentHashMap<>(); - - /** - * k->pluginDefineId v->pluginDefineName - */ - private final Map pluginDefineMap = new HashMap<>(); - - private final PluginDao pluginDao = DaoFactory.getDaoInstance(PluginDao.class); - - private void addAlertChannelFactory(AlertChannelFactory alertChannelFactory) { - requireNonNull(alertChannelFactory, "alertChannelFactory is null"); - - if (alertChannelFactoryMap.putIfAbsent(alertChannelFactory.getName(), alertChannelFactory) != null) { - throw new IllegalArgumentException(format("Alert Plugin '%s' is 
already registered", alertChannelFactory.getName())); - } - - try { - loadAlertChannel(alertChannelFactory.getName()); - } catch (Exception e) { - throw new IllegalArgumentException(format("Alert Plugin '%s' is can not load .", alertChannelFactory.getName())); - } - } - - private void loadAlertChannel(String name) { - requireNonNull(name, "name is null"); - - AlertChannelFactory alertChannelFactory = alertChannelFactoryMap.get(name); - checkState(alertChannelFactory != null, "Alert Plugin %s is not registered", name); - - try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(alertChannelFactory.getClass().getClassLoader())) { - AlertChannel alertChannel = alertChannelFactory.create(); - this.alertChannelMap.put(name, alertChannel); - } - - logger.info("-- Loaded Alert Plugin {} --", name); - } - - Map getAlertChannelFactoryMap() { - return alertChannelFactoryMap; - } - - public Map getAlertChannelMap() { - return alertChannelMap; - } - - public String getPluginNameById(int id) { - return pluginDefineMap.get(id); - } - - @Override - public void installPlugin(DolphinSchedulerPlugin dolphinSchedulerPlugin) { - for (AlertChannelFactory alertChannelFactory : dolphinSchedulerPlugin.getAlertChannelFactorys()) { - logger.info("Registering Alert Plugin '{}'", alertChannelFactory.getName()); - this.addAlertChannelFactory(alertChannelFactory); - List params = alertChannelFactory.getParams(); - String nameEn = alertChannelFactory.getName(); - String paramsJson = PluginParamsTransfer.transferParamsToJson(params); - - PluginDefine pluginDefine = new PluginDefine(nameEn, PluginType.ALERT.getDesc(), paramsJson); - int id = pluginDao.addOrUpdatePluginDefine(pluginDefine); - pluginDefineMap.put(id, pluginDefine.getPluginName()); - } - } -} diff --git a/dolphinscheduler-alert/src/main/resources/alert.properties b/dolphinscheduler-alert/src/main/resources/alert.properties deleted file mode 100644 index 
c41ae61237d0212932380c5cd7ac86471f826148..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert/src/main/resources/alert.properties +++ /dev/null @@ -1,30 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -#This configuration file configures the configuration parameters related to the AlertServer. -#These parameters are only related to the AlertServer, and it has nothing to do with the specific Alert Plugin. -#eg : max retry num. -#eg : Alert Server Listener port - -#alert.plugin.dir config the Alert Plugin dir . AlertServer while find and load the Alert Plugin Jar from this dir when deploy and start AlertServer on the server . 
-#alert.plugin.dir=lib/plugin/alert - -#maven.local.repository=/Users/gaojun/Documents/jianguoyun/localRepository - -#alert.plugin.binding config the Alert Plugin need be load when development and run in IDE -#alert.plugin.binding=\ -# ./dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/AlertServerTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/AlertServerTest.java deleted file mode 100644 index 38fb6b055ecd30b7b10d4b5f815d7821552aba78..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/AlertServerTest.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.alert; - -import org.apache.dolphinscheduler.alert.plugin.AlertPluginManager; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.alert.runner.AlertSender; -import org.apache.dolphinscheduler.alert.utils.Constants; -import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.DaoFactory; -import org.apache.dolphinscheduler.dao.PluginDao; -import org.apache.dolphinscheduler.remote.NettyRemotingServer; -import org.apache.dolphinscheduler.spi.alert.AlertChannel; - -import java.util.concurrent.ConcurrentHashMap; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({AlertServer.class, DaoFactory.class}) -public class AlertServerTest { - - @Before - public void before() { - - } - - @Test - public void testMain() throws Exception { - AlertDao alertDao = PowerMockito.mock(AlertDao.class); - PowerMockito.mockStatic(DaoFactory.class); - PowerMockito.when(DaoFactory.getDaoInstance(AlertDao.class)).thenReturn(alertDao); - - PluginDao pluginDao = PowerMockito.mock(PluginDao.class); - PowerMockito.when(DaoFactory.getDaoInstance(PluginDao.class)).thenReturn(pluginDao); - PowerMockito.when(pluginDao.checkPluginDefineTableExist()).thenReturn(true); - - AlertChannel alertChannelMock = PowerMockito.mock(AlertChannel.class); - - AlertPluginManager alertPluginManager = PowerMockito.mock(AlertPluginManager.class); - PowerMockito.whenNew(AlertPluginManager.class).withNoArguments().thenReturn(alertPluginManager); - ConcurrentHashMap alertChannelMap = new ConcurrentHashMap<>(); - 
alertChannelMap.put("pluginName", alertChannelMock); - PowerMockito.when(alertPluginManager.getPluginNameById(Mockito.anyInt())).thenReturn("pluginName"); - PowerMockito.when(alertPluginManager.getAlertChannelMap()).thenReturn(alertChannelMap); - - DolphinPluginManagerConfig alertPluginManagerConfig = PowerMockito.mock(DolphinPluginManagerConfig.class); - PowerMockito.whenNew(DolphinPluginManagerConfig.class).withNoArguments().thenReturn(alertPluginManagerConfig); - - NettyRemotingServer nettyRemotingServer = PowerMockito.mock(NettyRemotingServer.class); - PowerMockito.whenNew(NettyRemotingServer.class).withAnyArguments().thenReturn(nettyRemotingServer); - AlertSender alertSender = PowerMockito.mock(AlertSender.class); - PowerMockito.whenNew(AlertSender.class).withAnyArguments().thenReturn(alertSender); - - DolphinPluginLoader dolphinPluginLoader = PowerMockito.mock(DolphinPluginLoader.class); - PowerMockito.whenNew(DolphinPluginLoader.class).withAnyArguments().thenReturn(dolphinPluginLoader); - - AlertServer alertServer = AlertServer.getInstance(); - Assert.assertNotNull(alertServer); - - new Thread(() -> alertServer.start()).start(); - - Thread.sleep(5 * Constants.ALERT_SCAN_INTERVAL); - - alertServer.stop(); - } - -} diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java deleted file mode 100644 index aceb6a1f72cb836f89d3b37727d1044114371bcd..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.alert.plugin; - -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; - -import java.util.Objects; - -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; - -import com.google.common.collect.ImmutableList; - -/** - * DolphinPluginLoader Tester. - */ -@Ignore("load jar fail,don't care plugin,should mock plugin") -public class DolphinPluginLoaderTest { - - /** - * Method: loadPlugins() - */ - @Test - public void testLoadPlugins() { - AlertPluginManager alertPluginManager = new AlertPluginManager(); - DolphinPluginManagerConfig alertPluginManagerConfig = new DolphinPluginManagerConfig(); - String path = Objects.requireNonNull(DolphinPluginLoader.class.getClassLoader().getResource("")).getPath(); - alertPluginManagerConfig.setPlugins(path + "../../../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml"); - DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(alertPluginManager)); - try { - alertPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("load Alert Plugin Failed !", e); - } - - Assert.assertNotNull(alertPluginManager.getAlertChannelFactoryMap().get("Email")); - } -} diff --git 
a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/PropertyUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/PropertyUtilsTest.java deleted file mode 100644 index d72c09ae42befab678de98608d85add05d69295d..0000000000000000000000000000000000000000 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/PropertyUtilsTest.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.alert.utils; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; - -import org.apache.dolphinscheduler.common.enums.NodeType; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; - -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Test PropertyUtils - * and the resource path is src/test/resources/alert.properties. 
- */ -public class PropertyUtilsTest { - - private static final Logger logger = LoggerFactory.getLogger(PropertyUtilsTest.class); - - /** - * Test getString - */ - @Test - public void testGetString() { - - //Expected "EMAIL" - String result = PropertyUtils.getString("alert.type"); - logger.info(result); - assertEquals("EMAIL", result); - - //Expected "xxx.xxx.test" - result = PropertyUtils.getString("mail.server.host"); - assertEquals("xxx.xxx.test", result); - - //If key is undefine in alert.properties, then return null - result = PropertyUtils.getString("abc"); - assertNull(result); - - //If key is null, then return null - result = PropertyUtils.getString(null); - assertNull(result); - } - - - /** - * Test getBoolean - */ - @Test - public void testGetBoolean() { - - //Expected true - Boolean result = PropertyUtils.getBoolean("mail.smtp.starttls.enable"); - assertTrue(result); - - //Expected false - result = PropertyUtils.getBoolean("mail.smtp.ssl.enable"); - assertFalse(result); - - //If key is undefine in alert.properties, then return null - result = PropertyUtils.getBoolean("abc"); - assertFalse(result); - - //If key is null, then return false - result = PropertyUtils.getBoolean(null); - assertFalse(result); - } - - /** - * Test getLong - */ - @Test - public void testGetLong() { - - //Expected 25 - long result = PropertyUtils.getLong("mail.server.port"); - assertSame(25L, result); - - //If key is null, then return -1 - result = PropertyUtils.getLong(null); - assertSame(-1L, result); - - //If key is undefine in alert.properties, then return -1 - result = PropertyUtils.getLong("abc"); - assertSame(-1L, result); - - //If key is undefine in alert.properties, and there is a defaultval, then return defaultval - result = PropertyUtils.getLong("abc", 200); - assertEquals(200L, result); - - //If the value can not parse to long ,it will log the error and return -1L - result = PropertyUtils.getLong("test.server.testnumber"); - assertSame(-1L, result); - } - - /** - * Test 
getDouble - */ - @Test - public void testGetDouble() { - - //Expected 3.0 - double result = PropertyUtils.getDouble("test.server.factor", 3.0); - assertEquals(3.0, result, 0); - - //If key is null, then return -1.0 - result = PropertyUtils.getDouble(null, -1.0); - assertEquals(-1.0, result, 0); - - //If key is undefine in alert.properties, then return -1 - result = PropertyUtils.getDouble("abc", -1.0); - assertEquals(-1.0, result, 0); - - //If key is undefine in alert.properties, and there is a defaultval, then return defaultval - result = PropertyUtils.getDouble("abc", 5.0); - assertEquals(5.0, result, 0); - - //If the value can not parse to double ,it will log the error and return -1.0 - result = PropertyUtils.getDouble("test.server.testnumber", -1.0); - assertEquals(-1.0, result, 0); - } - - /** - * Test getArray - */ - @Test - public void testGetArray() { - - //Expected length 3 - String[] result = PropertyUtils.getArray("test.server.list", ","); - assertEquals(result.length, 3); - - //Equal array values - assertEquals("xxx.xxx.test1", result[0]); - assertEquals("xxx.xxx.test2", result[1]); - assertEquals("xxx.xxx.test3", result[2]); - - //If key is null, then return -1 - result = PropertyUtils.getArray(null, ","); - assertNull(result); - - //If key is undefine in alert.properties, then return null - result = PropertyUtils.getArray("abc", ","); - assertNull(result); - - //If splitStr is null, then return null - result = PropertyUtils.getArray("test.server.list", null); - assertNull(result); - } - - /** - * test getInt - */ - @Test - public void testGetInt() { - - //Expected 25 - int result = PropertyUtils.getInt("mail.server.port"); - assertSame(25, result); - - //If key is null, then return -1 - result = PropertyUtils.getInt(null); - assertSame(-1, result); - - //If key is undefine in alert.properties, then return -1 - result = PropertyUtils.getInt("abc"); - assertSame(-1, result); - - //If key is undefine in alert.properties, and there is a defaultval, then 
return defaultval - result = PropertyUtils.getInt("abc", 300); - assertEquals(300, result); - - //If the value can not parse to int ,it will log the error and return -1 - result = PropertyUtils.getInt("test.server.testnumber"); - assertSame(-1, result); - } - - /** - * Test getEnum - */ - @Test - public void testGetEnum() { - - //Expected MASTER - NodeType nodeType = PropertyUtils.getEnum("test.server.enum1", NodeType.class, NodeType.WORKER); - assertEquals(NodeType.MASTER, nodeType); - - //Expected DEAD_SERVER - nodeType = PropertyUtils.getEnum("test.server.enum2", NodeType.class, NodeType.WORKER); - assertEquals(NodeType.DEAD_SERVER, nodeType); - - //If key is null, then return defaultval - nodeType = PropertyUtils.getEnum(null, NodeType.class, NodeType.WORKER); - assertEquals(NodeType.WORKER, nodeType); - - //If the value doesn't define in enum ,it will log the error and return -1 - nodeType = PropertyUtils.getEnum("test.server.enum3", NodeType.class, NodeType.WORKER); - assertEquals(NodeType.WORKER, nodeType); - } - -} diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index c6c976003844c828b38e5872a37b10b44caf1af5..ed03dbc9c327c63a4f2212218d52aa59038ad682 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -16,13 +16,12 @@ ~ limitations under the License. 
--> - + 4.0.0 org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT dolphinscheduler-api ${project.artifactId} @@ -35,6 +34,12 @@ dolphinscheduler-service + + org.apache.dolphinscheduler + dolphinscheduler-datasource-all + ${project.version} + + org.springframework.boot @@ -222,13 +227,10 @@ - - junit - junit - test + org.hibernate.validator + hibernate-validator - org.powermock powermock-module-junit4 @@ -239,20 +241,35 @@ org.powermock powermock-api-mockito2 test + + + + org.apache.curator + curator-test + ${curator.test} - org.mockito - mockito-core + org.javassist + javassist - - - - org.jacoco - org.jacoco.agent - runtime test - + + + + + org.apache.maven.plugins + maven-jar-plugin + + + *.yaml + *.yml + *.xml + + + + + diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java index 6afb9d54c2e3948add4d1ac6b298267e841a2b6b..d622fa3a5c76e6799ba1a453ab7175c3105d29b5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java @@ -17,21 +17,39 @@ package org.apache.dolphinscheduler.api; -import org.springframework.boot.SpringApplication; -import org.springframework.boot.autoconfigure.SpringBootApplication; +import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME; + +import org.apache.dolphinscheduler.common.utils.PropertyUtils; + +import javax.annotation.PostConstruct; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.web.servlet.ServletComponentScan; import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; import 
org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; -@SpringBootApplication +@EnableAutoConfiguration @ServletComponentScan -@ComponentScan(value = "org.apache.dolphinscheduler", - excludeFilters = @ComponentScan.Filter(type = FilterType.REGEX, pattern = "org.apache.dolphinscheduler.server.*")) +@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = { + @ComponentScan.Filter(type = FilterType.REGEX, pattern = { + "org.apache.dolphinscheduler.server.*", + "org.apache.dolphinscheduler.alert.*" + }) +}) public class ApiApplicationServer extends SpringBootServletInitializer { public static void main(String[] args) { - SpringApplication.run(ApiApplicationServer.class, args); + new SpringApplicationBuilder(ApiApplicationServer.class).profiles("api").run(args); } + @Value("${spring.datasource.driver-class-name}") + private String driverClassName; + + @PostConstruct + public void run() { + PropertyUtils.setValue(SPRING_DATASOURCE_DRIVER_CLASS_NAME, driverClassName); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAnnotation.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAnnotation.java index a7bee62a7ef932f54b7041d44c684296901a3452..db62bcda3afc658d26a6df58625c0ae9f101143d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAnnotation.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAnnotation.java @@ -28,7 +28,7 @@ import java.lang.annotation.Target; @Documented public @interface AccessLogAnnotation { // ignore request args - String[] ignoreRequestArgs() default {}; + String[] ignoreRequestArgs() default {"loginUser"}; boolean ignoreRequest() default false; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java index a34d04124267489d53befcba434602228d5bb6b1..b20b6d84254a25fd76d423227cb6cb1e6257cc77 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java @@ -19,14 +19,17 @@ package org.apache.dolphinscheduler.api.aspect; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import java.lang.reflect.Method; import java.util.Arrays; import java.util.HashMap; -import java.util.List; import java.util.Set; import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; import javax.servlet.http.HttpServletRequest; @@ -46,6 +49,12 @@ import org.springframework.web.context.request.ServletRequestAttributes; public class AccessLogAspect { private static final Logger logger = LoggerFactory.getLogger(AccessLogAspect.class); + private static final String TRACE_ID = "traceId"; + + public static final String sensitiveDataRegEx = "(password=[\'\"]+)(\\S+)([\'\"]+)"; + + private static final Pattern sensitiveDataPattern = Pattern.compile(sensitiveDataRegEx, Pattern.CASE_INSENSITIVE); + @Pointcut("@annotation(org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation)") public void logPointCut(){ // Do nothing because of it's a pointcut @@ -60,21 +69,26 @@ public class AccessLogAspect { Method method = sign.getMethod(); AccessLogAnnotation annotation = method.getAnnotation(AccessLogAnnotation.class); - String tranceId = UUID.randomUUID().toString(); + String traceId = UUID.randomUUID().toString(); // log request if (!annotation.ignoreRequest()) { ServletRequestAttributes attributes = (ServletRequestAttributes) 
RequestContextHolder.getRequestAttributes(); if (attributes != null) { HttpServletRequest request = attributes.getRequest(); - + String traceIdFromHeader = request.getHeader(TRACE_ID); + if (!StringUtils.isEmpty(traceIdFromHeader)) { + traceId = traceIdFromHeader; + } // handle login info String userName = parseLoginInfo(request); // handle args String argsString = parseArgs(proceedingJoinPoint, annotation); - logger.info("REQUEST TRANCE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}", - tranceId, + // handle sensitive data in the string + argsString = handleSensitiveData(argsString); + logger.info("REQUEST TRACE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}", + traceId, userName, request.getRequestURI(), request.getMethod(), @@ -88,7 +102,7 @@ public class AccessLogAspect { // log response if (!annotation.ignoreResponse()) { - logger.info("RESPONSE TRANCE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", tranceId, ob, (System.currentTimeMillis() - startTime)); + logger.info("RESPONSE TRACE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", traceId, ob, (System.currentTimeMillis() - startTime)); } return ob; @@ -114,6 +128,28 @@ public class AccessLogAspect { return argsString; } + protected String handleSensitiveData(String originalData) { + Matcher matcher = sensitiveDataPattern.matcher(originalData.toLowerCase()); + IntStream stream = IntStream.builder().build(); + boolean exists = false; + while (matcher.find()) { + if (matcher.groupCount() == 3) { + stream = IntStream.concat(stream, IntStream.range(matcher.end(1),matcher.end(2))); + exists = true; + } + } + + if (exists) { + char[] chars = originalData.toCharArray(); + stream.forEach(idx -> { + chars[idx] = '*'; + }); + return new String(chars); + } + + return originalData; + } + private String parseLoginInfo(HttpServletRequest request) { String userName = "NOT LOGIN"; User loginUser = (User) (request.getAttribute(Constants.SESSION_USER)); diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java new file mode 100644 index 0000000000000000000000000000000000000000..e7f1ba188c29ac2deb47f98cca55c07f3630671b --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.aspect; + +import org.apache.dolphinscheduler.common.enums.CacheType; +import org.apache.dolphinscheduler.remote.command.CacheExpireCommand; +import org.apache.dolphinscheduler.service.cache.CacheNotifyService; +import org.apache.dolphinscheduler.service.cache.impl.CacheKeyGenerator; + +import org.apache.commons.lang3.StringUtils; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; + +import org.aspectj.lang.ProceedingJoinPoint; +import org.aspectj.lang.annotation.Around; +import org.aspectj.lang.annotation.Aspect; +import org.aspectj.lang.annotation.Pointcut; +import org.aspectj.lang.reflect.MethodSignature; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.stereotype.Component; + +/** + * aspect for cache evict + */ +@Aspect +@Component +public class CacheEvictAspect { + + private static final Logger logger = LoggerFactory.getLogger(CacheEvictAspect.class); + + /** + * symbol of spring el + */ + private static final String EL_SYMBOL = "#"; + + /** + * prefix of spring el + */ + private static final String P = "p"; + + @Autowired + private CacheKeyGenerator cacheKeyGenerator; + + @Autowired + private CacheNotifyService cacheNotifyService; + + @Pointcut("@annotation(org.springframework.cache.annotation.CacheEvict)") + public void cacheEvictPointCut() { + // Do nothing because of it's a pointcut + } + + @Around("cacheEvictPointCut()") + public Object doAround(ProceedingJoinPoint proceedingJoinPoint) throws Throwable { + MethodSignature sign = (MethodSignature) 
proceedingJoinPoint.getSignature(); + Method method = sign.getMethod(); + Object target = proceedingJoinPoint.getTarget(); + Object[] args = proceedingJoinPoint.getArgs(); + + Object result = proceedingJoinPoint.proceed(); + + CacheConfig cacheConfig = method.getDeclaringClass().getAnnotation(CacheConfig.class); + CacheEvict cacheEvict = method.getAnnotation(CacheEvict.class); + + CacheType cacheType = getCacheType(cacheConfig, cacheEvict); + if (cacheType != null) { + String cacheKey; + if (cacheEvict.key().isEmpty()) { + cacheKey = (String) cacheKeyGenerator.generate(target, method, args); + } else { + cacheKey = cacheEvict.key(); + if (cacheEvict.key().contains(EL_SYMBOL)) { + cacheKey = parseKey(cacheEvict.key(), Arrays.asList(args)); + } + } + if (StringUtils.isNotEmpty(cacheKey)) { + cacheNotifyService.notifyMaster(new CacheExpireCommand(cacheType, cacheKey).convert2Command()); + } + } + + return result; + } + + private CacheType getCacheType(CacheConfig cacheConfig, CacheEvict cacheEvict) { + String cacheName = null; + if (cacheEvict.cacheNames().length > 0) { + cacheName = cacheEvict.cacheNames()[0]; + } + if (cacheConfig.cacheNames().length > 0) { + cacheName = cacheConfig.cacheNames()[0]; + } + if (cacheName == null) { + return null; + } + for (CacheType cacheType : CacheType.values()) { + if (cacheType.getCacheName().equals(cacheName)) { + return cacheType; + } + } + return null; + } + + private String parseKey(String key, List paramList) { + SpelExpressionParser spelParser = new SpelExpressionParser(); + EvaluationContext ctx = new StandardEvaluationContext(); + for (int i = 0; i < paramList.size(); i++) { + ctx.setVariable(P + i, paramList.get(i)); + } + Object obj = spelParser.parseExpression(key).getValue(ctx); + if (null == obj) { + throw new RuntimeException("parseKey error"); + } + return obj.toString(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java index 0bcf43ee5f91bbe4546ad8a1536769d4623e656f..fb961169f0099ca8594ec0cdc470c0ce787a78d6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java @@ -45,7 +45,7 @@ public class AppConfiguration implements WebMvcConfigurer { public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; public static final String LOGIN_PATH_PATTERN = "/login"; - public static final String REGISTER_PATH_PATTERN = "/users/registry"; + public static final String REGISTER_PATH_PATTERN = "/users/register"; public static final String PATH_PATTERN = "/**"; public static final String LOCALE_LANGUAGE_COOKIE = "language"; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index 01bb22d9ae18d1c0f51a0c39e4b8ad01c574755c..e412406cd584f4346865fe88c2cc60a454cf7c4c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ACCESSTOKEN_BY_USER_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.GENERATE_TOKEN_ERROR; @@ -70,10 +71,15 @@ public class AccessTokenController extends BaseController { * @param loginUser login user * @param userId token for user id * @param 
expireTime expire time for the token - * @param token token + * @param token token string (if it is absent, it will be automatically generated) * @return create result state code */ - @ApiIgnore + @ApiOperation(value = "createToken", notes = "CREATE_TOKEN_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int"), + @ApiImplicitParam(name = "expireTime", value = "EXPIRE_TIME", required = true, dataType = "String", example = "2021-12-31 00:00:00"), + @ApiImplicitParam(name = "token", value = "TOKEN", required = false, dataType = "String", example = "xxxx") + }) @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ACCESS_TOKEN_ERROR) @@ -81,7 +87,7 @@ public class AccessTokenController extends BaseController { public Result createToken(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token) { + @RequestParam(value = "token", required = false) String token) { Map result = accessTokenService.createToken(loginUser, userId, expireTime, token); return returnDataList(result); @@ -140,6 +146,27 @@ public class AccessTokenController extends BaseController { return result; } + /** + * query access token for specified user + * + * @param loginUser login user + * @param userId user id + * @return token list for specified user + */ + @ApiOperation(value = "queryAccessTokenByUser", notes = "QUERY_ACCESS_TOKEN_BY_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int") + }) + @GetMapping(value = "/user/{userId}") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_ACCESSTOKEN_BY_USER_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryAccessTokenByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + 
@PathVariable("userId") Integer userId) { + Map result = this.accessTokenService.queryAccessTokenByUser(loginUser, userId); + return this.returnDataList(result); + } + /** * delete access token by id * @@ -166,10 +193,16 @@ public class AccessTokenController extends BaseController { * @param id token id * @param userId token for user * @param expireTime token expire time - * @param token token string - * @return update result code + * @param token token string (if it is absent, it will be automatically generated) + * @return updated access token entity */ - @ApiIgnore + @ApiOperation(value = "updateToken", notes = "UPDATE_TOKEN_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "TOKEN_ID", required = true, dataType = "Int"), + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int"), + @ApiImplicitParam(name = "expireTime", value = "EXPIRE_TIME", required = true, dataType = "String", example = "2021-12-31 00:00:00"), + @ApiImplicitParam(name = "token", value = "TOKEN", required = false, dataType = "String", example = "xxxx") + }) @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_ACCESS_TOKEN_ERROR) @@ -178,7 +211,7 @@ public class AccessTokenController extends BaseController { @PathVariable(value = "id") int id, @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token) { + @RequestParam(value = "token", required = false) String token) { Map result = accessTokenService.updateToken(loginUser, id, userId, expireTime, token); return returnDataList(result); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java index 2e241a50c60bfa7058eda232613b4a12f26a4adf..43da273f5abcc573a599428b62f0c6312547ac75 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java @@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertPluginInstanceService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; @@ -213,12 +214,14 @@ public class AlertPluginInstanceController extends BaseController { * paging query alert plugin instance group list * * @param loginUser login user + * @param searchVal search value * @param pageNo page number * @param pageSize page size * @return alert plugin instance list page */ @ApiOperation(value = "queryAlertPluginInstanceListPaging", notes = "QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES") @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "1"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "20") }) @@ -227,13 +230,15 @@ public class AlertPluginInstanceController extends BaseController { @ApiException(LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { Result result = checkPageParams(pageNo, pageSize); if (!result.checkResult()) { return result; } - return alertPluginInstanceService.queryPluginPage(pageNo, 
pageSize); + searchVal = ParameterUtils.handleEscapes(searchVal); + return alertPluginInstanceService.listPaging(loginUser, searchVal, pageNo, pageSize); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 34ce49a0dcf4f2ef17b1ef94aea5a09679a0d5b5..a449a872a4a8ad0084e1f14914a60ce6a05b9bc2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -34,13 +34,13 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataSourceService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.datasource.DatasourceUtil; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Map; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index 
db28f0b7d66aed8354e2d70da581375100b64ae0..4df65a991bef353d64df15f3336160aee689fbeb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -23,6 +23,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.START_PROCESS_INSTANC import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.enums.ExecuteType; +import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.utils.Result; @@ -36,8 +37,6 @@ import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.User; -import java.util.Map; - import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.PathVariable; @@ -55,6 +54,13 @@ import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + /** * executor controller */ @@ -99,7 +105,7 @@ public class ExecutorController extends BaseController { @ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"), @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), - @ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long", example = 
"default"), + @ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long", example = "-1"), @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"), @ApiImplicitParam(name = "expectedParallelismNumber", value = "EXPECTED_PARALLELISM_NUMBER", dataType = "Int", example = "8") }) @@ -138,6 +144,100 @@ public class ExecutorController extends BaseController { return returnDataList(result); } + /** + * batch execute process instance + * If any processDefinitionCode cannot be found, the failure information is returned and the status is set to + * failed. The successful task will run normally and will not stop + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCodes process definition codes + * @param scheduleTime schedule time + * @param failureStrategy failure strategy + * @param startNodeList start nodes list + * @param taskDependType task depend type + * @param execType execute type + * @param warningType warning type + * @param warningGroupId warning group id + * @param runMode run mode + * @param processInstancePriority process instance priority + * @param workerGroup worker group + * @param timeout timeout + * @param expectedParallelismNumber the expected parallelism number when execute complement in parallel mode + * @return start process result code + */ + @ApiOperation(value = "batchStartProcessInstance", notes = "BATCH_RUN_PROCESS_INSTANCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionCodes", value = "PROCESS_DEFINITION_CODES", required = true, dataType = "String", example = "1,2,3"), + @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"), + @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"), + 
@ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"), + @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"), + @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), + @ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long", example = "-1"), + @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "expectedParallelismNumber", value = "EXPECTED_PARALLELISM_NUMBER", dataType = "Int", example = "8") + }) + @PostMapping(value = "batch-start-process-instance") + @ResponseStatus(HttpStatus.OK) + @ApiException(START_PROCESS_INSTANCE_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result batchStartProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(value = "processDefinitionCodes") String processDefinitionCodes, + @RequestParam(value = "scheduleTime", required = false) String scheduleTime, + @RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy, + @RequestParam(value = "startNodeList", required = false) String startNodeList, + @RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType, + @RequestParam(value = "execType", required = false) CommandType 
execType, + @RequestParam(value = "warningType", required = true) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false) int warningGroupId, + @RequestParam(value = "runMode", required = false) RunMode runMode, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "environmentCode", required = false, defaultValue = "-1") Long environmentCode, + @RequestParam(value = "timeout", required = false) Integer timeout, + @RequestParam(value = "startParams", required = false) String startParams, + @RequestParam(value = "expectedParallelismNumber", required = false) Integer expectedParallelismNumber, + @RequestParam(value = "dryRun", defaultValue = "0", required = false) int dryRun) { + + if (timeout == null) { + timeout = Constants.MAX_TASK_TIMEOUT; + } + + Map startParamMap = null; + if (startParams != null) { + startParamMap = JSONUtils.toMap(startParams); + } + + Map result = new HashMap<>(); + List processDefinitionCodeArray = Arrays.asList(processDefinitionCodes.split(Constants.COMMA)); + List startFailedProcessDefinitionCodeList = new ArrayList<>(); + + processDefinitionCodeArray = processDefinitionCodeArray.stream().distinct().collect(Collectors.toList()); + + for (String strProcessDefinitionCode : processDefinitionCodeArray) { + long processDefinitionCode = Long.parseLong(strProcessDefinitionCode); + result = execService.execProcessInstance(loginUser, projectCode, processDefinitionCode, scheduleTime, execType, failureStrategy, + startNodeList, taskDependType, warningType, warningGroupId, runMode, processInstancePriority, workerGroup, environmentCode, timeout, startParamMap, expectedParallelismNumber, dryRun); + + if (!Status.SUCCESS.equals(result.get(Constants.STATUS))) { + startFailedProcessDefinitionCodeList.add(String.valueOf(processDefinitionCode)); + } + } + 
+ if (!startFailedProcessDefinitionCodeList.isEmpty()) { + putMsg(result, Status.BATCH_START_PROCESS_INSTANCE_ERROR, String.join(Constants.COMMA, startFailedProcessDefinitionCodeList)); + } + + return returnDataList(result); + } + /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java index 3800264f6b705d748d7caa1914aee9cdb246004e..88c715fab6ed90868df1eb7eee707351c86a4511 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java @@ -32,6 +32,7 @@ import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; @@ -43,6 +44,7 @@ import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; import springfox.documentation.annotations.ApiIgnore; /** @@ -82,7 +84,6 @@ public class LoggerController extends BaseController { return loggerService.queryLog(taskInstanceId, skipNum, limit); } - /** * download log file * @@ -107,4 +108,59 @@ public class LoggerController extends BaseController { .body(logBytes); } + /** + * query task log in specified project + * + * @param loginUser login user + * @param projectCode project code + * @param 
taskInstanceId task instance id + * @param skipNum skip number + * @param limit limit + * @return task log content + */ + @ApiOperation(value = "queryLogInSpecifiedProject", notes = "QUERY_TASK_INSTANCE_LOG_IN_SPECIFIED_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value = "/{projectCode}/detail") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_INSTANCE_LOG_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(value = "taskInstanceId") int taskInstanceId, + @RequestParam(value = "skipLineNum") int skipNum, + @RequestParam(value = "limit") int limit) { + return returnDataList(loggerService.queryLog(loginUser, projectCode, taskInstanceId, skipNum, limit)); + } + + /** + * download log file + * + * @param loginUser login user + * @param projectCode project code + * @param taskInstanceId task instance id + * @return log file content + */ + @ApiOperation(value = "downloadTaskLogInSpecifiedProject", notes = "DOWNLOAD_TASK_INSTANCE_LOG_IN_SPECIFIED_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", required = true, dataType = "Int", example = "100") + }) + @GetMapping(value = "/{projectCode}/download-log") + @ResponseBody + 
@ApiException(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public ResponseEntity downloadTaskLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(value = "taskInstanceId") int taskInstanceId) { + byte[] logBytes = loggerService.getLogBytes(loginUser, projectCode, taskInstanceId); + return ResponseEntity + .ok() + .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".log" + "\"") + .body(logBytes); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java index 92a9ffa5ad7bead9a5214395e6f3243fb571da50..9240ac45d85674c7e1d2230e32549c8cd0b38ed5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java @@ -20,7 +20,6 @@ package org.apache.dolphinscheduler.api.controller; import static org.apache.dolphinscheduler.api.enums.Status.LIST_MASTERS_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.LIST_WORKERS_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATABASE_STATE_ERROR; -import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ZOOKEEPER_STATE_ERROR; import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; import org.apache.dolphinscheduler.api.exceptions.ApiException; @@ -102,20 +101,4 @@ public class MonitorController extends BaseController { return returnDataList(result); } - /** - * query zookeeper state - * - * @param loginUser login user - * @return zookeeper information list - */ - @ApiOperation(value = "queryZookeeperState", notes = 
"QUERY_ZOOKEEPER_STATE_NOTES") - @GetMapping(value = "/zookeepers") - @ResponseStatus(HttpStatus.OK) - @ApiException(QUERY_ZOOKEEPER_STATE_ERROR) - @AccessLogAnnotation(ignoreRequestArgs = "loginUser") - public Result queryZookeeperState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - Map result = monitorService.queryZookeeperState(loginUser); - return returnDataList(result); - } - } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index d6cb1bacb80aaa48ef4bea2960f0fc9763c92c7d..1d7b4853403778c602de9db7f7b751b38c00aa36 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -438,6 +438,24 @@ public class ProcessDefinitionController extends BaseController { return returnDataList(result); } + /** + * query Process definition simple list + * + * @param loginUser login user + * @param projectCode project code + * @return process definition list + */ + @ApiOperation(value = "querySimpleList", notes = "QUERY_PROCESS_DEFINITION_SIMPLE_LIST_NOTES") + @GetMapping(value = "/simple-list") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROCESS_DEFINITION_LIST) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryProcessDefinitionSimpleList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode) { + Map result = processDefinitionService.queryProcessDefinitionSimpleList(loginUser, projectCode); + return returnDataList(result); + } + /** * query process definition list paging * @@ -497,7 +515,7 @@ public class 
ProcessDefinitionController extends BaseController { @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("code") long code, @RequestParam("limit") Integer limit) { - Map result = processDefinitionService.viewTree(code, limit); + Map result = processDefinitionService.viewTree(projectCode, code, limit); return returnDataList(result); } @@ -678,4 +696,115 @@ public class ProcessDefinitionController extends BaseController { Map result = processDefinitionService.importProcessDefinition(loginUser, projectCode, file); return returnDataList(result); } + + /** + * create empty process definition + * + * @param loginUser login user + * @param projectCode project code + * @param name process definition name + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @return process definition code + */ + @ApiOperation(value = "createEmptyProcessDefinition", notes = "CREATE_EMPTY_PROCESS_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, dataType = "Long", example = "123456789"), + @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String") + }) + @PostMapping(value = "/empty") + @ResponseStatus(HttpStatus.OK) + @ApiException(CREATE_PROCESS_DEFINITION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result createEmptyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(value = "name", required = true) String name, + @RequestParam(value = "description", required = false) String description, + 
@RequestParam(value = "globalParams", required = false, defaultValue = "[]") String globalParams, + @RequestParam(value = "timeout", required = false, defaultValue = "0") int timeout, + @RequestParam(value = "tenantCode", required = true) String tenantCode, + @RequestParam(value = "scheduleJson", required = false) String scheduleJson) { + return returnDataList(processDefinitionService.createEmptyProcessDefinition(loginUser, projectCode, name, description, globalParams, + timeout, tenantCode, scheduleJson)); + } + + /** + * update process definition basic info + * + * @param loginUser login user + * @param projectCode project code + * @param name process definition name + * @param code process definition code + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @param releaseState releaseState + * @return update result code + */ + @ApiOperation(value = "updateBasicInfo", notes = "UPDATE_PROCESS_DEFINITION_BASIC_INFO_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), + @ApiImplicitParam(name = "code", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"), + @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"), + @ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = false, dataType = "ReleaseState") + }) + @PutMapping(value = "/{code}/basic-info") + @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_PROCESS_DEFINITION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result updateProcessDefinitionBasicInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + 
@RequestParam(value = "name", required = true) String name, + @PathVariable(value = "code", required = true) long code, + @RequestParam(value = "description", required = false) String description, + @RequestParam(value = "globalParams", required = false, defaultValue = "[]") String globalParams, + @RequestParam(value = "timeout", required = false, defaultValue = "0") int timeout, + @RequestParam(value = "tenantCode", required = true) String tenantCode, + @RequestParam(value = "scheduleJson", required = false) String scheduleJson, + @RequestParam(value = "releaseState", required = false, defaultValue = "OFFLINE") ReleaseState releaseState) { + Map result = processDefinitionService.updateProcessDefinitionBasicInfo(loginUser, projectCode, name, code, description, globalParams, + timeout, tenantCode, scheduleJson); + // If the update fails, the result will be returned directly + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return returnDataList(result); + } + + // Judge whether to go online after editing,0 means offline, 1 means online + if (releaseState == ReleaseState.ONLINE) { + result = processDefinitionService.releaseWorkflowAndSchedule(loginUser, projectCode, code, releaseState); + } + return returnDataList(result); + } + + /** + * release process definition and schedule + * + * @param loginUser login user + * @param projectCode project code + * @param code process definition code + * @param releaseState releaseState + * @return update result code + */ + @ApiOperation(value = "releaseWorkflowAndSchedule", notes = "RELEASE_WORKFLOW_SCHEDULE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROCESS_DEFINITION_NAME", required = true, type = "Long"), + @ApiImplicitParam(name = "code", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"), + @ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = true, dataType = "ReleaseState") + }) + 
@PostMapping(value = "/{code}/release-workflow") + @ResponseStatus(HttpStatus.OK) + @ApiException(RELEASE_PROCESS_DEFINITION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result releaseWorkflowAndSchedule(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable(value = "code", required = true) long code, + @RequestParam(value = "releaseState", required = true, defaultValue = "OFFLINE") ReleaseState releaseState) { + return returnDataList(processDefinitionService.releaseWorkflowAndSchedule(loginUser, projectCode, code, releaseState)); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index d7ec4056dda8ad90d5b1548ac0754fa5a76ffd58..86b3b36d5437b3db65b561820318f486dc9d0692 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -100,7 +100,7 @@ public class ProcessInstanceController extends BaseController { */ @ApiOperation(value = "queryProcessInstanceListPaging", notes = "QUERY_PROCESS_INSTANCE_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefiniteCode", value = "PROCESS_DEFINITION_CODE", dataType = "Long", example = "100"), + @ApiImplicitParam(name = "processDefineCode", value = "PROCESS_DEFINITION_CODE", dataType = "Long", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"), @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type = "ExecutionStatus"), @@ -177,13 +177,13 
@@ public class ProcessInstanceController extends BaseController { @ApiImplicitParams({ @ApiImplicitParam(name = "taskRelationJson", value = "TASK_RELATION_JSON", type = "String"), @ApiImplicitParam(name = "taskDefinitionJson", value = "TASK_DEFINITION_JSON", type = "String"), - @ApiImplicitParam(name = "id", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "id", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "1"), @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", type = "String"), - @ApiImplicitParam(name = "syncDefine", value = "SYNC_DEFINE", required = true, type = "Boolean"), - @ApiImplicitParam(name = "globalParams", value = "PROCESS_GLOBAL_PARAMS", type = "String"), + @ApiImplicitParam(name = "syncDefine", value = "SYNC_DEFINE", required = true, type = "Boolean", example = "false"), + @ApiImplicitParam(name = "globalParams", value = "PROCESS_GLOBAL_PARAMS", type = "String", example = "[]"), @ApiImplicitParam(name = "locations", value = "PROCESS_INSTANCE_LOCATIONS", type = "String"), - @ApiImplicitParam(name = "timeout", value = "PROCESS_TIMEOUT", type = "String"), - @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", type = "Int", example = "0") + @ApiImplicitParam(name = "timeout", value = "PROCESS_TIMEOUT", type = "Int", example = "0"), + @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", type = "String", example = "default") }) @PutMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @@ -199,8 +199,7 @@ public class ProcessInstanceController extends BaseController { @RequestParam(value = "globalParams", required = false, defaultValue = "[]") String globalParams, @RequestParam(value = "locations", required = false) String locations, @RequestParam(value = "timeout", required = false, defaultValue = "0") int timeout, - @RequestParam(value = "tenantCode", required = true) String tenantCode, - @RequestParam(value = "flag", required 
= false) Flag flag) { + @RequestParam(value = "tenantCode", required = true) String tenantCode) { Map result = processInstanceService.updateProcessInstance(loginUser, projectCode, id, taskRelationJson, taskDefinitionJson, scheduleTime, syncDefine, globalParams, locations, timeout, tenantCode); return returnDataList(result); @@ -344,8 +343,9 @@ public class ProcessInstanceController extends BaseController { @ApiException(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR) @AccessLogAnnotation public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { - Map result = processInstanceService.viewVariables(id); + Map result = processInstanceService.viewVariables(projectCode, id); return returnDataList(result); } @@ -368,7 +368,7 @@ public class ProcessInstanceController extends BaseController { public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) throws Exception { - Map result = processInstanceService.viewGantt(id); + Map result = processInstanceService.viewGantt(projectCode, id); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationController.java new file mode 100644 index 0000000000000000000000000000000000000000..710c8ac209f466151bed888e40a63ec0083d8344 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationController.java @@ -0,0 +1,257 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.controller; + +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_TASK_RELATION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DATA_IS_NOT_VALID; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_EDGE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_TASK_PROCESS_RELATION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.MOVE_PROCESS_TASK_RELATION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_PROCESS_RELATION_ERROR; + +import org.apache.dolphinscheduler.api.aspect.AccessLogAnnotation; +import org.apache.dolphinscheduler.api.exceptions.ApiException; +import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.HashMap; +import java.util.Map; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; + +/** + * process task relation controller + */ +@Api(tags = "PROCESS_TASK_RELATION_TAG") +@RestController +@RequestMapping("projects/{projectCode}/process-task-relation") +public class ProcessTaskRelationController extends BaseController { + + @Autowired + private ProcessTaskRelationService processTaskRelationService; + + /** + * create process task relation + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode processDefinitionCode + * @param preTaskCode preTaskCode + * @param postTaskCode postTaskCode + * @return create result code + */ + @ApiOperation(value = "save", notes = "CREATE_PROCESS_TASK_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "preTaskCode", value = "PRE_TASK_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "postTaskCode", value = "POST_TASK_CODE", required = true, type = "Long") + }) + @PostMapping() + @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_PROCESS_TASK_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") 
+ public Result createProcessTaskRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode, + @RequestParam(name = "preTaskCode", required = true) long preTaskCode, + @RequestParam(name = "postTaskCode", required = true) long postTaskCode) { + Map result = new HashMap<>(); + if (postTaskCode == 0L) { + putMsg(result, DATA_IS_NOT_VALID, "postTaskCode"); + } else if (processDefinitionCode == 0L) { + putMsg(result, DATA_IS_NOT_VALID, "processDefinitionCode"); + } else { + result = processTaskRelationService.createProcessTaskRelation(loginUser, projectCode, processDefinitionCode, preTaskCode, postTaskCode); + } + return returnDataList(result); + } + + /** + * delete process task relation (delete task from workflow) + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param taskCode the post task code + * @return delete result code + */ + @ApiOperation(value = "deleteRelation", notes = "DELETE_PROCESS_TASK_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long") + }) + @DeleteMapping(value = "/{taskCode}") + @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_TASK_PROCESS_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result deleteTaskProcessRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + 
@RequestParam(name = "processDefinitionCode", required = true) long processDefinitionCode, + @PathVariable("taskCode") long taskCode) { + return returnDataList(processTaskRelationService.deleteTaskProcessRelation(loginUser, projectCode, processDefinitionCode, taskCode)); + } + + /** + * delete task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param preTaskCodes the pre task codes, sep ',' + * @param taskCode the post task code + * @return delete result code + */ + @ApiOperation(value = "deleteUpstreamRelation", notes = "DELETE_UPSTREAM_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "preTaskCodes", value = "PRE_TASK_CODES", required = true, type = "String", example = "1,2"), + @ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long") + }) + @DeleteMapping(value = "/{taskCode}/upstream") + @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_TASK_PROCESS_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result deleteUpstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(name = "preTaskCodes", required = true) String preTaskCodes, + @PathVariable("taskCode") long taskCode) { + return returnDataList(processTaskRelationService.deleteUpstreamRelation(loginUser, projectCode, preTaskCodes, taskCode)); + } + + /** + * delete task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param postTaskCodes the post task codes, sep ',' + * @param taskCode the pre task code + * @return delete result code + */ + @ApiOperation(value = "deleteDownstreamRelation", notes = "DELETE_DOWNSTREAM_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name 
= "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "postTaskCodes", value = "POST_TASK_CODES", required = true, type = "String", example = "1,2"), + @ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long") + }) + @DeleteMapping(value = "/{taskCode}/downstream") + @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_TASK_PROCESS_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result deleteDownstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @RequestParam(name = "postTaskCodes", required = true) String postTaskCodes, + @PathVariable("taskCode") long taskCode) { + return returnDataList(processTaskRelationService.deleteDownstreamRelation(loginUser, projectCode, postTaskCodes, taskCode)); + } + + /** + * query task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode current task code (post task code) + * @return process task relation list + */ + @ApiOperation(value = "queryUpstreamRelation", notes = "QUERY_UPSTREAM_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long") + }) + @GetMapping(value = "/{taskCode}/upstream") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_PROCESS_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryUpstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable("taskCode") long taskCode) { + return 
returnDataList(processTaskRelationService.queryUpstreamRelation(loginUser, projectCode, taskCode)); + } + + /** + * query task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode pre task code + * @return process task relation list + */ + @ApiOperation(value = "queryDownstreamRelation", notes = "QUERY_DOWNSTREAM_RELATION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "taskCode", value = "TASK_CODE", required = true, type = "Long") + }) + @GetMapping(value = "/{taskCode}/downstream") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_PROCESS_RELATION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryDownstreamRelation(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable("taskCode") long taskCode) { + return returnDataList(processTaskRelationService.queryDownstreamRelation(loginUser, projectCode, taskCode)); + } + + /** + * delete edge + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param preTaskCode pre task code + * @param postTaskCode post task code + * @return delete result code + */ + @ApiOperation(value = "deleteEdge", notes = "DELETE_EDGE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "preTaskCode", value = "PRE_TASK_CODE", required = true, type = "Long"), + @ApiImplicitParam(name = "postTaskCode", value = "POST_TASK_CODE", required = true, type = "Long") + }) + @DeleteMapping(value = 
"/{processDefinitionCode}/{preTaskCode}/{postTaskCode}") + @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_EDGE_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result deleteEdge(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) + @PathVariable long projectCode, + @PathVariable long processDefinitionCode, + @PathVariable long preTaskCode, + @PathVariable long postTaskCode) { + return returnDataList(processTaskRelationService.deleteEdge(loginUser, projectCode, processDefinitionCode, preTaskCode, postTaskCode)); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index 1dfb7e5df861aaf66c2dc6a6d8b6fa04f0e200b1..22e582204ded262f16fb8fbd31cdab67018cf86c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_AUTHORIZED_USER; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR; @@ -237,6 +238,27 @@ public class ProjectController extends BaseController { return returnDataList(result); } + /** + * query authorized user + * + * @param loginUser login user + * @param projectCode project code + * @return users who have permission for the specified project + */ + @ApiOperation(value = "queryAuthorizedUser", notes = "QUERY_AUTHORIZED_USER_NOTES") + @ApiImplicitParams({ + 
@ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", dataType = "Long", example = "100") + }) + @GetMapping(value = "/authed-user") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_AUTHORIZED_USER) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result queryAuthorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectCode") Long projectCode) { + Map result = this.projectService.queryAuthorizedUser(loginUser, projectCode); + return this.returnDataList(result); + } + /** * query authorized and user created project * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index d8fc7c2932b6404349ad78908d99dcf3b220f610..0073ad0048153500bbe8a2a02b5033a3a1fbdf47 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -49,7 +49,7 @@ import org.apache.dolphinscheduler.api.service.UdfFuncService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ProgramType; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index f3b5290d65a3133bda47d006f9424cd2c9d2a558..0e1ca4d3ab1dda0865c0e6a006e30a043eb38df9 
100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -143,11 +143,12 @@ public class SchedulerController extends BaseController { @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), - @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), @ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long"), }) @PutMapping("/{id}") + @ResponseStatus(HttpStatus.OK) @ApiException(UPDATE_SCHEDULE_ERROR) @AccessLogAnnotation(ignoreRequestArgs = "loginUser") public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @@ -308,4 +309,48 @@ public class SchedulerController extends BaseController { Map result = schedulerService.previewSchedule(loginUser, schedule); return returnDataList(result); } + + /** + * update process definition schedule + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param workerGroup worker group + * @param processInstancePriority process instance priority + * @return update result code + */ + @ApiOperation(value = "updateScheduleByProcessDefinitionCode", notes = 
"UPDATE_SCHEDULE_BY_PROCESS_DEFINITION_CODE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionCode", value = "PROCESS_DEFINITION_CODE", required = true, dataType = "Long", example = "12345678"), + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), + @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), + @ApiImplicitParam(name = "environmentCode", value = "ENVIRONMENT_CODE", dataType = "Long"), + }) + @PutMapping("/update/{code}") + @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_SCHEDULE_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result updateScheduleByProcessDefinitionCode(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable(value = "code") long processDefinitionCode, + @RequestParam(value = "schedule") String schedule, + @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, + @RequestParam(value = "warningGroupId", required = false) int warningGroupId, + @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "environmentCode", required 
= false, defaultValue = "-1") long environmentCode, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + Map result = schedulerService.updateScheduleByProcessDefinitionCode(loginUser, projectCode, processDefinitionCode, schedule, + warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode); + return returnDataList(result); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java index a8ec2f51a3cd29835795555a4f2214462389cb14..f485bff3126f1fa00e872f8e948bbb54ac56c107 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskDefinitionController.java @@ -24,6 +24,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.LOGIN_USER_QUERY_PROJ import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DETAIL_OF_TASK_DEFINITION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_DEFINITION_LIST_PAGING_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_DEFINITION_VERSIONS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_TASK_DEFINITION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_TASK_DEFINITION_VERSION_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TASK_DEFINITION_ERROR; @@ -32,6 +33,7 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskDefinitionService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ReleaseState; import 
org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; @@ -310,4 +312,31 @@ public class TaskDefinitionController extends BaseController { Map result = taskDefinitionService.genTaskCodeList(genNum); return returnDataList(result); } + + /** + * release task definition + * + * @param loginUser login user + * @param projectCode project code + * @param code task definition code + * @param releaseState releaseState + * @return update result code + */ + @ApiOperation(value = "releaseTaskDefinition", notes = "RELEASE_TASK_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "projectCode", value = "PROCESS_DEFINITION_NAME", required = true, type = "Long"), + @ApiImplicitParam(name = "code", value = "TASK_DEFINITION_CODE", required = true, dataType = "Long", example = "123456789"), + @ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = true, dataType = "ReleaseState") + }) + @PostMapping(value = "/{code}/release") + @ResponseStatus(HttpStatus.OK) + @ApiException(RELEASE_TASK_DEFINITION_ERROR) + @AccessLogAnnotation(ignoreRequestArgs = "loginUser") + public Result releaseTaskDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectCode", value = "PROJECT_CODE", required = true) @PathVariable long projectCode, + @PathVariable(value = "code", required = true) long code, + @RequestParam(value = "releaseState", required = true, defaultValue = "OFFLINE") ReleaseState releaseState) { + Map result = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, code, releaseState); + return returnDataList(result); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index 
11de0389ad700f4a1a290c73ee816a9fd37db2e9..a79e5f709a76c466277b62c4df3e2b61e40e2281 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.REVOKE_PROJECT_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_USER_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_USER_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_USER_BY_ID_ERROR; @@ -234,6 +235,54 @@ public class UsersController extends BaseController { return returnDataList(result); } + /** + * grant project by code + * + * @param loginUser login user + * @param userId user id + * @param projectCode project code + * @return grant result code + */ + @ApiOperation(value = "grantProjectByCode", notes = "GRANT_PROJECT_BY_CODE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long") + }) + @PostMapping(value = "/grant-project-by-code") + @ResponseStatus(HttpStatus.OK) + @ApiException(GRANT_PROJECT_ERROR) + @AccessLogAnnotation + public Result grantProjectByCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "projectCode") long projectCode) { + Map result = this.usersService.grantProjectByCode(loginUser, userId, projectCode); + return this.returnDataList(result); + } + + /** + * revoke project + * + * @param loginUser login user + * @param userId user id + * @param projectCode project code + * @return revoke result code + */ + @ApiOperation(value = "revokeProject", notes = 
"REVOKE_PROJECT_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "projectCode", value = "PROJECT_CODE", required = true, type = "Long", example = "100") + }) + @PostMapping(value = "/revoke-project") + @ResponseStatus(HttpStatus.OK) + @ApiException(REVOKE_PROJECT_ERROR) + @AccessLogAnnotation + public Result revokeProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userId") int userId, + @RequestParam(value = "projectCode") long projectCode) { + Map result = this.usersService.revokeProject(loginUser, userId, projectCode); + return returnDataList(result); + } + /** * grant resource * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java index 249d8b3a680eed19e2b38abb6326123a73a26f58..d944ffb3d8ce49f803624f780e1c3f79a6b9355d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/DagDataSchedule.java @@ -30,6 +30,9 @@ public class DagDataSchedule extends DagData { */ private Schedule schedule; + public DagDataSchedule() { + } + public DagDataSchedule(DagData dagData) { super(); this.setProcessDefinition(dagData.getProcessDefinition()); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java index 999fc88b0458ef1a6147446347035d898bf8421a..c66172c2ec656dfb2653b045ec50d07432f8a8c0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java @@ -14,14 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.dto.resources; -import com.fasterxml.jackson.annotation.JsonPropertyOrder; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import java.util.ArrayList; import java.util.List; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + /** * resource component */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/Instance.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/Instance.java index e13a7d144032d0907e48d38a1aaf870f62d1333b..c39ece8ae64f54215a08bba7a28f91249935a445 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/Instance.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/treeview/Instance.java @@ -71,7 +71,7 @@ public class Instance { */ private String duration; - private int subflowId; + private long subflowCode; public Instance() { } @@ -83,7 +83,7 @@ public class Instance { this.type = type; } - public Instance(int id, String name, long code, String type, String state, Date startTime, Date endTime, String host, String duration, int subflowId) { + public Instance(int id, String name, long code, String type, String state, Date startTime, Date endTime, String host, String duration, long subflowCode) { this.id = id; this.name = name; this.code = code; @@ -93,7 +93,7 @@ public class Instance { this.endTime = endTime; this.host = host; this.duration = duration; - this.subflowId = subflowId; + this.subflowCode = subflowCode; } public Instance(int id, String name, long code, String type, String state, Date startTime, Date endTime, String host, String duration) { @@ 
-173,11 +173,11 @@ public class Instance { this.duration = duration; } - public int getSubflowId() { - return subflowId; + public long getSubflowCode() { + return subflowCode; } - public void setSubflowId(int subflowId) { - this.subflowId = subflowId; + public void setSubflowCode(long subflowCode) { + this.subflowCode = subflowCode; } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 7ea8f9bdee03b01b93e266b4f0b914bbdc67e853..90dbe771d4224ed502cfed22b53f657ece87caff 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.enums; import java.util.Locale; +import java.util.Optional; import org.springframework.context.i18n.LocaleContextHolder; @@ -165,7 +166,7 @@ public enum Status { NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"), SAVE_ERROR(10136, "save error", "保存错误"), DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"), - BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"), + BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误: {0}"), PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"), PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"), SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"), @@ -210,6 +211,9 @@ public enum Status { QUERY_WORKER_ADDRESS_LIST_FAIL(10178, "query worker address list fail ", "查询worker地址列表失败"), TRANSFORM_PROJECT_OWNERSHIP(10179, "Please transform project ownership [{0}]", "请先转移项目所有权[{0}]"), 
QUERY_ALERT_GROUP_ERROR(10180, "query alert group error", "查询告警组错误"), + CURRENT_LOGIN_USER_TENANT_NOT_EXIST(10181, "the tenant of the currently login user is not specified", "未指定当前登录用户的租户"), + REVOKE_PROJECT_ERROR(10182, "revoke project error", "撤销项目授权错误"), + QUERY_AUTHORIZED_USER(10183, "query authorized user error", "查询拥有项目权限的用户错误"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), @@ -246,6 +250,7 @@ public enum Status { COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"), COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"), START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"), + BATCH_START_PROCESS_INSTANCE_ERROR(50014, "batch start process instance error: {0}", "批量运行工作流实例错误: {0}"), EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"), CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "工作流定义错误"), QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"), @@ -263,11 +268,11 @@ public enum Status { EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"), BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"), IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"), - TASK_DEFINE_NOT_EXIST(50030, "task definition {0} does not exist", "任务定义[{0}]不存在"), + TASK_DEFINE_NOT_EXIST(50030, "task definition [{0}] does not exist", "任务定义[{0}]不存在"), CREATE_PROCESS_TASK_RELATION_ERROR(50032, "create process task relation error", "创建工作流任务关系错误"), - PROCESS_TASK_RELATION_NOT_EXIST(50033, "process task relation {0} does not exist", "工作流任务关系[{0}]不存在"), + PROCESS_TASK_RELATION_NOT_EXIST(50033, "process task relation [{0}] does 
not exist", "工作流任务关系[{0}]不存在"), PROCESS_TASK_RELATION_EXIST(50034, "process task relation is already exist, processCode:[{0}]", "工作流任务关系已存在, processCode:[{0}]"), - PROCESS_DAG_IS_EMPTY(50035, "process dag can not be empty", "工作流dag不能为空"), + PROCESS_DAG_IS_EMPTY(50035, "process dag is empty", "工作流dag是空"), CHECK_PROCESS_TASK_RELATION_ERROR(50036, "check process task relation error", "工作流任务关系参数错误"), CREATE_TASK_DEFINITION_ERROR(50037, "create task definition error", "创建任务错误"), UPDATE_TASK_DEFINITION_ERROR(50038, "update task definition error", "更新任务定义错误"), @@ -278,13 +283,26 @@ public enum Status { QUERY_DETAIL_OF_TASK_DEFINITION_ERROR(50043, "query detail of task definition error", "查询任务详细信息错误"), QUERY_TASK_DEFINITION_LIST_PAGING_ERROR(50044, "query task definition list paging error", "分页查询任务定义列表错误"), TASK_DEFINITION_NAME_EXISTED(50045, "task definition name [{0}] already exists", "任务定义名称[{0}]已经存在"), + RELEASE_TASK_DEFINITION_ERROR(50046, "release task definition error", "上线任务错误"), + MOVE_PROCESS_TASK_RELATION_ERROR(50047, "move process task relation error", "移动任务到其他工作流错误"), + DELETE_TASK_PROCESS_RELATION_ERROR(50048, "delete process task relation error", "删除工作流任务关系错误"), + QUERY_TASK_PROCESS_RELATION_ERROR(50049, "query process task relation error", "查询工作流任务关系错误"), + TASK_DEFINE_STATE_ONLINE(50050, "task definition [{0}] is already on line", "任务定义[{0}]已上线"), + TASK_HAS_DOWNSTREAM(50051, "Task exists downstream [{0}] dependence", "任务存在下游[{0}]依赖"), + TASK_HAS_UPSTREAM(50052, "Task [{0}] exists upstream dependence", "任务[{0}]存在上游依赖"), + MAIN_TABLE_USING_VERSION(50053, "the version that the master table is using", "主表正在使用该版本"), + PROJECT_PROCESS_NOT_MATCH(50054, "the project and the process is not match", "项目和工作流不匹配"), + DELETE_EDGE_ERROR(50055, "delete edge error", "删除工作流任务连接线错误"), + NOT_SUPPORT_UPDATE_TASK_DEFINITION(50056, "task state does not support modification", "当前任务不支持修改"), + NOT_SUPPORT_COPY_TASK_TYPE(50057, "task type [{0}] does not support copy", 
"不支持复制的任务类型[{0}]"), + COMPLEMENT_PROCESS_INSTANCE_DATE_RANGE_ERROR(50059, "complement instances cannot be generated within the complement time range according to the schedule configuration", + "根据调度配置在补数时间范围内无法生成补数实例"), HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"), /** * for monitor */ QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"), - QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"), CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"), GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"), @@ -292,6 +310,7 @@ public enum Status { UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"), DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"), ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"), + QUERY_ACCESSTOKEN_BY_USER_ERROR(70016, "query access token by user error", "查询访问指定用户的token错误"), COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"), @@ -331,7 +350,10 @@ public enum Status { VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"), ENVIRONMENT_WORKER_GROUPS_IS_INVALID(1200012, "environment worker groups is invalid format", "环境关联的工作组参数解析错误"), UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR(1200013,"You can't modify the worker group, because the worker group [{0}] and this environment [{1}] already be used in the task [{2}]", - "您不能修改工作组选项,因为该工作组 [{0}] 和 该环境 [{1}] 已经被用在任务 [{2}] 中"); + "您不能修改工作组选项,因为该工作组 [{0}] 和 该环境 [{1}] 已经被用在任务 [{2}] 中"), + NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT(130020, "Not allow to disable your own account", "不能停用自己的账号"), + VERIFY_PARAMETER_NAME_FAILED(1300009, "The file name verify failed", "文件命名校验失败"), + TENANT_FULL_NAME_TOO_LONG_ERROR(1300016, "tenant's fullname is too long error", "租户名过长"); private final int code; @@ -355,4 +377,18 @@ public enum Status { return this.enMsg; } } + + /** + * Retrieve Status enum entity by 
status code. + * @param code + * @return + */ + public static Optional findStatusBy(int code) { + for (Status status : Status.values()) { + if (code == status.getCode()) { + return Optional.of(status); + } + } + return Optional.empty(); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java index fc4ed78d469df1485cc902a057d32ae6fc7463f6..99ac4c95ccdb439028e3db5104f9d6127c73666d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java @@ -38,12 +38,21 @@ public interface AccessTokenService { */ Result queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize); + /** + * query access token for specified user + * + * @param loginUser login user + * @param userId user id + * @return token list for specified user + */ + Map queryAccessTokenByUser(User loginUser, Integer userId); + /** * create token * * @param userId token for user * @param expireTime token expire time - * @param token token string + * @param token token string (if it is absent, it will be automatically generated) * @return create result code */ Map createToken(User loginUser, int userId, String expireTime, String token); @@ -73,8 +82,8 @@ public interface AccessTokenService { * @param id token id * @param userId token for user * @param expireTime token expire time - * @param token token string - * @return update result code + * @param token token string (if it is absent, it will be automatically generated) + * @return updated access token entity */ Map updateToken(User loginUser, int id, int userId, String expireTime, String token); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java index f33a79a2b2a03b03aa620dd5f250ca348e7d742e..5296f31e11ac312a700f0ff8580e184a54eab55a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertPluginInstanceService.java @@ -82,9 +82,11 @@ public interface AlertPluginInstanceService { /** * queryPluginPage - * @param pageIndex page index + * @param loginUser login user + * @param searchVal search value + * @param pageNo page index * @param pageSize page size * @return plugins */ - Result queryPluginPage(int pageIndex, int pageSize); + Result listPaging(User loginUser, String searchVal, int pageNo, int pageSize); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index dc1637e2049b148a955830179aeb5226b13f5441..d2408471551f4082cc0900f7679ccd1f807bd658 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -18,10 +18,10 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Map; diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 72d1892371a60ca719fc7834333c955bd56fc4b2..2fa065bca21632aa9178ce7e8080c27f7287dc6c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -68,11 +68,12 @@ public interface ExecutorService { /** * check whether the process definition can be executed * + * @param projectCode project code * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ - Map checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode); + Map checkProcessDefinitionValid(long projectCode, ProcessDefinition processDefinition, long processDefineCode); /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java index ef30a4030347fc17bb594b04f859c2eef0c4ec82..b252522c7c8805f2b37af7d450382e2a9d6df808 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -18,6 +18,9 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; /** * logger service @@ -43,4 +46,25 @@ public interface LoggerService { */ byte[] getLogBytes(int taskInstId); + /** + * query log + * + * @param loginUser login user + * @param projectCode project code + * @param 
taskInstId task instance id + * @param skipLineNum skip line number + * @param limit limit + * @return log string data + */ + Map queryLog(User loginUser, long projectCode, int taskInstId, int skipLineNum, int limit); + + /** + * get log bytes + * + * @param loginUser login user + * @param projectCode project code + * @param taskInstId task instance id + * @return log byte array + */ + byte[] getLogBytes(User loginUser, long projectCode, int taskInstId); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java index 0dbdc8045af77b8400b1be9da141110ae4cdd07e..c63e27386a10ed2b069cc1635e0c3ed947866fe3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java @@ -35,7 +35,7 @@ public interface MonitorService { * @return data base state */ Map queryDatabaseState(User loginUser); - + /** * query master list * @@ -43,15 +43,7 @@ public interface MonitorService { * @return master information list */ Map queryMaster(User loginUser); - - /** - * query zookeeper state - * - * @param loginUser login user - * @return zookeeper information list - */ - Map queryZookeeperState(User loginUser); - + /** * query worker list * @@ -59,6 +51,6 @@ public interface MonitorService { * @return worker information list */ Map queryWorker(User loginUser); - + List getServerListFromRegistry(boolean isMaster); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java index 42fce02bad2b6726a500fa921940602d13f062c3..4e7abf36b0b89c8bf0095f47210f1124617f8919 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -19,8 +19,10 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.User; +import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletResponse; @@ -68,6 +70,16 @@ public interface ProcessDefinitionService { Map queryProcessDefinitionList(User loginUser, long projectCode); + /** + * query process definition simple list + * + * @param loginUser login user + * @param projectCode project code + * @return definition simple list + */ + Map queryProcessDefinitionSimpleList(User loginUser, + long projectCode); + /** * query process definition list paging * @@ -235,7 +247,7 @@ public interface ProcessDefinitionService { * @param processTaskRelationJson process task relation json * @return check result code */ - Map checkProcessNodeList(String processTaskRelationJson); + Map checkProcessNodeList(String processTaskRelationJson, List taskDefinitionLogs); /** * get task node details based on process definition @@ -272,11 +284,12 @@ public interface ProcessDefinitionService { /** * Encapsulates the TreeView structure * + * @param projectCode project code * @param code process definition code * @param limit limit * @return tree view json data */ - Map viewTree(long code, Integer limit); + Map viewTree(long projectCode, long code, Integer limit); /** * switch the defined process definition version @@ -322,5 +335,64 @@ public interface ProcessDefinitionService { long code, int version); + /** + * create empty process definition + * + * @param loginUser login user + * @param projectCode project code + * @param name process 
definition name + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @return process definition code + */ + Map createEmptyProcessDefinition(User loginUser, + long projectCode, + String name, + String description, + String globalParams, + int timeout, + String tenantCode, + String scheduleJson); + + /** + * update process definition basic info + * + * @param loginUser login user + * @param projectCode project code + * @param name process definition name + * @param code process definition code + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @return update result code + */ + Map updateProcessDefinitionBasicInfo(User loginUser, + long projectCode, + String name, + long code, + String description, + String globalParams, + int timeout, + String tenantCode, + String scheduleJson); + + /** + * release process definition and schedule + * + * @param loginUser login user + * @param projectCode project code + * @param code process definition code + * @param releaseState releaseState + * @return update result code + */ + Map releaseWorkflowAndSchedule(User loginUser, + long projectCode, + long code, + ReleaseState releaseState); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 3dbf46d8b8eb0e9cf91defbb5969a89107548c25..073e1f3818196c52247e80c8d530cb570b259b37 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -21,7 +21,6 @@ package org.apache.dolphinscheduler.api.service; import 
org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.User; @@ -165,19 +164,21 @@ public interface ProcessInstanceService { /** * view process instance variables * + * @param projectCode project code * @param processInstanceId process instance id * @return variables data */ - Map viewVariables(Integer processInstanceId); + Map viewVariables(long projectCode, Integer processInstanceId); /** * encapsulation gantt structure * + * @param projectCode project code * @param processInstanceId process instance id * @return gantt tree data * @throws Exception exception when json parse */ - Map viewGantt(Integer processInstanceId) throws Exception; + Map viewGantt(long projectCode, Integer processInstanceId) throws Exception; /** * query process instance by processDefinitionCode and stateArray diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationService.java new file mode 100644 index 0000000000000000000000000000000000000000..f31cc8be485a1728cbb054320b32b80c2da89723 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationService.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + +/** + * process task relation service + */ +public interface ProcessTaskRelationService { + + /** + * create process task relation + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode processDefinitionCode + * @param preTaskCode preTaskCode + * @param postTaskCode postTaskCode + * @return create result code + */ + Map createProcessTaskRelation(User loginUser, + long projectCode, + long processDefinitionCode, + long preTaskCode, + long postTaskCode); + + /** + * delete process task relation + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param taskCode the post task code + * @return delete result code + */ + Map deleteTaskProcessRelation(User loginUser, + long projectCode, + long processDefinitionCode, + long taskCode); + + /** + * delete task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param preTaskCodes the pre task codes, sep ',' + * @param taskCode the post task code + * @return delete result code + */ + Map deleteUpstreamRelation(User loginUser, + long projectCode, + String preTaskCodes, + long taskCode); + + /** + * delete task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param postTaskCodes the post task codes, sep ',' + * @param taskCode the pre task code + * @return 
delete result code + */ + Map deleteDownstreamRelation(User loginUser, + long projectCode, + String postTaskCodes, + long taskCode); + + /** + * query task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode current task code (post task code) + * @return process task relation list + */ + Map queryUpstreamRelation(User loginUser, + long projectCode, + long taskCode); + + /** + * query task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode pre task code + * @return process task relation list + */ + Map queryDownstreamRelation(User loginUser, + long projectCode, + long taskCode); + + /** + * delete edge + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param preTaskCode pre task code + * @param postTaskCode post task code + * @return delete result code + */ + Map deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java index dffa866ac80d0fab2a97b0c7bfec2578215da532..38c0c53386b5c79412980b5a27eb114ff8310db6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java @@ -46,6 +46,15 @@ public interface ProjectService { */ Map queryByCode(User loginUser, long projectCode); + /** + * query project details by name + * + * @param loginUser login user + * @param projectName project name + * @return project detail information + */ + Map queryByName(User loginUser, String projectName); + /** * check project and authorization * @@ -110,6 +119,15 @@ public interface 
ProjectService { */ Map queryAuthorizedProject(User loginUser, Integer userId); + /** + * query authorized user + * + * @param loginUser login user + * @param projectCode project code + * @return users who have permission for the specified project + */ + Map queryAuthorizedUser(User loginUser, Long projectCode); + /** * query authorized project * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java index 7013520422961a169588e4e35a81fd2f306b809c..f978b96b6b0b04be3158877e28d874e08aff4b7b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java @@ -76,4 +76,12 @@ public interface QueueService { */ Result verifyQueue(String queue, String queueName); + /** + * query queue by queueName + * + * @param queueName queue name + * @return queue object for provide queue name + */ + Map queryQueueName(String queueName); + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index f91da7756e8786e6bc8a35dbb013472793c9494f..25a5af24b1a8a63148267b5e3bab25a36cadb9eb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.ProgramType; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import 
java.io.IOException; import java.util.Map; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index bad9b3596f4f13269d502b76fa6a0541d79a9824..b4788edcfe03b90e7b08cfa16f555866ce0453a7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -148,4 +148,29 @@ public interface SchedulerService { * @return the next five fire time */ Map previewSchedule(User loginUser, String schedule); + + /** + * update process definition schedule + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param scheduleExpression scheduleExpression + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param workerGroup worker group + * @param processInstancePriority process instance priority + * @return update result code + */ + Map updateScheduleByProcessDefinitionCode(User loginUser, + long projectCode, + long processDefinitionCode, + String scheduleExpression, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + Priority processInstancePriority, + String workerGroup, + long environmentCode); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java index e17432929d83dd590190ebb3dc970747de3510e3..192bf1695929659c9f9233ec258d5dc18f39f9fb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskDefinitionService.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.dao.entity.User; import java.util.Map; @@ -156,4 +157,17 @@ public interface TaskDefinitionService { */ Map genTaskCodeList(Integer genNum); + /** + * release task definition + * + * @param loginUser login user + * @param projectCode project code + * @param code task definition code + * @param releaseState releaseState + * @return update result code + */ + Map releaseTaskDefinition(User loginUser, + long projectCode, + long code, + ReleaseState releaseState); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java index 30d98a130a3586b41507ac6622e6778c3d71614a..47a4082a30a69916407f6d78441919dd506ed1df 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -92,4 +92,20 @@ public interface TenantService { * @return true if tenant code can user, otherwise return false */ Result verifyTenantCode(String tenantCode); + + /** + * check if provide tenant code object exists + * + * @param tenantCode tenant code + * @return true if tenant code exists, false if not + */ + boolean checkTenantExists(String tenantCode); + + /** + * query tenant by tenant code + * + * @param tenantCode tenant code + * @return tenant list + */ + Map queryByTenantCode(String tenantCode); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index a5c004b9644f989fe0331341a331fd781eb164c5..485702a0e83d60d3750a99c6d9b1043b76888cb1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -153,6 +153,25 @@ public interface UsersService { Map grantProject(User loginUser, int userId, String projectIds); + /** + * grant project by code + * + * @param loginUser login user + * @param userId user id + * @param projectCode project code + * @return grant result code + */ + Map grantProjectByCode(User loginUser, int userId, long projectCode); + + /** + * revoke the project permission for specified user. + * @param loginUser Login user + * @param userId User id + * @param projectCode Project Code + * @return + */ + Map revokeProject(User loginUser, int userId, long projectCode); + /** * grant resource * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java index 0bc83711089fa18f7cd6d049009ec9b3fd5207d8..9fd9bbfc3da3ee8eb77f6793a10525012c323b4a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.service.impl; +import org.apache.commons.lang3.StringUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.AccessTokenService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -31,6 +32,7 @@ import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; import java.util.Date; import java.util.HashMap; +import 
java.util.List; import java.util.Map; import org.slf4j.Logger; @@ -78,12 +80,36 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok return result; } + /** + * query access token for specified user + * + * @param loginUser login user + * @param userId user id + * @return token list for specified user + */ + @Override + public Map queryAccessTokenByUser(User loginUser, Integer userId) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + // only admin can operate + if (isNotAdmin(loginUser, result)) { + return result; + } + + // query access token for specified user + List accessTokenList = this.accessTokenMapper.queryAccessTokenByUser(userId); + result.put(Constants.DATA_LIST, accessTokenList); + this.putMsg(result, Status.SUCCESS); + return result; + } + /** * create token * * @param userId token for user * @param expireTime token expire time - * @param token token string + * @param token token string (if it is absent, it will be automatically generated) * @return create result code */ @SuppressWarnings("checkstyle:WhitespaceAround") @@ -91,14 +117,23 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok public Map createToken(User loginUser, int userId, String expireTime, String token) { Map result = new HashMap<>(); + // 1. check permission if (!hasPerm(loginUser,userId)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } + // 2. check if user is existed if (userId <= 0) { throw new IllegalArgumentException("User id should not less than or equals to 0."); } + + // 3. generate access token if absent + if (StringUtils.isBlank(token)) { + token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis()); + } + + // 4. 
persist to the database AccessToken accessToken = new AccessToken(); accessToken.setUserId(userId); accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); @@ -106,10 +141,10 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok accessToken.setCreateTime(new Date()); accessToken.setUpdateTime(new Date()); - // insert int insert = accessTokenMapper.insert(accessToken); if (insert > 0) { + result.put(Constants.DATA_LIST, accessToken); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR); @@ -172,22 +207,33 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok * @param id token id * @param userId token for user * @param expireTime token expire time - * @param token token string - * @return update result code + * @param token token string (if it is absent, it will be automatically generated) + * @return updated access token entity */ @Override public Map updateToken(User loginUser, int id, int userId, String expireTime, String token) { Map result = new HashMap<>(); + + // 1. check permission if (!hasPerm(loginUser,userId)) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } + + // 2. check if token is existed AccessToken accessToken = accessTokenMapper.selectById(id); if (accessToken == null) { logger.error("access token not exist, access token id {}", id); putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); return result; } + + // 3. generate access token if absent + if (StringUtils.isBlank(token)) { + token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis()); + } + + // 4. 
persist to the database accessToken.setUserId(userId); accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); accessToken.setToken(token); @@ -195,6 +241,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok accessTokenMapper.updateById(accessToken); + result.put(Constants.DATA_LIST, accessToken); putMsg(result, Status.SUCCESS); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java index 887b9113a443ac5a155b6498537150dba4fddd98..b210207b295559e1fe5854d0cac096c7296e46f4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java @@ -17,6 +17,9 @@ package org.apache.dolphinscheduler.api.service.impl; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.AlertGroupService; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -25,15 +28,6 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; -import org.apache.dolphinscheduler.dao.vo.AlertGroupVo; - -import org.apache.commons.lang.StringUtils; - -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -41,8 +35,10 @@ import org.springframework.dao.DuplicateKeyException; 
import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * alert group service impl @@ -116,12 +112,12 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup return result; } - Page page = new Page<>(pageNo, pageSize); - IPage alertGroupVoIPage = alertGroupMapper.queryAlertGroupVo(page, searchVal); - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - - pageInfo.setTotal((int) alertGroupVoIPage.getTotal()); - pageInfo.setTotalList(alertGroupVoIPage.getRecords()); + Page page = new Page<>(pageNo, pageSize); + IPage alertGroupIPage = alertGroupMapper.queryAlertGroupPage( + page, searchVal); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotal((int) alertGroupIPage.getTotal()); + pageInfo.setTotalList(alertGroupIPage.getRecords()); result.setData(pageInfo); putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java index f271960b32f9ddd88c33b596c1af613a3bbe34a4..ec55880b4c3b5bb23fc378af2bc945a4a3fc2ad5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.api.vo.AlertPluginInstanceVO; import org.apache.dolphinscheduler.common.Constants; -import 
org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.PluginDefine; @@ -33,6 +32,8 @@ import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -187,14 +188,20 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A } @Override - public Result queryPluginPage(int pageIndex, int pageSize) { - IPage pluginInstanceIPage = new Page<>(pageIndex, pageSize); - pluginInstanceIPage = alertPluginInstanceMapper.selectPage(pluginInstanceIPage, null); + public Result listPaging(User loginUser, String searchVal, int pageNo, int pageSize) { - PageInfo pageInfo = new PageInfo<>(pageIndex, pageSize); - pageInfo.setTotal((int) pluginInstanceIPage.getTotal()); - pageInfo.setTotalList(buildPluginInstanceVOList(pluginInstanceIPage.getRecords())); Result result = new Result(); + if (!isAdmin(loginUser)) { + putMsg(result,Status.USER_NO_OPERATION_PERM); + return result; + } + + Page page = new Page<>(pageNo, pageSize); + IPage alertPluginInstanceIPage = alertPluginInstanceMapper.queryByInstanceNamePage(page, searchVal); + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + pageInfo.setTotal((int) alertPluginInstanceIPage.getTotal()); + pageInfo.setTotalList(buildPluginInstanceVOList(alertPluginInstanceIPage.getRecords())); result.setData(pageInfo); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java index 1800dd2e3741f7148dc0323b448f38ff34bae5bb..2c66138bc119548d6622796f8f179ade516d1aee 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -44,6 +44,7 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.lang.StringUtils; +import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; @@ -154,10 +155,13 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal } } + List processInstanceStateCounts = new ArrayList<>(); Long[] projectCodeArray = projectCode == 0 ? getProjectCodesArrays(loginUser) : new Long[] { projectCode }; - List processInstanceStateCounts = - instanceStateCounter.apply(start, end, projectCodeArray); + + if (projectCodeArray.length != 0 || loginUser.getUserType() == UserType.ADMIN_USER) { + processInstanceStateCounts = instanceStateCounter.apply(start, end, projectCodeArray); + } if (processInstanceStateCounts != null) { TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); @@ -187,10 +191,13 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal } } + List defineGroupByUsers = new ArrayList<>(); Long[] projectCodeArray = projectCode == 0 ? 
getProjectCodesArrays(loginUser) : new Long[] { projectCode }; - List defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser( - loginUser.getId(), projectCodeArray, isAdmin(loginUser)); + if (projectCodeArray.length != 0 || loginUser.getUserType() == UserType.ADMIN_USER) { + defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser( + loginUser.getId(), projectCodeArray, isAdmin(loginUser)); + } DefineUserDto dto = new DefineUserDto(defineGroupByUsers); result.put(Constants.DATA_LIST, dto); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java index bd29aa81680d13c0370e89f7e03e9459a1c94e8f..aa640b1816047967c1063d168b3bd718c0cae466 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java @@ -22,16 +22,17 @@ import org.apache.dolphinscheduler.api.service.DataSourceService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.datasource.DatasourceUtil; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import 
org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.commons.lang.StringUtils; @@ -316,7 +317,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource @Override public Result checkConnection(DbType type, ConnectionParam connectionParam) { Result result = new Result<>(); - try (Connection connection = DatasourceUtil.getConnection(type, connectionParam)) { + try (Connection connection = DataSourceClientProvider.getInstance().getConnection(type, connectionParam)) { if (connection == null) { putMsg(result, Status.CONNECTION_TEST_FAILURE); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java index 72914854bc6d4394680aaff55649c39ad809a354..bd502e2e1af1d1b97ed6b56a3e76c49c95f0b4fb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java @@ -23,10 +23,9 @@ import org.apache.dolphinscheduler.api.service.EnvironmentService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import 
org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils.SnowFlakeException; import org.apache.dolphinscheduler.dao.entity.Environment; import org.apache.dolphinscheduler.dao.entity.EnvironmentWorkerGroupRelation; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; @@ -34,7 +33,7 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper; import org.apache.dolphinscheduler.dao.mapper.EnvironmentWorkerGroupRelationMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; - +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections4.SetUtils; import org.apache.commons.lang.StringUtils; @@ -115,9 +114,9 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme env.setUpdateTime(new Date()); long code = 0L; try { - code = SnowFlakeUtils.getInstance().nextId(); + code = CodeGenerateUtils.getInstance().genCode(); env.setCode(code); - } catch (SnowFlakeException e) { + } catch (CodeGenerateException e) { logger.error("Environment code get error, ", e); } if (code == 0L) { @@ -367,17 +366,18 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme env.setOperator(loginUser.getId()); env.setUpdateTime(new Date()); - int update = environmentMapper.update(env, new UpdateWrapper().lambda().eq(Environment::getCode,code)); + int update = environmentMapper.update(env, new UpdateWrapper().lambda().eq(Environment::getCode, code)); if (update > 0) { deleteWorkerGroupSet.stream().forEach(key -> { - if (!StringUtils.isEmpty(key)) { + if (StringUtils.isNotEmpty(key)) { relationMapper.delete(new QueryWrapper() .lambda() - .eq(EnvironmentWorkerGroupRelation::getEnvironmentCode,code)); + 
.eq(EnvironmentWorkerGroupRelation::getEnvironmentCode, code) + .eq(EnvironmentWorkerGroupRelation::getWorkerGroup, key)); } }); addWorkerGroupSet.stream().forEach(key -> { - if (!StringUtils.isEmpty(key)) { + if (StringUtils.isNotEmpty(key)) { EnvironmentWorkerGroupRelation relation = new EnvironmentWorkerGroupRelation(); relation.setEnvironmentCode(code); relation.setWorkerGroup(key); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java index b910c4ba016800f9db7a93d98cdaf357ce880de9..2f1d088a8b2d639911e3ad377bf438a8c0f927f8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java @@ -17,39 +17,22 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; -import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; - import com.fasterxml.jackson.core.type.TypeReference; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.ExecutorService; 
import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.CommandType; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.FailureStrategy; -import org.apache.dolphinscheduler.common.enums.Priority; -import org.apache.dolphinscheduler.common.enums.ReleaseState; -import org.apache.dolphinscheduler.common.enums.RunMode; -import org.apache.dolphinscheduler.common.enums.TaskDependType; -import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.Command; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.Schedule; -import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; @@ -57,22 +40,15 @@ import org.apache.dolphinscheduler.remote.command.StateEventChangeCommand; import org.apache.dolphinscheduler.remote.processor.StateEventCallbackService; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; - -import org.apache.commons.collections.MapUtils; -import 
org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.*; + /** * executor service impl */ @@ -149,7 +125,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // check process define release state ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); - result = checkProcessDefinitionValid(processDefinition, processDefinitionCode); + result = checkProcessDefinitionValid(projectCode, processDefinition, processDefinitionCode); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } @@ -169,17 +145,23 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ /** * create command */ - int create = this.createCommand(commandType, processDefinition.getCode(), - taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), - warningGroupId, runMode, processInstancePriority, workerGroup, environmentCode, startParams, expectedParallelismNumber, dryRun); - - if (create > 0) { - processDefinition.setWarningGroupId(warningGroupId); - processDefinitionMapper.updateById(processDefinition); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); + try { + int create = this.createCommand(commandType, processDefinition.getCode(), + taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), + warningGroupId, runMode, processInstancePriority, workerGroup, environmentCode, startParams, expectedParallelismNumber, dryRun); + + if (create > 0) { + 
processDefinition.setWarningGroupId(warningGroupId); + processDefinitionMapper.updateById(processDefinition); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); + } + } catch (ServiceException e) { + Optional status = Status.findStatusBy(e.getCode()); + putMsg(result, status.orElse(Status.START_PROCESS_INSTANCE_ERROR)); } + return result; } @@ -204,14 +186,15 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ /** * check whether the process definition can be executed * + * @param projectCode project code * @param processDefinition process definition * @param processDefineCode process definition code * @return check result code */ @Override - public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, long processDefineCode) { + public Map checkProcessDefinitionValid(long projectCode, ProcessDefinition processDefinition, long processDefineCode) { Map result = new HashMap<>(); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineCode); } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { @@ -255,7 +238,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { - result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionCode()); + result = checkProcessDefinitionValid(projectCode, processDefinition, processInstance.getProcessDefinitionCode()); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } @@ -283,13 +266,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl 
implements ExecutorServ switch (executeType) { case REPEAT_RUNNING: - result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), CommandType.REPEAT_RUNNING, startParams); + result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), processDefinition.getVersion(), CommandType.REPEAT_RUNNING, startParams); break; case RECOVER_SUSPENDED_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), CommandType.RECOVER_SUSPENDED_PROCESS, startParams); + result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), processDefinition.getVersion(), CommandType.RECOVER_SUSPENDED_PROCESS, startParams); break; case START_FAILURE_TASK_PROCESS: - result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), CommandType.START_FAILURE_TASK_PROCESS, startParams); + result = insertCommand(loginUser, processInstanceId, processDefinition.getCode(), processDefinition.getVersion(), CommandType.START_FAILURE_TASK_PROCESS, startParams); break; case STOP: if (processInstance.getState() == ExecutionStatus.READY_STOP) { @@ -356,7 +339,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } break; case RECOVER_SUSPENDED_PROCESS: - if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { + if (executionStatus.typeIsCancel()) { checkResult = true; } break; @@ -409,10 +392,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ * @param loginUser login user * @param instanceId instance id * @param processDefinitionCode process definition code + * @param processVersion * @param commandType command type * @return insert result code */ - private Map insertCommand(User loginUser, Integer instanceId, long processDefinitionCode, CommandType commandType, String startParams) { + private Map insertCommand(User loginUser, Integer instanceId, long processDefinitionCode, int processVersion, CommandType commandType, 
String startParams) { Map result = new HashMap<>(); //To add startParams only when repeat running is needed @@ -427,6 +411,8 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ command.setProcessDefinitionCode(processDefinitionCode); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); command.setExecutorId(loginUser.getId()); + command.setProcessDefinitionVersion(processVersion); + command.setProcessInstanceId(instanceId); if (!processService.verifyIsNeedCreateCommand(command)) { putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionCode); @@ -462,11 +448,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ return result; } - List ids = new ArrayList<>(); - processService.recurseFindSubProcessId(processDefinition.getId(), ids); - Integer[] idArray = ids.toArray(new Integer[ids.size()]); - if (!ids.isEmpty()) { - List processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); + List codes = new ArrayList<>(); + processService.recurseFindSubProcess(processDefinition.getCode(), codes); + if (!codes.isEmpty()) { + List processDefinitionList = processDefinitionMapper.queryByCodes(codes); if (processDefinitionList != null) { for (ProcessDefinition processDefinitionTmp : processDefinitionList) { /** @@ -517,7 +502,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ Map cmdParam = new HashMap<>(); if (commandType == null) { - command.setCommandType(CommandType.START_PROCESS); + command.setCommandType(StringUtils.isEmpty(startNodeList) ? 
CommandType.START_PROCESS : CommandType.START_CURRENT_TASK_PROCESS); } else { command.setCommandType(commandType); } @@ -530,7 +515,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } if (!StringUtils.isEmpty(startNodeList)) { - cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); + cmdParam.put(CMD_PARAM_START_NODES, startNodeList); } if (warningType != null) { command.setWarningType(warningType); @@ -545,6 +530,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ command.setWorkerGroup(workerGroup); command.setEnvironmentCode(environmentCode); command.setDryRun(dryRun); + ProcessDefinition processDefinition = processService.findProcessDefinitionByCode(processDefineCode); + if (processDefinition != null) { + command.setProcessDefinitionVersion(processDefinition.getVersion()); + } + command.setProcessInstanceId(0); Date start = null; Date end = null; @@ -586,8 +576,20 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ int createCount = 0; runMode = (runMode == null) ? 
RunMode.RUN_MODE_SERIAL : runMode; Map cmdParam = JSONUtils.toMap(command.getCommandParam()); + List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(command.getProcessDefinitionCode()); + LinkedList listDate = new LinkedList<>(CronUtils.getSelfFireDateList(start, end, schedules)); + final int listDateSize = listDate.size(); + if (listDateSize == 0) { + logger.warn("can't create complement command, because the fire date cannot be created, scope: {} ~ {}", + DateUtils.dateToString(start), DateUtils.dateToString(end)); + throw new ServiceException(Status.COMPLEMENT_PROCESS_INSTANCE_DATE_RANGE_ERROR); + } switch (runMode) { case RUN_MODE_SERIAL: { + if (start.after(end)) { + logger.warn("The startDate {} is later than the endDate {}", start, end); + break; + } cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); @@ -595,26 +597,41 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ break; } case RUN_MODE_PARALLEL: { - LinkedList listDate = new LinkedList<>(); - List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(command.getProcessDefinitionCode()); - listDate.addAll(CronUtils.getSelfFireDateList(start, end, schedules)); + if (start.after(end)) { + logger.warn("The startDate {} is later than the endDate {}", start, end); + break; + } + createCount = listDate.size(); if (!CollectionUtils.isEmpty(listDate)) { if (expectedParallelismNumber != null && expectedParallelismNumber != 0) { createCount = Math.min(listDate.size(), expectedParallelismNumber); + if (listDateSize < createCount) { + createCount = listDateSize; + } } logger.info("In parallel mode, current expectedParallelismNumber:{}", createCount); - int chunkSize = listDate.size() / createCount; - - for (int i = 0; i < createCount; i++) { - int rangeStart = i == 0 ? 
i : (i * chunkSize); - int rangeEnd = i == createCount - 1 ? listDate.size() - 1 - : rangeStart + chunkSize; - if (rangeEnd == listDate.size()) { - rangeEnd = listDate.size() - 1; + + // Distribute the number of tasks equally to each command. + // The last command with insufficient quantity will be assigned to the remaining tasks. + int itemsPerCommand = (listDateSize / createCount); + int remainingItems = (listDateSize % createCount); + int startDateIndex = 0; + int endDateIndex = 0; + + for (int i = 1; i <= createCount; i++) { + int extra = (i <= remainingItems) ? 1 : 0; + int singleCommandItems = (itemsPerCommand + extra); + + if (i == 1) { + endDateIndex += singleCommandItems - 1; + } else { + startDateIndex = endDateIndex + 1; + endDateIndex += singleCommandItems; } - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(listDate.get(rangeStart))); - cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(listDate.get(rangeEnd))); + + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(listDate.get(startDateIndex))); + cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(listDate.get(endDateIndex))); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); processService.createCommand(command); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java index df794647c86aafd5d615999b4905e68409a42d64..88c5a941336d7bfd9fbeab42dfb78811ca5779be 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -20,10 +20,15 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import 
org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.service.LoggerService; +import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.ArrayUtils; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -31,6 +36,7 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.lang.StringUtils; import java.nio.charset.StandardCharsets; +import java.util.Map; import java.util.Objects; import javax.annotation.PostConstruct; @@ -41,11 +47,13 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import com.google.common.primitives.Bytes; + /** * logger service impl */ @Service -public class LoggerServiceImpl implements LoggerService { +public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService { private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class); @@ -56,6 +64,15 @@ public class LoggerServiceImpl implements LoggerService { private LogClientService logClient; + @Autowired + ProjectMapper projectMapper; + + @Autowired + ProjectService projectService; + + @Autowired + TaskDefinitionMapper taskDefinitionMapper; + @PostConstruct public void init() { if (Objects.isNull(this.logClient)) { @@ -87,27 +104,9 @@ 
public class LoggerServiceImpl implements LoggerService { if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { return Result.error(Status.TASK_INSTANCE_NOT_FOUND); } - - String host = getHost(taskInstance.getHost()); - Result result = new Result<>(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); - - logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(), - Constants.RPC_PORT); - - StringBuilder log = new StringBuilder(); - if (skipLineNum == 0) { - String head = String.format(LOG_HEAD_FORMAT, - taskInstance.getLogPath(), - host, - Constants.SYSTEM_LINE_SEPARATOR); - log.append(head); - } - - log.append(logClient - .rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit)); - - result.setData(log.toString()); + String log = queryLog(taskInstance,skipLineNum,limit); + result.setData(log); return result; } @@ -124,13 +123,72 @@ public class LoggerServiceImpl implements LoggerService { if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { throw new ServiceException("task instance is null or host is null"); } - String host = getHost(taskInstance.getHost()); - byte[] head = String.format(LOG_HEAD_FORMAT, - taskInstance.getLogPath(), - host, - Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8); - return ArrayUtils.addAll(head, - logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); + return getLogBytes(taskInstance); + } + + /** + * query log + * + * @param loginUser login user + * @param projectCode project code + * @param taskInstId task instance id + * @param skipLineNum skip line number + * @param limit limit + * @return log string data + */ + @Override + @SuppressWarnings("unchecked") + public Map queryLog(User loginUser, long projectCode, int taskInstId, int skipLineNum, int limit) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = 
projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + // check whether the task instance can be found + TaskInstance task = processService.findTaskInstanceById(taskInstId); + if (task == null || StringUtils.isBlank(task.getHost())) { + putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); + return result; + } + + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode()); + if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { + putMsg(result, Status.TASK_INSTANCE_NOT_FOUND, taskInstId); + return result; + } + String log = queryLog(task, skipLineNum, limit); + result.put(Constants.DATA_LIST, log); + return result; + } + + /** + * get log bytes + * + * @param loginUser login user + * @param projectCode project code + * @param taskInstId task instance id + * @return log byte array + */ + @Override + public byte[] getLogBytes(User loginUser, long projectCode, int taskInstId) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + throw new ServiceException("user has no permission"); + } + // check whether the task instance can be found + TaskInstance task = processService.findTaskInstanceById(taskInstId); + if (task == null || StringUtils.isBlank(task.getHost())) { + throw new ServiceException("task instance is null or host is null"); + } + + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode()); + if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { + throw new ServiceException("task instance does not exist in project"); + } + return getLogBytes(task); } /** @@ -145,4 +203,50 @@ public class LoggerServiceImpl implements LoggerService { } return Host.of(address).getIp(); } + + /** + * query log 
+ * + * @param taskInstance task instance + * @param skipLineNum skip line number + * @param limit limit + * @return log string data + */ + private String queryLog(TaskInstance taskInstance, int skipLineNum, int limit) { + + String host = getHost(taskInstance.getHost()); + + logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(), + Constants.RPC_PORT); + + StringBuilder log = new StringBuilder(); + if (skipLineNum == 0) { + String head = String.format(LOG_HEAD_FORMAT, + taskInstance.getLogPath(), + host, + Constants.SYSTEM_LINE_SEPARATOR); + log.append(head); + } + + log.append(logClient + .rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit)); + + return log.toString(); + } + + /** + * get log bytes + * + * @param taskInstance task instance + * @return log byte array + */ + private byte[] getLogBytes(TaskInstance taskInstance) { + String host = getHost(taskInstance.getHost()); + byte[] head = String.format(LOG_HEAD_FORMAT, + taskInstance.getLogPath(), + host, + Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8); + return Bytes.concat(head, + logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java index cb3b0b2716313dac30b587275c36dedf15a45c05..934aaf4464074b0ea9865a9791f31970bd8f4665 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java @@ -19,14 +19,14 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.MonitorService; -import 
org.apache.dolphinscheduler.api.utils.RegistryCenterUtils; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.model.WorkerServerModel; import org.apache.dolphinscheduler.dao.MonitorDBDao; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; +import org.apache.dolphinscheduler.service.registry.RegistryClient; import java.util.HashMap; import java.util.List; @@ -48,6 +48,9 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic @Autowired private MonitorDBDao monitorDBDao; + @Autowired + private RegistryClient registryClient; + /** * query database state * @@ -55,7 +58,7 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic * @return data base state */ @Override - public Map queryDatabaseState(User loginUser) { + public Map queryDatabaseState(User loginUser) { Map result = new HashMap<>(); List monitorRecordList = monitorDBDao.queryDatabaseState(); @@ -74,34 +77,15 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic * @return master information list */ @Override - public Map queryMaster(User loginUser) { + public Map queryMaster(User loginUser) { Map result = new HashMap<>(); List masterServers = getServerListFromRegistry(true); result.put(Constants.DATA_LIST, masterServers); - putMsg(result,Status.SUCCESS); - - return result; - } - - /** - * query zookeeper state - * - * @param loginUser login user - * @return zookeeper information list - */ - @Override - public Map queryZookeeperState(User loginUser) { - Map result = new HashMap<>(); - - List zookeeperRecordList = RegistryCenterUtils.zookeeperInfoList(); - - result.put(Constants.DATA_LIST, zookeeperRecordList); putMsg(result, Status.SUCCESS); return 
result; - } /** @@ -111,46 +95,48 @@ public class MonitorServiceImpl extends BaseServiceImpl implements MonitorServic * @return worker information list */ @Override - public Map queryWorker(User loginUser) { + public Map queryWorker(User loginUser) { Map result = new HashMap<>(); List workerServers = getServerListFromRegistry(false) - .stream() - .map((Server server) -> { - WorkerServerModel model = new WorkerServerModel(); - model.setId(server.getId()); - model.setHost(server.getHost()); - model.setPort(server.getPort()); - model.setZkDirectories(Sets.newHashSet(server.getZkDirectory())); - model.setResInfo(server.getResInfo()); - model.setCreateTime(server.getCreateTime()); - model.setLastHeartbeatTime(server.getLastHeartbeatTime()); - return model; - }) - .collect(Collectors.toList()); + .stream() + .map((Server server) -> { + WorkerServerModel model = new WorkerServerModel(); + model.setId(server.getId()); + model.setHost(server.getHost()); + model.setPort(server.getPort()); + model.setZkDirectories(Sets.newHashSet(server.getZkDirectory())); + model.setResInfo(server.getResInfo()); + model.setCreateTime(server.getCreateTime()); + model.setLastHeartbeatTime(server.getLastHeartbeatTime()); + return model; + }) + .collect(Collectors.toList()); Map workerHostPortServerMapping = workerServers - .stream() - .collect(Collectors.toMap( - (WorkerServerModel worker) -> { - String[] s = worker.getZkDirectories().iterator().next().split("/"); - return s[s.length - 1]; - } - , Function.identity() - , (WorkerServerModel oldOne, WorkerServerModel newOne) -> { - oldOne.getZkDirectories().addAll(newOne.getZkDirectories()); - return oldOne; - })); + .stream() + .collect(Collectors.toMap( + (WorkerServerModel worker) -> { + String[] s = worker.getZkDirectories().iterator().next().split("/"); + return s[s.length - 1]; + } + , Function.identity() + , (WorkerServerModel oldOne, WorkerServerModel newOne) -> { + oldOne.getZkDirectories().addAll(newOne.getZkDirectories()); + return 
oldOne; + })); result.put(Constants.DATA_LIST, workerHostPortServerMapping.values()); - putMsg(result,Status.SUCCESS); + putMsg(result, Status.SUCCESS); return result; } @Override public List getServerListFromRegistry(boolean isMaster) { - return isMaster ? RegistryCenterUtils.getMasterServers() : RegistryCenterUtils.getWorkerServers(); + return isMaster + ? registryClient.getServerList(NodeType.MASTER) + : registryClient.getServerList(NodeType.WORKER); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index fc2a2ac5abe49672caf054f6fc5b7c403e0c8317..9eb277caa65f78d0c60b918b1ab199bd05994363 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -17,9 +17,10 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE; import org.apache.dolphinscheduler.api.dto.DagDataSchedule; +import org.apache.dolphinscheduler.api.dto.ScheduleParam; import org.apache.dolphinscheduler.api.dto.treeview.Instance; import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; import org.apache.dolphinscheduler.api.enums.Status; @@ -33,18 +34,25 @@ import org.apache.dolphinscheduler.api.utils.FileUtils; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.AuthorizationType; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import 
org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; +import org.apache.dolphinscheduler.common.task.switchtask.SwitchParameters; +import org.apache.dolphinscheduler.common.task.switchtask.SwitchResultVo; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils.SnowFlakeException; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; @@ -69,10 +77,11 @@ import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.service.permission.PermissionCheck; import org.apache.dolphinscheduler.service.process.ProcessService; -import 
org.apache.commons.lang.StringUtils; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.Stat; import java.io.BufferedOutputStream; import java.io.IOException; @@ -103,6 +112,10 @@ import org.springframework.web.multipart.MultipartFile; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; /** @@ -221,13 +234,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } long processDefinitionCode; try { - processDefinitionCode = SnowFlakeUtils.getInstance().nextId(); - } catch (SnowFlakeException e) { + processDefinitionCode = CodeGenerateUtils.getInstance().genCode(); + } catch (CodeGenerateException e) { putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); return result; } ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description, - globalParams, locations, timeout, loginUser.getId(), tenantId); + globalParams, locations, timeout, loginUser.getId(), tenantId); + return createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs); } @@ -236,7 +250,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition, List taskDefinitionLogs) { Map result = new HashMap<>(); - int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs); + int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) { logger.info("The task has not changed, so 
skip"); } @@ -244,12 +258,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); } - int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, true); + int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion == 0) { putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR); } - int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs); + int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), + insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE); if (insertResult == Constants.EXIT_CODE_SUCCESS) { putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); @@ -292,8 +307,8 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } List processTaskRelations = taskRelationList.stream() - .map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class)) - .collect(Collectors.toList()); + .map(processTaskRelationLog -> JSONUtils.parseObject(JSONUtils.toJsonString(processTaskRelationLog), ProcessTaskRelation.class)) + .collect(Collectors.toList()); List taskNodeList = processService.transformTask(processTaskRelations, taskDefinitionLogs); if (taskNodeList.size() != taskRelationList.size()) { Set postTaskCodes = taskRelationList.stream().map(ProcessTaskRelationLog::getPostTaskCode).collect(Collectors.toSet()); @@ -301,11 +316,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Collection codes = 
CollectionUtils.subtract(postTaskCodes, taskNodeCodes); if (CollectionUtils.isNotEmpty(codes)) { logger.error("the task code is not exit"); - putMsg(result, Status.TASK_DEFINE_NOT_EXIST, StringUtils.join(codes, Constants.COMMA)); + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, org.apache.commons.lang.StringUtils.join(codes, Constants.COMMA)); return result; } } - if (graphHasCycle(taskNodeList)) { + if (processService.graphHasCycle(taskNodeList)) { logger.error("process DAG has cycle"); putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); return result; @@ -349,6 +364,36 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } + /** + * query process definition simple list + * + * @param loginUser login user + * @param projectCode project code + * @return definition simple list + */ + @Override + public Map queryProcessDefinitionSimpleList(User loginUser, long projectCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + List processDefinitions = processDefinitionMapper.queryAllDefinitionList(projectCode); + ArrayNode arrayNode = JSONUtils.createArrayNode(); + for (ProcessDefinition processDefinition : processDefinitions) { + ObjectNode processDefinitionNode = JSONUtils.createObjectNode(); + processDefinitionNode.put("id", processDefinition.getId()); + processDefinitionNode.put("code", processDefinition.getCode()); + processDefinitionNode.put("name", processDefinition.getName()); + processDefinitionNode.put("projectCode", processDefinition.getProjectCode()); + arrayNode.add(processDefinitionNode); + } + result.put(Constants.DATA_LIST, arrayNode); + putMsg(result, Status.SUCCESS); + return result; + } + /** * query process definition list paging * @@ -374,7 +419,7 @@ public class ProcessDefinitionServiceImpl extends 
BaseServiceImpl implements Pro Page page = new Page<>(pageNo, pageSize); IPage processDefinitionIPage = processDefinitionMapper.queryDefineListPaging( - page, searchVal, userId, project.getCode(), isAdmin(loginUser)); + page, searchVal, userId, project.getCode(), isAdmin(loginUser)); List records = processDefinitionIPage.getRecords(); for (ProcessDefinition pd : records) { @@ -410,7 +455,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); } else { Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId()); @@ -503,7 +548,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); // check process definition exists - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); return result; } @@ -531,7 +576,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinitionDeepCopy, List taskDefinitionLogs) { Map result = new HashMap<>(); - int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs); + int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) { logger.info("The task has not changed, so skip"); } @@ -539,25 +584,45 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new 
ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); } - int insertVersion; - if (processDefinition.equals(processDefinitionDeepCopy)) { - insertVersion = processDefinitionDeepCopy.getVersion(); + boolean isChange = false; + if (processDefinition.equals(processDefinitionDeepCopy) && saveTaskResult == Constants.EXIT_CODE_SUCCESS) { + List processTaskRelationLogList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion()); + if (taskRelationList.size() == processTaskRelationLogList.size()) { + Set taskRelationSet = taskRelationList.stream().collect(Collectors.toSet()); + Set processTaskRelationLogSet = processTaskRelationLogList.stream().collect(Collectors.toSet()); + if (taskRelationSet.size() == processTaskRelationLogSet.size()) { + taskRelationSet.removeAll(processTaskRelationLogSet); + if (!taskRelationSet.isEmpty()) { + isChange = true; + } + } else { + isChange = true; + } + } else { + isChange = true; + } } else { - processDefinition.setUpdateTime(new Date()); - insertVersion = processService.saveProcessDefine(loginUser, processDefinition, true); + isChange = true; } - if (insertVersion == 0) { - putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); - throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); - } - int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), - processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs); - if (insertResult == Constants.EXIT_CODE_SUCCESS) { + if (isChange) { + processDefinition.setUpdateTime(new Date()); + int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); + if (insertVersion <= 0) { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), + 
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE); + if (insertResult == Constants.EXIT_CODE_SUCCESS) { + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + } else { putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); - } else { - putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); - throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } return result; } @@ -605,7 +670,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); return result; } @@ -629,35 +694,67 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } // get the timing according to the process definition - List schedules = scheduleMapper.queryByProcessDefinitionCode(code); - if (!schedules.isEmpty() && schedules.size() > 1) { - logger.warn("scheduler num is {},Greater than 1", schedules.size()); - putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); - return result; - } else if (schedules.size() == 1) { - Schedule schedule = schedules.get(0); - if (schedule.getReleaseState() == ReleaseState.OFFLINE) { - int delete = scheduleMapper.deleteById(schedule.getId()); + Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code); + if (scheduleObj != null) { + if (scheduleObj.getReleaseState() == ReleaseState.OFFLINE) { + int delete = scheduleMapper.deleteById(scheduleObj.getId()); if (delete == 0) { putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); throw new 
ServiceException(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); } - } else if (schedule.getReleaseState() == ReleaseState.ONLINE) { - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); + } + if (scheduleObj.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, scheduleObj.getId()); return result; } } int delete = processDefinitionMapper.deleteById(processDefinition.getId()); - int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode()); - if (delete == 0 || deleteRelation == 0) { + if (delete == 0) { putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); } + int deleteRelation = processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode()); + if (deleteRelation == 0) { + logger.warn("The process definition has not relation, it will be delete successfully"); + } + + try { + syncDeleteWorkflowInstanceByCode(processDefinition.getCode()); + } catch (Exception e) { + logger.error("delete workflow instance error", e); + } + putMsg(result, Status.SUCCESS); return result; } + /** + * delete workflow instance by processDefinitionCode + * 1.delete processInstances + * 2.delete subWorkProcesses + * 3.delete processMap + * 4.delete taskInstances + * + * todo delete syncly may take a long time when many processInstance + * @param processDefinitionCode + */ + private void syncDeleteWorkflowInstanceByCode(long processDefinitionCode) { + int pageSize = 100; + while (true) { + List deleteProcessInstances = processInstanceService.queryByProcessDefineCode(processDefinitionCode, pageSize); + if (CollectionUtils.isEmpty(deleteProcessInstances)) { + break; + } + for (ProcessInstance deleteProcessInstance : deleteProcessInstances) { + processService.deleteWorkProcessInstanceById(deleteProcessInstance.getId()); + 
processService.deleteAllSubWorkProcessByParentId(deleteProcessInstance.getId()); + processService.deleteWorkProcessMapByParentId(deleteProcessInstance.getId()); + processService.deleteWorkTaskInstanceByProcessInstanceId(deleteProcessInstance.getId()); + } + } + } + /** * release process definition: online / offline * @@ -684,37 +781,28 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); + return result; + } switch (releaseState) { case ONLINE: - // To check resources whether they are already cancel authorized or deleted - String resourceIds = processDefinition.getResourceIds(); - if (StringUtils.isNotBlank(resourceIds)) { - Integer[] resourceIdArray = Arrays.stream(resourceIds.split(Constants.COMMA)).map(Integer::parseInt).toArray(Integer[]::new); - PermissionCheck permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger); - try { - permissionCheck.checkPermission(); - } catch (Exception e) { - logger.error(e.getMessage(), e); - putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE); - return result; - } + List relationList = processService.findRelationByCode(code, processDefinition.getVersion()); + if (CollectionUtils.isEmpty(relationList)) { + putMsg(result, Status.PROCESS_DAG_IS_EMPTY); + return result; } - processDefinition.setReleaseState(releaseState); processDefinitionMapper.updateById(processDefinition); break; case OFFLINE: processDefinition.setReleaseState(releaseState); int updateProcess = processDefinitionMapper.updateById(processDefinition); - List scheduleList = scheduleMapper.selectAllByProcessDefineArray( - new long[]{processDefinition.getCode()} - ); - if (updateProcess > 0 && scheduleList.size() 
== 1) { - Schedule schedule = scheduleList.get(0); - logger.info("set schedule offline, project id: {}, schedule id: {}, process definition code: {}", project.getId(), schedule.getId(), code); + Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code); + if (updateProcess > 0 && schedule != null) { + logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, schedule.getId(), code); // set status - schedule.setReleaseState(ReleaseState.OFFLINE); + schedule.setReleaseState(releaseState); int updateSchedule = scheduleMapper.updateById(schedule); if (updateSchedule == 0) { putMsg(result, Status.OFFLINE_SCHEDULE_ERROR); @@ -737,7 +825,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro */ @Override public void batchExportProcessDefinitionByCodes(User loginUser, long projectCode, String codes, HttpServletResponse response) { - if (StringUtils.isEmpty(codes)) { + if (org.apache.commons.lang.StringUtils.isEmpty(codes)) { return; } Project project = projectMapper.queryByCode(projectCode); @@ -748,7 +836,12 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } Set defineCodeSet = Lists.newArrayList(codes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toSet()); List processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet); - List dagDataSchedules = processDefinitionList.stream().map(this::exportProcessDagData).collect(Collectors.toList()); + if (CollectionUtils.isEmpty(processDefinitionList)) { + return; + } + // check processDefinition exist in project + List processDefinitionListInProject = processDefinitionList.stream().filter(o -> projectCode == o.getProjectCode()).collect(Collectors.toList()); + List dagDataSchedules = processDefinitionListInProject.stream().map(this::exportProcessDagData).collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(dagDataSchedules)) { 
downloadProcessDefinitionFile(response, dagDataSchedules); } @@ -794,12 +887,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro * @return DagDataSchedule */ public DagDataSchedule exportProcessDagData(ProcessDefinition processDefinition) { - List schedules = scheduleMapper.queryByProcessDefinitionCode(processDefinition.getCode()); + Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinition.getCode()); DagDataSchedule dagDataSchedule = new DagDataSchedule(processService.genDagData(processDefinition)); - if (!schedules.isEmpty()) { - Schedule schedule = schedules.get(0); - schedule.setReleaseState(ReleaseState.OFFLINE); - dagDataSchedule.setSchedule(schedule); + if (scheduleObj != null) { + scheduleObj.setReleaseState(ReleaseState.OFFLINE); + dagDataSchedule.setSchedule(scheduleObj); } return dagDataSchedule; } @@ -839,20 +931,26 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return false; } ProcessDefinition processDefinition = dagDataSchedule.getProcessDefinition(); + + // generate import processDefinitionName + String processDefinitionName = recursionProcessDefinitionName(projectCode, processDefinition.getName(), 1); + String importProcessDefinitionName = processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp(); + //unique check - Map checkResult = verifyProcessDefinitionName(loginUser, projectCode, processDefinition.getName()); + Map checkResult = verifyProcessDefinitionName(loginUser, projectCode, importProcessDefinitionName); if (Status.SUCCESS.equals(checkResult.get(Constants.STATUS))) { putMsg(result, Status.SUCCESS); } else { result.putAll(checkResult); return false; } - String processDefinitionName = recursionProcessDefinitionName(projectCode, processDefinition.getName(), 1); - processDefinition.setName(processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp()); + processDefinition.setName(importProcessDefinitionName); + 
processDefinition.setId(0); + processDefinition.setProjectCode(projectCode); processDefinition.setUserId(loginUser.getId()); try { - processDefinition.setCode(SnowFlakeUtils.getInstance().nextId()); - } catch (SnowFlakeException e) { + processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); + } catch (CodeGenerateException e) { putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR); return false; } @@ -870,17 +968,19 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro taskDefinitionLog.setUpdateTime(now); taskDefinitionLog.setOperator(loginUser.getId()); taskDefinitionLog.setOperateTime(now); + taskDefinitionLog.setTaskParams(taskDefinition.getTaskParams()); try { - long code = SnowFlakeUtils.getInstance().nextId(); + long code = CodeGenerateUtils.getInstance().genCode(); taskCodeMap.put(taskDefinitionLog.getCode(), code); taskDefinitionLog.setCode(code); - } catch (SnowFlakeException e) { + } catch (CodeGenerateException e) { logger.error("Task code get error, ", e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code"); return false; } taskDefinitionLogList.add(taskDefinitionLog); } + taskDefinitionLogList.forEach(v -> v.setTaskParams(resetImportTaskParams(taskCodeMap, v))); int insert = taskDefinitionMapper.batchInsert(taskDefinitionLogList); int logInsert = taskDefinitionLogMapper.batchInsert(taskDefinitionLogList); if ((logInsert & insert) == 0) { @@ -892,12 +992,33 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List taskRelationLogList = new ArrayList<>(); for (ProcessTaskRelation processTaskRelation : taskRelationList) { ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation); - processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode())); - processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode())); + if 
(taskCodeMap.containsKey(processTaskRelationLog.getPreTaskCode())) { + processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode())); + } + if (taskCodeMap.containsKey(processTaskRelationLog.getPostTaskCode())) { + processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode())); + } processTaskRelationLog.setPreTaskVersion(Constants.VERSION_FIRST); processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST); taskRelationLogList.add(processTaskRelationLog); } + if (StringUtils.isNotEmpty(processDefinition.getLocations()) && JSONUtils.checkJsonValid(processDefinition.getLocations())) { + ArrayNode arrayNode = JSONUtils.parseArray(processDefinition.getLocations()); + ArrayNode newArrayNode = JSONUtils.createArrayNode(); + for (int i = 0; i < arrayNode.size(); i++) { + ObjectNode newObjectNode = newArrayNode.addObject(); + JsonNode jsonNode = arrayNode.get(i); + Long taskCode = taskCodeMap.get(jsonNode.get("taskCode").asLong()); + if (Objects.nonNull(taskCode)) { + newObjectNode.put("taskCode", taskCode); + newObjectNode.set("x", jsonNode.get("x")); + newObjectNode.set("y", jsonNode.get("y")); + } + } + processDefinition.setLocations(newArrayNode.toString()); + } + processDefinition.setCreateTime(new Date()); + processDefinition.setUpdateTime(new Date()); Map createDagResult = createDagDefine(loginUser, taskRelationLogList, processDefinition, Lists.newArrayList()); if (Status.SUCCESS.equals(createDagResult.get(Constants.STATUS))) { putMsg(createDagResult, Status.SUCCESS); @@ -922,6 +1043,64 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return true; } + private String resetImportTaskParams(Map taskCodeMap, TaskDefinitionLog taskDefinition) { + String taskType = taskDefinition.getTaskType(); + if (!TaskType.CONDITIONS.getDesc().equals(taskType) && !TaskType.SWITCH.getDesc().equals(taskType)) { + return taskDefinition.getTaskParams(); + } + + Map taskParamsMap 
= JSONUtils.parseObject(taskDefinition.getTaskParams(), new TypeReference>() {}); + if (taskParamsMap == null) { + taskParamsMap = new HashMap<>(); + } + AbstractParameters switchParameters = TaskParametersUtils.getParameters(TaskType.SWITCH.getDesc(), JSONUtils.toJsonString(taskParamsMap.get(Constants.SWITCH_RESULT))); + if (switchParameters != null) { + taskParamsMap.put(Constants.SWITCH_RESULT, resetImportSwitchTaskParams(taskCodeMap, switchParameters)); + } + AbstractParameters conditionParameters = TaskParametersUtils.getParameters(TaskType.CONDITIONS.getDesc(), JSONUtils.toJsonString(taskParamsMap.get(Constants.CONDITION_RESULT))); + if (conditionParameters != null) { + taskParamsMap.put(Constants.CONDITION_RESULT, resetImportConditionTaskParams(taskCodeMap, conditionParameters)); + } + return JSONUtils.toJsonString(taskParamsMap); + } + + private AbstractParameters resetImportSwitchTaskParams(Map taskCodeMap, AbstractParameters parameter) { + SwitchParameters switchParameters = (SwitchParameters) parameter; + List dependTaskList = switchParameters.getDependTaskList(); + if (CollectionUtils.isEmpty(dependTaskList)) { + return switchParameters; + } + for (SwitchResultVo resultVo : dependTaskList) { + Long nextNode = resultVo.getNextNode(); + resultVo.setNextNode(taskCodeMap.get(nextNode)); + } + Long nextNode = switchParameters.getNextNode(); + switchParameters.setNextNode(taskCodeMap.get(nextNode)); + return switchParameters; + } + + private AbstractParameters resetImportConditionTaskParams(Map taskCodeMap, AbstractParameters parameter) { + ConditionsParameters conditionsParameters = (ConditionsParameters) parameter; + List originalSuccessNode = conditionsParameters.getSuccessNode(); + List originalFailedNode = conditionsParameters.getFailedNode(); + if (CollectionUtils.isEmpty(originalSuccessNode) || CollectionUtils.isEmpty(originalFailedNode)) { + return conditionsParameters; + } + List resultSuccessNode = new ArrayList<>(); + List resultFailedNode = new 
ArrayList<>(); + + if (CollectionUtils.isNotEmpty(originalSuccessNode)) { + originalSuccessNode.forEach(v -> resultSuccessNode.add(taskCodeMap.get(v))); + } + if (CollectionUtils.isNotEmpty(originalFailedNode)) { + originalFailedNode.forEach(v -> resultFailedNode.add(taskCodeMap.get(v))); + } + + conditionsParameters.setSuccessNode(resultSuccessNode); + conditionsParameters.setFailedNode(resultFailedNode); + return conditionsParameters; + } + /** * check importance params */ @@ -963,7 +1142,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro * @return check result code */ @Override - public Map checkProcessNodeList(String processTaskRelationJson) { + public Map checkProcessNodeList(String processTaskRelationJson, List taskDefinitionLogsList) { Map result = new HashMap<>(); try { if (processTaskRelationJson == null) { @@ -974,7 +1153,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List taskRelationList = JSONUtils.toList(processTaskRelationJson, ProcessTaskRelation.class); // Check whether the task node is normal - List taskNodes = processService.transformTask(taskRelationList, Lists.newArrayList()); + List taskNodes = processService.transformTask(taskRelationList, taskDefinitionLogsList); if (CollectionUtils.isEmpty(taskNodes)) { logger.error("process node info is empty"); @@ -983,7 +1162,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } // check has cycle - if (graphHasCycle(taskNodes)) { + if (processService.graphHasCycle(taskNodes)) { logger.error("process DAG has cycle"); putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); return result; @@ -1002,8 +1181,9 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } putMsg(result, Status.SUCCESS); } catch (Exception e) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, e.getMessage()); + result.put(Constants.STATUS, 
Status.INTERNAL_SERVER_ERROR_ARGS); + putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage()); + logger.error(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), e); } return result; } @@ -1030,6 +1210,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); return result; } + HashMap userProjects = new HashMap<>(Constants.DEFAULT_HASH_MAP_SIZE); + projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId()) + .forEach(userProject -> userProjects.put(userProject.getCode(), userProject)); + if (!userProjects.containsKey(projectCode)) { + logger.info("process define not exists, project dismatch"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); + return result; + } DagData dagData = processService.genDagData(processDefinition); result.put(Constants.DATA_LIST, dagData.getTaskDefinitionList()); putMsg(result, Status.SUCCESS); @@ -1061,8 +1249,19 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes); return result; } + HashMap userProjects = new HashMap<>(Constants.DEFAULT_HASH_MAP_SIZE); + projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId()) + .forEach(userProject -> userProjects.put(userProject.getCode(), userProject)); + + // check processDefinition exist in project + List processDefinitionListInProject = processDefinitionList.stream() + .filter(o -> userProjects.containsKey(o.getProjectCode())).collect(Collectors.toList()); + if (CollectionUtils.isEmpty(processDefinitionListInProject)) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes); + return result; + } Map> taskNodeMap = new HashMap<>(); - for (ProcessDefinition processDefinition : processDefinitionList) { + for (ProcessDefinition processDefinition : processDefinitionListInProject) { DagData dagData = processService.genDagData(processDefinition); taskNodeMap.put(processDefinition.getCode(), 
dagData.getTaskDefinitionList()); } @@ -1099,15 +1298,16 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro /** * Encapsulates the TreeView structure * + * @param projectCode project code * @param code process definition code * @param limit limit * @return tree view json data */ @Override - public Map viewTree(long code, Integer limit) { + public Map viewTree(long projectCode, long code, Integer limit) { Map result = new HashMap<>(); ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - if (null == processDefinition) { + if (null == processDefinition || projectCode != processDefinition.getProjectCode()) { logger.info("process define not exists"); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); return result; @@ -1124,7 +1324,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro processInstanceList.forEach(processInstance -> processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime()))); List taskDefinitionList = processService.genTaskDefineList(processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode())); Map taskDefinitionMap = taskDefinitionList.stream() - .collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog)); + .collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog)); if (limit > processInstanceList.size()) { limit = processInstanceList.size(); @@ -1139,8 +1339,8 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessInstance processInstance = processInstanceList.get(i); Date endTime = processInstance.getEndTime() == null ? 
new Date() : processInstance.getEndTime(); parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), processInstance.getProcessDefinitionCode(), - "", processInstance.getState().toString(), processInstance.getStartTime(), endTime, processInstance.getHost(), - DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); + "", processInstance.getState().toString(), processInstance.getStartTime(), endTime, processInstance.getHost(), + DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); } List parentTreeViewDtoList = new ArrayList<>(); @@ -1155,51 +1355,51 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Iterator>> iter = runningNodeMap.entrySet().iterator(); while (iter.hasNext()) { Map.Entry> en = iter.next(); - String nodeName = en.getKey(); + String nodeCode = en.getKey(); parentTreeViewDtoList = en.getValue(); TreeViewDto treeViewDto = new TreeViewDto(); - treeViewDto.setName(nodeName); - TaskNode taskNode = dag.getNode(nodeName); + TaskNode taskNode = dag.getNode(nodeCode); treeViewDto.setType(taskNode.getType()); treeViewDto.setCode(taskNode.getCode()); + treeViewDto.setName(taskNode.getName()); //set treeViewDto instances for (int i = limit - 1; i >= 0; i--) { ProcessInstance processInstance = processInstanceList.get(i); - TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName); + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstance.getId(), Long.parseLong(nodeCode)); if (taskInstance == null) { treeViewDto.getInstances().add(new Instance(-1, "not running", 0, "null")); } else { Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); Date endTime = taskInstance.getEndTime() == null ? 
new Date() : taskInstance.getEndTime(); - int subProcessId = 0; + long subProcessCode = 0L; // if process is sub process, the return sub id, or sub id=0 if (taskInstance.isSubProcess()) { TaskDefinition taskDefinition = taskDefinitionMap.get(taskInstance.getTaskCode()); - subProcessId = Integer.parseInt(JSONUtils.parseObject( - taskDefinition.getTaskParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_ID).asText()); + subProcessCode = Integer.parseInt(JSONUtils.parseObject( + taskDefinition.getTaskParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_CODE).asText()); } treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskCode(), - taskInstance.getTaskType(), taskInstance.getState().toString(), taskInstance.getStartTime(), taskInstance.getEndTime(), - taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); + taskInstance.getTaskType(), taskInstance.getState().toString(), taskInstance.getStartTime(), taskInstance.getEndTime(), + taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessCode)); } } for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) { pTreeViewDto.getChildren().add(treeViewDto); } - postNodeList = dag.getSubsequentNodes(nodeName); + postNodeList = dag.getSubsequentNodes(nodeCode); if (CollectionUtils.isNotEmpty(postNodeList)) { - for (String nextNodeName : postNodeList) { - List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); + for (String nextNodeCode : postNodeList) { + List treeViewDtoList = waitingRunningNodeMap.get(nextNodeCode); if (CollectionUtils.isEmpty(treeViewDtoList)) { treeViewDtoList = new ArrayList<>(); } treeViewDtoList.add(treeViewDto); - waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); + waitingRunningNodeMap.put(nextNodeCode, treeViewDtoList); } } - runningNodeMap.remove(nodeName); + runningNodeMap.remove(nodeCode); } if (waitingRunningNodeMap.size() == 0) { break; @@ -1214,32 
+1414,6 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } - /** - * whether the graph has a ring - * - * @param taskNodeResponseList task node response list - * @return if graph has cycle flag - */ - private boolean graphHasCycle(List taskNodeResponseList) { - DAG graph = new DAG<>(); - // Fill the vertices - for (TaskNode taskNodeResponse : taskNodeResponseList) { - graph.addNode(taskNodeResponse.getName(), taskNodeResponse); - } - // Fill edge relations - for (TaskNode taskNodeResponse : taskNodeResponseList) { - List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class); - if (CollectionUtils.isNotEmpty(preTasks)) { - for (String preTask : preTasks) { - if (!graph.addEdge(preTask, taskNodeResponse.getName())) { - return true; - } - } - } - } - return graph.hasCycle(); - } - /** * batch copy process definition * @@ -1259,12 +1433,16 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } List failedProcessList = new ArrayList<>(); doBatchOperateProcessDefinition(loginUser, targetProjectCode, failedProcessList, codes, result, true); + if (result.get(Constants.STATUS) == Status.NOT_SUPPORT_COPY_TASK_TYPE) { + return result; + } checkBatchOperateResult(projectCode, targetProjectCode, result, failedProcessList, true); return result; } /** * batch move process definition + * Will be deleted * * @param loginUser loginUser * @param projectCode projectCode @@ -1301,7 +1479,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } - if (StringUtils.isEmpty(processDefinitionCodes)) { + if (org.apache.commons.lang.StringUtils.isEmpty(processDefinitionCodes)) { putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY, processDefinitionCodes); return result; } @@ -1331,21 +1509,60 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro diffCode.forEach(code -> failedProcessList.add(code + "[null]")); for 
(ProcessDefinition processDefinition : processDefinitionList) { List processTaskRelations = - processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()); + processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()); List taskRelationList = processTaskRelations.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); processDefinition.setProjectCode(targetProjectCode); if (isCopy) { + List taskDefinitionLogs = processService.genTaskDefineList(processTaskRelations); + Map taskCodeMap = new HashMap<>(); + for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { + if (TaskType.CONDITIONS.getDesc().equals(taskDefinitionLog.getTaskType()) + || TaskType.SWITCH.getDesc().equals(taskDefinitionLog.getTaskType()) + || TaskType.SUB_PROCESS.getDesc().equals(taskDefinitionLog.getTaskType()) + || TaskType.DEPENDENT.getDesc().equals(taskDefinitionLog.getTaskType())) { + putMsg(result, Status.NOT_SUPPORT_COPY_TASK_TYPE, taskDefinitionLog.getTaskType()); + return; + } + try { + long taskCode = CodeGenerateUtils.getInstance().genCode(); + taskCodeMap.put(taskDefinitionLog.getCode(), taskCode); + taskDefinitionLog.setCode(taskCode); + } catch (CodeGenerateException e) { + putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); + throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); + } + taskDefinitionLog.setProjectCode(targetProjectCode); + taskDefinitionLog.setVersion(0); + taskDefinitionLog.setName(taskDefinitionLog.getName() + "_copy_" + DateUtils.getCurrentTimeStamp()); + } + for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) { + if (processTaskRelationLog.getPreTaskCode() > 0) { + processTaskRelationLog.setPreTaskCode(taskCodeMap.get(processTaskRelationLog.getPreTaskCode())); + } + if (processTaskRelationLog.getPostTaskCode() > 0) { + processTaskRelationLog.setPostTaskCode(taskCodeMap.get(processTaskRelationLog.getPostTaskCode())); 
+ } + } try { - processDefinition.setCode(SnowFlakeUtils.getInstance().nextId()); - } catch (SnowFlakeException e) { + processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); + } catch (CodeGenerateException e) { putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); } processDefinition.setId(0); processDefinition.setUserId(loginUser.getId()); processDefinition.setName(processDefinition.getName() + "_copy_" + DateUtils.getCurrentTimeStamp()); + if (StringUtils.isNotBlank(processDefinition.getLocations())) { + ArrayNode jsonNodes = JSONUtils.parseArray(processDefinition.getLocations()); + for (int i = 0; i < jsonNodes.size(); i++) { + ObjectNode node = (ObjectNode) jsonNodes.path(i); + node.put("taskCode", taskCodeMap.get(node.get("taskCode").asLong())); + jsonNodes.set(i, node); + } + processDefinition.setLocations(JSONUtils.toJsonString(jsonNodes)); + } try { - result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, Lists.newArrayList())); + result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs)); } catch (Exception e) { putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.COPY_PROCESS_DEFINITION_ERROR); @@ -1384,7 +1601,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - if (Objects.isNull(processDefinition)) { + if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR, code); return result; } @@ -1448,7 +1665,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); - IPage processDefinitionVersionsPaging = 
processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code); + IPage processDefinitionVersionsPaging = processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, code, projectCode); List processDefinitionLogs = processDefinitionVersionsPaging.getRecords(); pageInfo.setTotalList(processDefinitionLogs); @@ -1479,11 +1696,15 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); } else { + if (processDefinition.getVersion() == version) { + putMsg(result, Status.MAIN_TABLE_USING_VERSION); + return result; + } int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version); - int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(processDefinition.getCode(), processDefinition.getVersion()); + int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(code, version); if (deleteLog == 0 || deleteRelationLog == 0) { putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); @@ -1492,4 +1713,313 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } return result; } + + /** + * create empty process definition + * + * @param loginUser login user + * @param projectCode project code + * @param name process definition name + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @return process definition code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map createEmptyProcessDefinition(User loginUser, + long projectCode, + String name, + String description, + 
String globalParams, + int timeout, + String tenantCode, + String scheduleJson) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + // check whether the new process define name exist + ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); + if (definition != null) { + putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name); + return result; + } + + int tenantId = -1; + if (!Constants.DEFAULT.equals(tenantCode)) { + Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); + if (tenant == null) { + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + tenantId = tenant.getId(); + } + long processDefinitionCode; + try { + processDefinitionCode = CodeGenerateUtils.getInstance().genCode(); + } catch (CodeGenerateException e) { + putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); + return result; + } + ProcessDefinition processDefinition = new ProcessDefinition(projectCode, name, processDefinitionCode, description, + globalParams, "", timeout, loginUser.getId(), tenantId); + result = createEmptyDagDefine(loginUser, processDefinition); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + if (StringUtils.isBlank(scheduleJson)) { + return result; + } + + // save dag schedule + Map scheduleResult = createDagSchedule(loginUser, processDefinition, scheduleJson); + if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) { + Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS); + putMsg(result, scheduleResultStatus); + throw new ServiceException(scheduleResultStatus); + } + return result; + } + + private Map createEmptyDagDefine(User loginUser, ProcessDefinition processDefinition) { + Map result = new HashMap<>(); + int insertVersion = 
processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); + if (insertVersion == 0) { + putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR); + } + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + return result; + } + + private Map createDagSchedule(User loginUser, ProcessDefinition processDefinition, String scheduleJson) { + Map result = new HashMap<>(); + Schedule scheduleObj = JSONUtils.parseObject(scheduleJson, Schedule.class); + if (scheduleObj == null) { + putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson); + throw new ServiceException(Status.DATA_IS_NOT_VALID); + } + Date now = new Date(); + scheduleObj.setProcessDefinitionCode(processDefinition.getCode()); + if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); + return result; + } + if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) { + logger.error("{} verify failure", scheduleObj.getCrontab()); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab()); + return result; + } + scheduleObj.setWarningType(scheduleObj.getWarningType() == null ? WarningType.NONE : scheduleObj.getWarningType()); + scheduleObj.setWarningGroupId(scheduleObj.getWarningGroupId() == 0 ? 1 : scheduleObj.getWarningGroupId()); + scheduleObj.setFailureStrategy(scheduleObj.getFailureStrategy() == null ? FailureStrategy.CONTINUE : scheduleObj.getFailureStrategy()); + scheduleObj.setCreateTime(now); + scheduleObj.setUpdateTime(now); + scheduleObj.setUserId(loginUser.getId()); + scheduleObj.setReleaseState(ReleaseState.OFFLINE); + scheduleObj.setProcessInstancePriority(scheduleObj.getProcessInstancePriority() == null ? 
Priority.MEDIUM : scheduleObj.getProcessInstancePriority()); + scheduleObj.setWorkerGroup(scheduleObj.getWorkerGroup() == null ? "default" : scheduleObj.getWorkerGroup()); + scheduleObj.setEnvironmentCode(scheduleObj.getEnvironmentCode() == null ? -1 : scheduleObj.getEnvironmentCode()); + scheduleMapper.insert(scheduleObj); + + putMsg(result, Status.SUCCESS); + result.put("scheduleId", scheduleObj.getId()); + return result; + } + + /** + * update process definition basic info + * + * @param loginUser login user + * @param projectCode project code + * @param name process definition name + * @param code process definition code + * @param description description + * @param globalParams globalParams + * @param timeout timeout + * @param tenantCode tenantCode + * @param scheduleJson scheduleJson + * @return update result code + */ + @Override + @Transactional(rollbackFor = RuntimeException.class) + public Map updateProcessDefinitionBasicInfo(User loginUser, + long projectCode, + String name, + long code, + String description, + String globalParams, + int timeout, + String tenantCode, + String scheduleJson) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + int tenantId = -1; + if (!Constants.DEFAULT.equals(tenantCode)) { + Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); + if (tenant == null) { + putMsg(result, Status.TENANT_NOT_EXIST); + return result; + } + tenantId = tenant.getId(); + } + + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); + // check process definition exists + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); + return result; + } + if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { + // online 
can not permit edit + putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); + return result; + } + if (!name.equals(processDefinition.getName())) { + // check whether the new process define name exist + ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); + if (definition != null) { + putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name); + return result; + } + } + ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class); + processDefinition.set(projectCode, name, description, globalParams, "", timeout, tenantId); + List taskRelationList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion()); + result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, Lists.newArrayList()); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + + if (StringUtils.isBlank(scheduleJson)) { + return result; + } + // update dag schedule + Map scheduleResult = updateDagSchedule(loginUser, projectCode, code, scheduleJson); + if (scheduleResult.get(Constants.STATUS) != Status.SUCCESS) { + Status scheduleResultStatus = (Status) scheduleResult.get(Constants.STATUS); + putMsg(result, scheduleResultStatus); + throw new ServiceException(scheduleResultStatus); + } + return result; + } + + private Map updateDagSchedule(User loginUser, + long projectCode, + long processDefinitionCode, + String scheduleJson) { + Map result = new HashMap<>(); + Schedule schedule = JSONUtils.parseObject(scheduleJson, Schedule.class); + if (schedule == null) { + putMsg(result, Status.DATA_IS_NOT_VALID, scheduleJson); + throw new ServiceException(Status.DATA_IS_NOT_VALID); + } + // set default value + FailureStrategy failureStrategy = schedule.getFailureStrategy() == null ? 
FailureStrategy.CONTINUE : schedule.getFailureStrategy(); + WarningType warningType = schedule.getWarningType() == null ? WarningType.NONE : schedule.getWarningType(); + Priority processInstancePriority = schedule.getProcessInstancePriority() == null ? Priority.MEDIUM : schedule.getProcessInstancePriority(); + int warningGroupId = schedule.getWarningGroupId() == 0 ? 1 : schedule.getWarningGroupId(); + String workerGroup = schedule.getWorkerGroup() == null ? "default" : schedule.getWorkerGroup(); + long environmentCode = schedule.getEnvironmentCode() == null ? -1 : schedule.getEnvironmentCode(); + + ScheduleParam param = new ScheduleParam(); + param.setStartTime(schedule.getStartTime()); + param.setEndTime(schedule.getEndTime()); + param.setCrontab(schedule.getCrontab()); + param.setTimezoneId(schedule.getTimezoneId()); + + return schedulerService.updateScheduleByProcessDefinitionCode( + loginUser, + projectCode, + processDefinitionCode, + JSONUtils.toJsonString(param), + warningType, + warningGroupId, + failureStrategy, + processInstancePriority, + workerGroup, + environmentCode); + } + + /** + * release process definition and schedule + * + * @param loginUser login user + * @param projectCode project code + * @param code process definition code + * @param releaseState releaseState + * @return update result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map releaseWorkflowAndSchedule(User loginUser, long projectCode, long code, ReleaseState releaseState) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + // check state + if (null == releaseState) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); 
+ if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, code); + return result; + } + Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code); + if (scheduleObj == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code); + return result; + } + switch (releaseState) { + case ONLINE: + List relationList = processService.findRelationByCode(code, processDefinition.getVersion()); + if (CollectionUtils.isEmpty(relationList)) { + putMsg(result, Status.PROCESS_DAG_IS_EMPTY); + return result; + } + processDefinition.setReleaseState(releaseState); + processDefinitionMapper.updateById(processDefinition); + schedulerService.setScheduleState(loginUser, projectCode, scheduleObj.getId(), ReleaseState.ONLINE); + break; + case OFFLINE: + processDefinition.setReleaseState(releaseState); + int updateProcess = processDefinitionMapper.updateById(processDefinition); + if (updateProcess > 0) { + logger.info("set schedule offline, project code: {}, schedule id: {}, process definition code: {}", projectCode, scheduleObj.getId(), code); + // set status + scheduleObj.setReleaseState(ReleaseState.OFFLINE); + int updateSchedule = scheduleMapper.updateById(scheduleObj); + if (updateSchedule == 0) { + putMsg(result, Status.OFFLINE_SCHEDULE_ERROR); + throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR); + } + schedulerService.deleteSchedule(project.getId(), scheduleObj.getId()); + } + break; + default: + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + putMsg(result, Status.SUCCESS); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java index 194a9922d8bf6c2bf013b8b9de42023bff1f3773..5189616602d9105aa606a8080f1c13974032945b 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java @@ -23,6 +23,7 @@ import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; +import static org.apache.dolphinscheduler.common.Constants.WARNING_GROUP_NAME; import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; import org.apache.dolphinscheduler.api.dto.gantt.Task; @@ -46,28 +47,32 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; 
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.io.BufferedReader; @@ -76,11 +81,13 @@ import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; @@ -120,6 +127,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce @Autowired ExecutorService execService; + @Autowired + AlertGroupMapper alertGroupMapper; + @Autowired TaskInstanceMapper taskInstanceMapper; @@ -138,6 +148,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce @Autowired private TenantMapper tenantMapper; + @Autowired + TaskDefinitionMapper taskDefinitionMapper; + /** * return top n SUCCESS process instance order by running time which started between startTime and endTime */ @@ -173,7 +186,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce return result; } - List processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS); + List processInstances 
= processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS, projectCode); result.put(DATA_LIST, processInstances); putMsg(result, Status.SUCCESS); return result; @@ -182,9 +195,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * query process instance by id * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code - * @param processId process instance id + * @param processId process instance id * @return process instance detail */ @Override @@ -198,9 +211,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); ProcessDefinition processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), - processInstance.getProcessDefinitionVersion()); + processInstance.getProcessDefinitionVersion()); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); } else { processInstance.setWarningGroupId(processDefinition.getWarningGroupId()); @@ -216,16 +229,16 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * paging query process instance list, filtering according to project, process definition, time range, keyword, process status * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param processDefineCode process definition code - * @param pageNo page number - * @param pageSize page size - * @param searchVal search value - * @param stateType state type - * @param host host - * @param startDate start time - * @param endDate end time + * @param pageNo page number + * @param pageSize page size + * @param searchVal search value + * @param stateType state type + * 
@param host host + * @param startDate start time + * @param endDate end time * @return process instance list */ @Override @@ -238,7 +251,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode); Status resultEnum = (Status) checkResult.get(Constants.STATUS); if (resultEnum != Status.SUCCESS) { - putMsg(result,resultEnum); + putMsg(result, resultEnum); return result; } @@ -251,7 +264,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce Map checkAndParseDateResult = checkAndParseDateParameters(startDate, endDate); resultEnum = (Status) checkAndParseDateResult.get(Constants.STATUS); if (resultEnum != Status.SUCCESS) { - putMsg(result,resultEnum); + putMsg(result, resultEnum); return result; } Date start = (Date) checkAndParseDateResult.get(Constants.START_TIME); @@ -262,11 +275,18 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce int executorId = usersService.getUserIdByName(executorName); IPage processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(page, - project.getCode(), processDefineCode, searchVal, executorId, statusArray, host, start, end); + project.getCode(), processDefineCode, searchVal, executorId, statusArray, host, start, end); List processInstances = processInstanceList.getRecords(); - List userIds = CollectionUtils.transformToList(processInstances, ProcessInstance::getExecutorId); - Map idToUserMap = CollectionUtils.collectionToMap(usersService.queryUser(userIds), User::getId); + List userIds = Collections.emptyList(); + if (CollectionUtils.isNotEmpty(processInstances)) { + userIds = processInstances.stream().map(ProcessInstance::getExecutorId).collect(Collectors.toList()); + } + List users = usersService.queryUser(userIds); + Map idToUserMap = Collections.emptyMap(); + if (CollectionUtils.isNotEmpty(users)) { + idToUserMap = 
users.stream().collect(Collectors.toMap(User::getId, Function.identity())); + } for (ProcessInstance processInstance : processInstances) { processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())); @@ -286,9 +306,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * query task list by process instance id * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code - * @param processId process instance id + * @param processId process instance id * @return task list for the process instance * @throws IOException io exception */ @@ -301,10 +321,26 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce return result; } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); + ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + if (processDefinition != null && projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); + return result; + } List taskInstanceList = processService.findValidTaskListByProcessId(processId); addDependResultForTaskList(taskInstanceList); + Map resultMap = new HashMap<>(); resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); + + if (null != processDefinition && processDefinition.getWarningGroupId() != 0) { + //check if exist + AlertGroup alertGroup = alertGroupMapper.selectById(processDefinition.getWarningGroupId()); + if (null != alertGroup) { + resultMap.put(WARNING_GROUP_NAME, alertGroup.getGroupName()); + } + + } + resultMap.put(TASK_LIST, taskInstanceList); result.put(DATA_LIST, resultMap); @@ -319,7 +355,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce for (TaskInstance taskInstance : taskInstanceList) { if 
(TaskType.DEPENDENT.getDesc().equalsIgnoreCase(taskInstance.getTaskType())) { Result logResult = loggerService.queryLog( - taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); + taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); if (logResult.getCode() == Status.SUCCESS.ordinal()) { String log = logResult.getData(); Map resultMap = parseLogForDependentResult(log); @@ -337,7 +373,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce } BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( - StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); + StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); String line; while ((line = br.readLine()) != null) { if (line.contains(DEPENDENT_SPLIT)) { @@ -361,9 +397,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * query sub process instance detail info by task id * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code - * @param taskId task id + * @param taskId task id * @return sub process instance detail */ @Override @@ -380,6 +416,13 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); return result; } + + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskInstance.getTaskCode()); + if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { + putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); + return result; + } + if (!taskInstance.isSubProcess()) { putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); return result; @@ -431,6 +474,12 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } + //check process instance 
exists in project + ProcessDefinition processDefinition0 = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + if (processDefinition0 != null && projectCode != processDefinition0.getProjectCode()) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } //check process instance status if (!processInstance.getState().typeIsFinished()) { putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, @@ -438,64 +487,55 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce return result; } setProcessInstance(processInstance, tenantCode, scheduleTime, globalParams, timeout); - if (Boolean.TRUE.equals(syncDefine)) { - List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); - if (taskDefinitionLogs.isEmpty()) { - putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson); + List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); + if (taskDefinitionLogs.isEmpty()) { + putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson); + return result; + } + for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { + if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) { + putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; } - for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { - if (!CheckUtils.checkTaskDefinitionParameters(taskDefinitionLog)) { - putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); - return result; - } - } - int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs); - if (saveTaskResult == Constants.DEFINITION_FAILURE) { - putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); - throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); - } - ProcessDefinition processDefinition = 
processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); - List taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class); - //check workflow json is valid - result = processDefinitionService.checkProcessNodeList(taskRelationJson); - if (result.get(Constants.STATUS) != Status.SUCCESS) { + } + int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, syncDefine); + if (saveTaskResult == Constants.DEFINITION_FAILURE) { + putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); + } + ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + List taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class); + //check workflow json is valid + result = processDefinitionService.checkProcessNodeList(taskRelationJson, taskDefinitionLogs); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + int tenantId = -1; + if (!Constants.DEFAULT.equals(tenantCode)) { + Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); + if (tenant == null) { + putMsg(result, Status.TENANT_NOT_EXIST); return result; } - int tenantId = -1; - if (!Constants.DEFAULT.equals(tenantCode)) { - Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); - if (tenant == null) { - putMsg(result, Status.TENANT_NOT_EXIST); - return result; - } - tenantId = tenant.getId(); - } - ProcessDefinition processDefinitionDeepCopy = JSONUtils.parseObject(JSONUtils.toJsonString(processDefinition), ProcessDefinition.class); - processDefinition.set(projectCode, processDefinition.getName(), processDefinition.getDescription(), globalParams, locations, timeout, tenantId); - processDefinition.setUpdateTime(new Date()); - int insertVersion; - if (processDefinition.equals(processDefinitionDeepCopy)) { - insertVersion = processDefinitionDeepCopy.getVersion(); - } else { - 
processDefinition.setUpdateTime(new Date()); - insertVersion = processService.saveProcessDefine(loginUser, processDefinition, false); - } - if (insertVersion == 0) { - putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); - throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); - } - int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), - processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs); - if (insertResult == Constants.EXIT_CODE_SUCCESS) { - putMsg(result, Status.SUCCESS); - result.put(Constants.DATA_LIST, processDefinition); - } else { - putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); - throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); - } - processInstance.setProcessDefinitionVersion(insertVersion); + tenantId = tenant.getId(); + } + processDefinition.set(projectCode, processDefinition.getName(), processDefinition.getDescription(), globalParams, locations, timeout, tenantId); + processDefinition.setUpdateTime(new Date()); + int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, syncDefine, Boolean.FALSE); + if (insertVersion == 0) { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), + processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, syncDefine); + if (insertResult == Constants.EXIT_CODE_SUCCESS) { + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } + processInstance.setProcessDefinitionVersion(insertVersion); int update = processService.updateProcessInstance(processInstance); if (update == 0) { putMsg(result, 
Status.UPDATE_PROCESS_INSTANCE_ERROR); @@ -525,9 +565,9 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * query parent process instance detail info by sub process instance id * - * @param loginUser login user + * @param loginUser login user * @param projectCode project code - * @param subId sub process id + * @param subId sub process id * @return parent instance detail */ @Override @@ -564,8 +604,8 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * delete process instance by id, at the same time,delete task instance and their mapping relation data * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param processInstanceId process instance id * @return delete result code */ @@ -580,13 +620,26 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (null == processInstance) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId)); + return result; + } + //check process instance status + if (!processInstance.getState().typeIsFinished()) { + putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, + processInstance.getName(), processInstance.getState().toString(), "delete"); + return result; + } + + ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + if (processDefinition != null && projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, String.valueOf(processInstanceId)); return result; } try { processService.removeTaskLogFile(processInstanceId); - } catch (Exception e) { + } catch (Exception ignore) { + // ignore } // delete database cascade @@ -594,6 
+647,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processService.deleteAllSubWorkProcessByParentId(processInstanceId); processService.deleteWorkProcessMapByParentId(processInstanceId); + processService.deleteWorkTaskInstanceByProcessInstanceId(processInstanceId); if (delete > 0) { putMsg(result, Status.SUCCESS); @@ -608,11 +662,12 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * view process instance variables * + * @param projectCode project code * @param processInstanceId process instance id * @return variables data */ @Override - public Map viewVariables(Integer processInstanceId) { + public Map viewVariables(long projectCode, Integer processInstanceId) { Map result = new HashMap<>(); ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); @@ -621,9 +676,15 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce throw new RuntimeException("workflow instance is null"); } + ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + if (processDefinition != null && projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } + Map timeParams = BusinessTimeUtils - .getBusinessTime(processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); + .getBusinessTime(processInstance.getCmdTypeIfComplement(), + processInstance.getScheduleTime()); String userDefinedParams = processInstance.getGlobalParams(); // global params List globalParams = new ArrayList<>(); @@ -659,7 +720,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce List taskInstanceList = taskInstanceMapper.findValidTaskListByProcessId(processInstance.getId(), Flag.YES); for (TaskInstance taskInstance : taskInstanceList) { TaskDefinitionLog taskDefinitionLog = 
taskDefinitionLogMapper.queryByDefinitionCodeAndVersion( - taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); + taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); String localParams = JSONUtils.getNodeString(taskDefinitionLog.getTaskParams(), LOCAL_PARAMS); if (!StringUtils.isEmpty(localParams)) { @@ -680,12 +741,13 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce /** * encapsulation gantt structure * + * @param projectCode project code * @param processInstanceId process instance id * @return gantt tree data * @throws Exception exception when json parse */ @Override - public Map viewGantt(Integer processInstanceId) throws Exception { + public Map viewGantt(long projectCode, Integer processInstanceId) throws Exception { Map result = new HashMap<>(); ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); @@ -698,6 +760,10 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion() ); + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); + return result; + } GanttDto ganttDto = new GanttDto(); DAG dag = processService.genDagGraph(processDefinition); //topological sort @@ -707,7 +773,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce List taskList = new ArrayList<>(); for (String node : nodeList) { - TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode(processInstanceId, Long.parseLong(node)); if (taskInstance == null) { continue; } @@ -735,7 +801,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce * query process instance by processDefinitionCode and 
stateArray * * @param processDefinitionCode processDefinitionCode - * @param states states array + * @param states states array * @return process instance list */ @Override @@ -747,7 +813,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce * query process instance by processDefinitionCode * * @param processDefinitionCode processDefinitionCode - * @param size size + * @param size size * @return process instance list */ @Override diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java new file mode 100644 index 0000000000000000000000000000000000000000..b65a03ff1fb9b1d03f99cedb21597f838a8b2077 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java @@ -0,0 +1,553 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ConditionType; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import org.apache.commons.collections.CollectionUtils; + +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.google.common.collect.Lists; + +/** + * process 
task relation service impl + */ +@Service +public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements ProcessTaskRelationService { + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessTaskRelationMapper processTaskRelationMapper; + + @Autowired + private TaskDefinitionLogMapper taskDefinitionLogMapper; + + @Autowired + private TaskDefinitionMapper taskDefinitionMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private ProcessService processService; + + /** + * create process task relation + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode processDefinitionCode + * @param preTaskCode preTaskCode + * @param postTaskCode postTaskCode + * @return create result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map createProcessTaskRelation(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode); + return result; + } + if (processDefinition.getProjectCode() != projectCode) { + putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH); + return result; + } + updateProcessDefiniteVersion(loginUser, result, processDefinition); + List processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + List processTaskRelations = Lists.newArrayList(processTaskRelationList); + if 
(!processTaskRelations.isEmpty()) { + Map preTaskCodeMap = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == postTaskCode) + .collect(Collectors.toMap(ProcessTaskRelation::getPreTaskCode, processTaskRelation -> processTaskRelation)); + if (!preTaskCodeMap.isEmpty()) { + if (preTaskCodeMap.containsKey(preTaskCode) || (!preTaskCodeMap.containsKey(0L) && preTaskCode == 0L)) { + putMsg(result, Status.PROCESS_TASK_RELATION_EXIST, processDefinitionCode); + return result; + } + if (preTaskCodeMap.containsKey(0L) && preTaskCode != 0L) { + // delete no upstream + processTaskRelations.remove(preTaskCodeMap.get(0L)); + } + } + } + TaskDefinition postTaskDefinition = taskDefinitionMapper.queryByCode(postTaskCode); + ProcessTaskRelation processTaskRelation = setRelation(processDefinition, postTaskDefinition); + if (preTaskCode != 0L) { + TaskDefinition preTaskDefinition = taskDefinitionMapper.queryByCode(preTaskCode); + List upstreamTaskRelationList = processTaskRelations.stream().filter(r -> r.getPostTaskCode() == preTaskCode).collect(Collectors.toList()); + // upstream is or not exist + if (upstreamTaskRelationList.isEmpty()) { + ProcessTaskRelation preProcessTaskRelation = setRelation(processDefinition, preTaskDefinition); + preProcessTaskRelation.setPreTaskCode(0L); + preProcessTaskRelation.setPreTaskVersion(0); + processTaskRelations.add(preProcessTaskRelation); + } + processTaskRelation.setPreTaskCode(preTaskDefinition.getCode()); + processTaskRelation.setPreTaskVersion(preTaskDefinition.getVersion()); + } else { + processTaskRelation.setPreTaskCode(0L); + processTaskRelation.setPreTaskVersion(0); + } + processTaskRelations.add(processTaskRelation); + + List taskNodeList = processService.transformTask(processTaskRelations, null); + if (processService.graphHasCycle(taskNodeList)) { + putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); + return result; + } + + updateRelation(loginUser, result, processDefinition, processTaskRelations); + return result; + } + + 
private ProcessTaskRelation setRelation(ProcessDefinition processDefinition, TaskDefinition taskDefinition) { + Date now = new Date(); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(processDefinition.getProjectCode()); + processTaskRelation.setProcessDefinitionCode(processDefinition.getCode()); + processTaskRelation.setProcessDefinitionVersion(processDefinition.getVersion()); + processTaskRelation.setPostTaskCode(taskDefinition.getCode()); + processTaskRelation.setPostTaskVersion(taskDefinition.getVersion()); + processTaskRelation.setConditionType(ConditionType.NONE); + processTaskRelation.setConditionParams("{}"); + processTaskRelation.setCreateTime(now); + processTaskRelation.setUpdateTime(now); + return processTaskRelation; + } + + private void updateProcessDefiniteVersion(User loginUser, Map result, ProcessDefinition processDefinition) { + int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); + if (insertVersion <= 0) { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + processDefinition.setVersion(insertVersion); + } + + /** + * delete process task relation + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param taskCode the post task code + * @return delete result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map deleteTaskProcessRelation(User loginUser, long projectCode, long processDefinitionCode, long taskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + if (taskCode == 0) { + putMsg(result, 
Status.DELETE_TASK_PROCESS_RELATION_ERROR); + return result; + } + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode); + return result; + } + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); + if (null == taskDefinition) { + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); + return result; + } + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + List processTaskRelationList = Lists.newArrayList(processTaskRelations); + if (CollectionUtils.isEmpty(processTaskRelationList)) { + putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList"); + return result; + } + List downstreamList = Lists.newArrayList(); + for (ProcessTaskRelation processTaskRelation : processTaskRelations) { + if (processTaskRelation.getPreTaskCode() == taskCode) { + downstreamList.add(processTaskRelation.getPostTaskCode()); + } + if (processTaskRelation.getPostTaskCode() == taskCode) { + processTaskRelationList.remove(processTaskRelation); + } + } + if (CollectionUtils.isNotEmpty(downstreamList)) { + putMsg(result, Status.TASK_HAS_DOWNSTREAM, org.apache.commons.lang.StringUtils.join(downstreamList, ",")); + return result; + } + updateProcessDefiniteVersion(loginUser, result, processDefinition); + updateRelation(loginUser, result, processDefinition, processTaskRelationList); + if (TaskType.CONDITIONS.getDesc().equals(taskDefinition.getTaskType()) + || TaskType.DEPENDENT.getDesc().equals(taskDefinition.getTaskType()) + || TaskType.SUB_PROCESS.getDesc().equals(taskDefinition.getTaskType())) { + int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode); + if (0 == deleteTaskDefinition) { + putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); + throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); + } + } + putMsg(result, 
Status.SUCCESS); + return result; + } + + private void updateRelation(User loginUser, Map result, ProcessDefinition processDefinition, + List processTaskRelationList) { + List relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); + int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), + processDefinition.getVersion(), relationLogs, Lists.newArrayList(), Boolean.TRUE); + if (insertResult == Constants.EXIT_CODE_SUCCESS) { + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + } + + /** + * delete task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param preTaskCodes the pre task codes, sep ',' + * @param taskCode the post task code + * @return delete result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map deleteUpstreamRelation(User loginUser, long projectCode, String preTaskCodes, long taskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + if (StringUtils.isEmpty(preTaskCodes)) { + putMsg(result, Status.DATA_IS_NULL, "preTaskCodes"); + return result; + } + List upstreamList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); + if (CollectionUtils.isEmpty(upstreamList)) { + putMsg(result, Status.DATA_IS_NULL, "taskCode"); + return result; + } + + List preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList()); + if (preTaskCodeList.contains(0L)) { + putMsg(result, 
Status.DATA_IS_NULL, "preTaskCodes"); + return result; + } + List currentUpstreamList = upstreamList.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toList()); + if (currentUpstreamList.contains(0L)) { + putMsg(result, Status.DATA_IS_NOT_VALID, "currentUpstreamList"); + return result; + } + List tmpPreTaskCodeList = Lists.newArrayList(preTaskCodeList); + tmpPreTaskCodeList.removeAll(currentUpstreamList); + if (!tmpPreTaskCodeList.isEmpty()) { + putMsg(result, Status.DATA_IS_NOT_VALID, StringUtils.join(preTaskCodeList, Constants.COMMA)); + return result; + } + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, upstreamList.get(0).getProcessDefinitionCode()); + return result; + } + + List remainCurrentUpstreamList = Lists.newArrayList(currentUpstreamList); + remainCurrentUpstreamList.removeAll(preTaskCodeList); + + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode()); + List remainProcessTaskRelationList = Lists.newArrayList(); + List processTaskRelationWaitRemove = Lists.newArrayList(); + for (ProcessTaskRelation processTaskRelation : processTaskRelations) { + if (processTaskRelation.getPostTaskCode() == taskCode + && preTaskCodeList.contains(processTaskRelation.getPreTaskCode())) { + processTaskRelationWaitRemove.add(processTaskRelation); + } else { + remainProcessTaskRelationList.add(processTaskRelation); + } + } + if (remainCurrentUpstreamList.isEmpty() && processTaskRelationWaitRemove.size() > 0) { + ProcessTaskRelation lastTaskRelation = processTaskRelationWaitRemove.get(0); + lastTaskRelation.setPreTaskVersion(0); + lastTaskRelation.setPreTaskCode(0L); + remainProcessTaskRelationList.add(lastTaskRelation); + } + updateProcessDefiniteVersion(loginUser, result, processDefinition); + updateRelation(loginUser, result, 
processDefinition, remainProcessTaskRelationList); + return result; + } + + /** + * delete task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param postTaskCodes the post task codes, sep ',' + * @param taskCode the pre task code + * @return delete result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map deleteDownstreamRelation(User loginUser, long projectCode, String postTaskCodes, long taskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + if (StringUtils.isEmpty(postTaskCodes)) { + putMsg(result, Status.DATA_IS_NULL, "postTaskCodes"); + return result; + } + List downstreamList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode); + if (CollectionUtils.isEmpty(downstreamList)) { + putMsg(result, Status.DATA_IS_NULL, "taskCode"); + return result; + } + List postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream().map(Long::parseLong).collect(Collectors.toList()); + if (postTaskCodeList.contains(0L)) { + putMsg(result, Status.DATA_IS_NULL, "postTaskCodes"); + return result; + } + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode()); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, downstreamList.get(0).getProcessDefinitionCode()); + return result; + } + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinition.getCode()); + List processTaskRelationList = Lists.newArrayList(processTaskRelations); + processTaskRelationList.removeIf(processTaskRelation -> postTaskCodeList.contains(processTaskRelation.getPostTaskCode()) && processTaskRelation.getPreTaskCode() == taskCode); 
+ updateProcessDefiniteVersion(loginUser, result, processDefinition); + updateRelation(loginUser, result, processDefinition, processTaskRelationList); + return result; + } + + /** + * query task upstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode current task code (post task code) + * @return the upstream task definitions + */ + @Override + public Map queryUpstreamRelation(User loginUser, long projectCode, long taskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + List processTaskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); + List taskDefinitionLogList = new ArrayList<>(); + if (CollectionUtils.isNotEmpty(processTaskRelationList)) { + Set taskDefinitions = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinition taskDefinition = buildTaskDefinition(); + taskDefinition.setProjectCode(processTaskRelation.getProjectCode()); + taskDefinition.setCode(processTaskRelation.getPreTaskCode()); + taskDefinition.setVersion(processTaskRelation.getPreTaskVersion()); + return taskDefinition; + }) + .collect(Collectors.toSet()); + taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions); + } + result.put(Constants.DATA_LIST, taskDefinitionLogList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * query task downstream relation + * + * @param loginUser login user + * @param projectCode project code + * @param taskCode pre task code + * @return the downstream task definitions + */ + @Override + public Map queryDownstreamRelation(User loginUser, long projectCode, long taskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = 
projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + List processTaskRelationList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode); + List taskDefinitionLogList = new ArrayList<>(); + if (CollectionUtils.isNotEmpty(processTaskRelationList)) { + Set taskDefinitions = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinition taskDefinition = buildTaskDefinition(); + taskDefinition.setProjectCode(processTaskRelation.getProjectCode()); + taskDefinition.setCode(processTaskRelation.getPostTaskCode()); + taskDefinition.setVersion(processTaskRelation.getPostTaskVersion()); + return taskDefinition; + }) + .collect(Collectors.toSet()); + taskDefinitionLogList = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions); + } + result.put(Constants.DATA_LIST, taskDefinitionLogList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete edge + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param preTaskCode pre task code + * @param postTaskCode post task code + * @return delete result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map deleteEdge(User loginUser, long projectCode, long processDefinitionCode, long preTaskCode, long postTaskCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionCode); + return result; + } + List processTaskRelations = 
processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + List processTaskRelationList = Lists.newArrayList(processTaskRelations); + if (CollectionUtils.isEmpty(processTaskRelationList)) { + putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList"); + return result; + } + Map> taskRelationMap = new HashMap<>(); + for (ProcessTaskRelation processTaskRelation : processTaskRelationList) { + taskRelationMap.compute(processTaskRelation.getPostTaskCode(), (k, v) -> { + if (v == null) { + v = new ArrayList<>(); + } + v.add(processTaskRelation); + return v; + }); + } + if (!taskRelationMap.containsKey(postTaskCode)) { + putMsg(result, Status.DATA_IS_NULL, "postTaskCode"); + return result; + } + if (taskRelationMap.get(postTaskCode).size() > 1) { + for (ProcessTaskRelation processTaskRelation : taskRelationMap.get(postTaskCode)) { + if (processTaskRelation.getPreTaskCode() == preTaskCode) { + int delete = processTaskRelationMapper.deleteById(processTaskRelation.getId()); + if (delete == 0) { + putMsg(result, Status.DELETE_EDGE_ERROR); + throw new ServiceException(Status.DELETE_EDGE_ERROR); + } + processTaskRelationList.remove(processTaskRelation); + } + } + } else { + ProcessTaskRelation processTaskRelation = taskRelationMap.get(postTaskCode).get(0); + processTaskRelationList.remove(processTaskRelation); + processTaskRelation.setPreTaskVersion(0); + processTaskRelation.setPreTaskCode(0L); + processTaskRelationList.add(processTaskRelation); + } + updateProcessDefiniteVersion(loginUser, result, processDefinition); + updateRelation(loginUser, result, processDefinition, processTaskRelationList); + return result; + } + + /** + * build task definition + * + * @return task definition + */ + private TaskDefinition buildTaskDefinition() { + + return new TaskDefinition() { + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof TaskDefinition)) { + return false; + } + TaskDefinition that = 
(TaskDefinition) o; + return getCode() == that.getCode() + && getVersion() == that.getVersion() + && getProjectCode() == that.getProjectCode(); + } + + @Override + public int hashCode() { + return Objects.hash(getCode(), getVersion(), getProjectCode()); + } + }; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java index 7d49cfaae0435237b1d1870b5d419cc34e56b246..3579cd8ade8adbb9fe124be47a7ee0813202e044 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils.SnowFlakeException; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; @@ -97,20 +97,20 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic project = Project .newBuilder() .name(name) - .code(SnowFlakeUtils.getInstance().nextId()) + .code(CodeGenerateUtils.getInstance().genCode()) .description(desc) .userId(loginUser.getId()) .userName(loginUser.getUserName()) .createTime(now) .updateTime(now) .build(); - } catch (SnowFlakeException e) { + } catch (CodeGenerateException e) { putMsg(result, 
Status.CREATE_PROJECT_ERROR); return result; } if (projectMapper.insert(project) > 0) { - result.put(Constants.DATA_LIST, project.getId()); + result.put(Constants.DATA_LIST, project); putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.CREATE_PROJECT_ERROR); @@ -139,6 +139,21 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic return result; } + @Override + public Map queryByName(User loginUser, String projectName) { + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + if (project != null) { + result.put(Constants.DATA_LIST, project); + putMsg(result, Status.SUCCESS); + } + return result; + } + /** * check project and authorization * @@ -389,6 +404,31 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic return result; } + /** + * query authorized user + * + * @param loginUser login user + * @param projectCode project code + * @return users who have permission for the specified project + */ + @Override + public Map queryAuthorizedUser(User loginUser, Long projectCode) { + Map result = new HashMap<>(); + + // 1. check read permission + Project project = this.projectMapper.queryByCode(projectCode); + boolean hasProjectAndPerm = this.hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + + // 2. 
query authorized user list + List users = this.userMapper.queryAuthedUserListByProjectId(project.getId()); + result.put(Constants.DATA_LIST, users); + this.putMsg(result, Status.SUCCESS); + return result; + } + /** * query authorized project * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java index edd0e1d819cd3866c7fbfe90dac3b52dea024357..2da89df000d96d976d36b0dff258f8b3afdad3de 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java @@ -89,7 +89,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { public Result queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Result result = new Result(); if (!isAdmin(loginUser)) { - putMsg(result,Status.USER_NO_OPERATION_PERM); + putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -151,6 +151,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { queueObj.setUpdateTime(now); queueMapper.insert(queueObj); + result.put(Constants.DATA_LIST, queueObj); putMsg(result, Status.SUCCESS); return result; @@ -230,7 +231,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { /** * verify queue and queueName * - * @param queue queue + * @param queue queue * @param queueName queue name * @return true if the queue name not exists, otherwise return false */ @@ -262,6 +263,32 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { return result; } + /** + * query queue by queueName + * + * @param queueName queue name + * @return queue object for provide queue name + */ + @Override + public Map queryQueueName(String queueName) { + Map result = new 
HashMap<>(); + + if (StringUtils.isEmpty(queueName)) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME); + return result; + } + + if (!checkQueueNameExist(queueName)) { + putMsg(result, Status.QUEUE_NOT_EXIST, queueName); + return result; + } + + List queueList = queueMapper.queryQueueName(queueName); + result.put(Constants.DATA_LIST, queueList); + putMsg(result, Status.SUCCESS); + return result; + } + /** * check queue exist * if exists return true,not exists return false @@ -293,7 +320,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { * @param newQueue new queue name * @return true if need to update user */ - private boolean checkIfQueueIsInUsing (String oldQueue, String newQueue) { + private boolean checkIfQueueIsInUsing(String oldQueue, String newQueue) { return !oldQueue.equals(newQueue) && userMapper.existUser(oldQueue) == Boolean.TRUE; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java index f0cbcc9a7f0bd5fb31317b9471b19ec10d2e921b..c00d454614550b98b761cfa8744a592df10c36a9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java @@ -33,8 +33,6 @@ import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ProgramType; -import org.apache.dolphinscheduler.common.enums.ResourceType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import 
org.apache.dolphinscheduler.common.utils.JSONUtils; @@ -51,8 +49,10 @@ import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.commons.beanutils.BeanMap; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.io.IOException; @@ -64,6 +64,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.regex.Matcher; @@ -80,6 +81,8 @@ import org.springframework.web.multipart.MultipartFile; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.fasterxml.jackson.databind.SerializationFeature; +import com.google.common.base.Joiner; +import com.google.common.io.Files; /** * resources service impl @@ -130,6 +133,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } + if (FileUtils.directoryTraversal(name)) { + putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); + return result; + } String fullName = currentDir.equals("/") ? 
String.format("%s%s",currentDir,name) : String.format("%s/%s",currentDir,name); result = verifyResource(loginUser, type, fullName, pid); if (!result.getCode().equals(Status.SUCCESS.getCode())) { @@ -219,6 +226,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { resourcesMapper.insert(resource); + updateParentResourceSize(resource, resource.getSize()); putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); Map resultMap = new HashMap<>(); @@ -242,6 +250,33 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe return result; } + /** + * update the folder's size of the resource + * + * @param resource the current resource + * @param size size + */ + private void updateParentResourceSize(Resource resource, long size) { + if (resource.getSize() > 0) { + String[] splits = resource.getFullName().split("/"); + for (int i = 1; i < splits.length; i++) { + String parentFullName = Joiner.on("/").join(Arrays.copyOfRange(splits, 0, i)); + if (StringUtils.isNotBlank(parentFullName)) { + List resources = resourcesMapper.queryResource(parentFullName, resource.getType().ordinal()); + if (CollectionUtils.isNotEmpty(resources)) { + Resource parentResource = resources.get(0); + if (parentResource.getSize() + size >= 0) { + parentResource.setSize(parentResource.getSize() + size); + } else { + parentResource.setSize(0L); + } + resourcesMapper.updateById(parentResource); + } + } + } + } + } + /** * check resource is exists * @@ -330,8 +365,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!resource.isDirectory()) { //get the origin file suffix - String originSuffix = FileUtils.suffix(originFullName); - String suffix = FileUtils.suffix(fullName); + String originSuffix = Files.getFileExtension(originFullName); + String suffix = Files.getFileExtension(fullName); boolean suffixIsChanged = false; if (StringUtils.isBlank(suffix) && StringUtils.isNotBlank(originSuffix)) { 
suffixIsChanged = true; @@ -359,6 +394,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // updateResource data Date now = new Date(); + long originFileSize = resource.getSize(); resource.setAlias(name); resource.setFileName(name); @@ -444,6 +480,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe throw new ServiceException(String.format("delete resource: %s failed.", originFullName)); } } + + updateParentResourceSize(resource, resource.getSize() - originFileSize); return result; } @@ -465,6 +503,19 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe private Result verifyFile(String name, ResourceType type, MultipartFile file) { Result result = new Result<>(); putMsg(result, Status.SUCCESS); + + if (FileUtils.directoryTraversal(name)) { + logger.error("file alias name {} verify failed", name); + putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); + return result; + } + + if (file != null && FileUtils.directoryTraversal(Objects.requireNonNull(file.getOriginalFilename()))) { + logger.error("file original name {} verify failed", file.getOriginalFilename()); + putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); + return result; + } + if (file != null) { // file is empty if (file.isEmpty()) { @@ -474,8 +525,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe } // file suffix - String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); + String fileSuffix = Files.getFileExtension(file.getOriginalFilename()); + String nameSuffix = Files.getFileExtension(name); // determine file suffix if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { @@ -576,8 +627,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe */ private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) { // save to local 
- String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(fullName); + String fileSuffix = Files.getFileExtension(file.getOriginalFilename()); + String nameSuffix = Files.getFileExtension(fullName); // determine file suffix if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { @@ -596,14 +647,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!HadoopUtils.getInstance().exists(resourcePath)) { createTenantDirIfNotExists(tenantCode); } - org.apache.dolphinscheduler.api.utils.FileUtils.copyFile(file, localFilename); + org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename); HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); } catch (Exception e) { - try { - FileUtils.deleteFile(localFilename); - } catch (IOException ex) { - logger.error("delete local tmp file:{} error", localFilename, ex); - } + FileUtils.deleteFile(localFilename); logger.error(e.getMessage(), e); return false; } @@ -730,11 +777,15 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); //delete data in database + resourcesMapper.selectBatchIds(Arrays.asList(needDeleteResourceIdArray)).forEach(item -> { + updateParentResourceSize(item, item.getSize() * -1); + }); resourcesMapper.deleteIds(needDeleteResourceIdArray); resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray); //delete file on hdfs HadoopUtils.getInstance().delete(hdfsFilename, true); + putMsg(result, Status.SUCCESS); return result; @@ -840,7 +891,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe return result; } //check preview or not by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); + String nameSuffix = 
Files.getFileExtension(resource.getAlias()); String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List strList = Arrays.asList(resourceViewSuffixs.split(",")); @@ -901,6 +952,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } + if (FileUtils.directoryTraversal(fileName)) { + putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); + return result; + } //check file suffix String nameSuffix = fileSuffix.trim(); @@ -926,6 +981,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now); resourcesMapper.insert(resource); + updateParentResourceSize(resource, resource.getSize()); putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); @@ -1005,7 +1061,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe return result; } //check can edit by file suffix - String nameSuffix = FileUtils.suffix(resource.getAlias()); + String nameSuffix = Files.getFileExtension(resource.getAlias()); String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List strList = Arrays.asList(resourceViewSuffixs.split(",")); @@ -1020,10 +1076,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (StringUtils.isEmpty(tenantCode)) { return result; } + long originFileSize = resource.getSize(); resource.setSize(content.getBytes().length); resource.setUpdateTime(new Date()); resourcesMapper.updateById(resource); + updateParentResourceSize(resource, resource.getSize() - originFileSize); + result = uploadContentToHdfs(resource.getFullName(), tenantCode, content); if (!result.getCode().equals(Status.SUCCESS.getCode())) { throw new 
ServiceException(result.getMsg()); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java index cf1c83376590101e5ec6300e6b06b2188bb14316..fd961cde178402ef036f33d53f800981dd10ba52 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java @@ -36,10 +36,12 @@ import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -57,6 +59,9 @@ import java.util.List; import java.util.Map; import org.quartz.CronExpression; +import org.quartz.JobKey; +import org.quartz.Scheduler; +import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -95,6 +100,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe @Autowired private ProcessDefinitionMapper processDefinitionMapper; + @Autowired + private Scheduler scheduler; + + @Autowired + private ProcessTaskRelationMapper 
processTaskRelationMapper; + /** * save schedule * @@ -135,7 +146,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe // check work flow define release state ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode); - result = executorService.checkProcessDefinitionValid(processDefinition, processDefineCode); + result = executorService.checkProcessDefinitionValid(projectCode,processDefinition, processDefineCode); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } @@ -236,62 +247,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionCode()); return result; } - /** - * scheduling on-line status forbid modification - */ - if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { - return result; - } - - Date now = new Date(); - - // updateProcessInstance param - if (!StringUtils.isEmpty(scheduleExpression)) { - ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end"); - putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); - return result; - } - schedule.setStartTime(scheduleParam.getStartTime()); - schedule.setEndTime(scheduleParam.getEndTime()); - if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); - return result; - } - schedule.setCrontab(scheduleParam.getCrontab()); - 
schedule.setTimezoneId(scheduleParam.getTimezoneId()); - } - - if (warningType != null) { - schedule.setWarningType(warningType); - } - - schedule.setWarningGroupId(warningGroupId); - - if (failureStrategy != null) { - schedule.setFailureStrategy(failureStrategy); - } - - schedule.setWorkerGroup(workerGroup); - schedule.setEnvironmentCode(environmentCode); - schedule.setUpdateTime(now); - schedule.setProcessInstancePriority(processInstancePriority); - scheduleMapper.updateById(schedule); - - /** - * updateProcessInstance recipients and cc by process definition ID - */ - processDefinition.setWarningGroupId(warningGroupId); - - processDefinitionMapper.updateById(processDefinition); - - putMsg(result, Status.SUCCESS); + updateSchedule(result, schedule, processDefinition, scheduleExpression, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode); return result; } @@ -335,11 +296,15 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe return result; } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(scheduleObj.getProcessDefinitionCode()); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionCode()); return result; } - + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, scheduleObj.getProcessDefinitionCode()); + if (processTaskRelations.isEmpty()) { + putMsg(result, Status.PROCESS_DAG_IS_EMPTY); + return result; + } if (scheduleStatus == ReleaseState.ONLINE) { // check process definition release state if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { @@ -349,12 +314,11 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe return result; } // check sub process definition release state - List subProcessDefineIds = new ArrayList<>(); - 
processService.recurseFindSubProcessId(processDefinition.getId(), subProcessDefineIds); - Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); - if (!subProcessDefineIds.isEmpty()) { + List subProcessDefineCodes = new ArrayList<>(); + processService.recurseFindSubProcess(processDefinition.getCode(), subProcessDefineCodes); + if (!subProcessDefineCodes.isEmpty()) { List subProcessDefinitionList = - processDefinitionMapper.queryDefinitionListByIdList(idArray); + processDefinitionMapper.queryByCodes(subProcessDefineCodes); if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) { for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { /** @@ -399,6 +363,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe return result; } } catch (Exception e) { + logger.error("set online failure", e); result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); throw new ServiceException(result.get(Constants.MSG).toString()); } @@ -432,7 +397,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode); - if (processDefinition == null) { + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineCode); return result; } @@ -494,12 +459,11 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe String jobName = QuartzExecutors.buildJobName(scheduleId); String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - - if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { - logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); - throw new ServiceException("set offline failure"); + boolean result = 
QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName); + if (!result) { + throw new ServiceException(String.format("Failed to delete job, jobName:%s, jobGroupName:%s", jobName, jobGroupName)); } - + logger.info("delete job success, job name: {}, job group name: {},", jobName, jobGroupName); } /** @@ -597,4 +561,113 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe putMsg(result, Status.SUCCESS); return result; } + + /** + * update process definition schedule + * + * @param loginUser login user + * @param projectCode project code + * @param processDefinitionCode process definition code + * @param scheduleExpression scheduleExpression + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param workerGroup worker group + * @param processInstancePriority process instance priority + * @return update result code + */ + @Override + public Map updateScheduleByProcessDefinitionCode(User loginUser, + long projectCode, + long processDefinitionCode, + String scheduleExpression, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + Priority processInstancePriority, + String workerGroup, + long environmentCode) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + if (result.get(Constants.STATUS) != Status.SUCCESS) { + return result; + } + // check schedule exists + Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode); + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, processDefinitionCode); + return result; + } + + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); + if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, 
processDefinitionCode); + return result; + } + + updateSchedule(result, schedule, processDefinition, scheduleExpression, warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup, environmentCode); + return result; + } + + private void updateSchedule(Map result, + Schedule schedule, + ProcessDefinition processDefinition, + String scheduleExpression, + WarningType warningType, + int warningGroupId, + FailureStrategy failureStrategy, + Priority processInstancePriority, + String workerGroup, + long environmentCode) { + if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { + return; + } + + Date now = new Date(); + + // updateProcessInstance param + if (!StringUtils.isEmpty(scheduleExpression)) { + ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); + if (scheduleParam == null) { + putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); + return; + } + if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { + logger.warn("The start time must not be the same as the end"); + putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); + return; + } + schedule.setStartTime(scheduleParam.getStartTime()); + schedule.setEndTime(scheduleParam.getEndTime()); + if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { + putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); + return; + } + schedule.setCrontab(scheduleParam.getCrontab()); + schedule.setTimezoneId(scheduleParam.getTimezoneId()); + } + + if (warningType != null) { + schedule.setWarningType(warningType); + } + + schedule.setWarningGroupId(warningGroupId); + + if (failureStrategy != null) { + schedule.setFailureStrategy(failureStrategy); + } + + schedule.setWorkerGroup(workerGroup); + schedule.setEnvironmentCode(environmentCode); + schedule.setUpdateTime(now); + 
schedule.setProcessInstancePriority(processInstancePriority); + scheduleMapper.updateById(schedule); + + processDefinition.setWarningGroupId(warningGroupId); + + processDefinitionMapper.updateById(processDefinition); + + putMsg(result, Status.SUCCESS); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java index de1c417cead342ab8c4f6149ececbac068bd12aa..c7f55fc7b4e26a3c7f3d85a36933ed212adbe948 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java @@ -17,23 +17,14 @@ package org.apache.dolphinscheduler.api.service.impl; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.controller.BaseController; import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.SessionMapper; - -import org.apache.commons.lang.StringUtils; - -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -41,6 +32,12 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.util.WebUtils; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import java.util.Date; 
+import java.util.List; +import java.util.UUID; + /** * session service implement */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java index cc63bbc7cb677ab429fe0b6d60e22c1c54b2ab16..199670d32536a309f9c2f01ab551730e0d013463 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java @@ -25,24 +25,32 @@ import org.apache.dolphinscheduler.api.utils.CheckUtils; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.AuthorizationType; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils.SnowFlakeException; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import 
org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.service.permission.PermissionCheck; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; +import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -58,6 +66,7 @@ import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.google.common.collect.Lists; /** * task definition service impl @@ -67,6 +76,8 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe private static final Logger logger = LoggerFactory.getLogger(TaskDefinitionServiceImpl.class); + private static final String RELEASESTATE = "releaseState"; + @Autowired private ProjectMapper projectMapper; @@ -82,6 +93,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe @Autowired private ProcessTaskRelationMapper processTaskRelationMapper; + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + @Autowired private ProcessService processService; @@ -119,14 +133,8 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; } - TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(projectCode, taskDefinitionLog.getName()); - if (taskDefinition != null) { - logger.error("task definition name {} already exists", taskDefinitionLog.getName()); - putMsg(result, Status.TASK_DEFINITION_NAME_EXISTED, 
taskDefinitionLog.getName()); - return result; - } } - int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs); + int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.DEFINITION_FAILURE) { putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); @@ -167,11 +175,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe /** * delete task definition - * + * Only offline and no downstream dependency can be deleted * @param loginUser login user * @param projectCode project code * @param taskCode task code */ + @Transactional(rollbackFor = RuntimeException.class) @Override public Map deleteTaskDefinitionByCode(User loginUser, long projectCode, long taskCode) { Project project = projectMapper.queryByCode(projectCode); @@ -180,24 +189,68 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } - List processTaskRelationList = processTaskRelationMapper.queryByTaskCode(taskCode); + if (taskCode == 0) { + putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); + return result; + } + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); + if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) { + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); + return result; + } + if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) { + putMsg(result, Status.TASK_DEFINE_STATE_ONLINE, taskCode); + return result; + } + List processTaskRelationList = processTaskRelationMapper.queryDownstreamByTaskCode(taskCode); if (!processTaskRelationList.isEmpty()) { - Set processDefinitionCodes = processTaskRelationList + Set postTaskCodes = processTaskRelationList .stream() - 
.map(ProcessTaskRelation::getProcessDefinitionCode) + .map(ProcessTaskRelation::getPostTaskCode) .collect(Collectors.toSet()); - putMsg(result, Status.PROCESS_TASK_RELATION_EXIST, StringUtils.join(processDefinitionCodes, ",")); + putMsg(result, Status.TASK_HAS_DOWNSTREAM, StringUtils.join(postTaskCodes, ",")); return result; } int delete = taskDefinitionMapper.deleteByCode(taskCode); if (delete > 0) { - putMsg(result, Status.SUCCESS); + List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); + if (!taskRelationList.isEmpty()) { + long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + List relationList = processTaskRelations.stream().filter(r -> r.getPostTaskCode() != taskCode).collect(Collectors.toList()); + updateDag(loginUser, result, processDefinitionCode, relationList, Lists.newArrayList()); + } else { + putMsg(result, Status.SUCCESS); + } } else { putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); + throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); } return result; } + private void updateDag(User loginUser, Map result, long processDefinitionCode, List processTaskRelationList, + List taskDefinitionLogs) { + ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); + if (processDefinition == null) { + throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST); + } + int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); + if (insertVersion <= 0) { + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + List relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); + int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), + 
insertVersion, relationLogs, taskDefinitionLogs, Boolean.TRUE); + if (insertResult == Constants.EXIT_CODE_SUCCESS) { + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + } + /** * update task definition * @@ -215,15 +268,15 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } - if (processService.isTaskOnline(taskCode)) { - putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE); - return result; - } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (taskDefinition == null) { putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); return result; } + if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) { + putMsg(result, Status.NOT_SUPPORT_UPDATE_TASK_DEFINITION); + return result; + } TaskDefinitionLog taskDefinitionToUpdate = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class); if (taskDefinitionToUpdate == null) { logger.error("taskDefinitionJson is not valid json"); @@ -245,7 +298,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe taskDefinitionToUpdate.setId(taskDefinition.getId()); taskDefinitionToUpdate.setProjectCode(projectCode); taskDefinitionToUpdate.setUserId(taskDefinition.getUserId()); - taskDefinitionToUpdate.setVersion(version + 1); + taskDefinitionToUpdate.setVersion(++version); taskDefinitionToUpdate.setTaskType(taskDefinitionToUpdate.getTaskType().toUpperCase()); taskDefinitionToUpdate.setResourceIds(processService.getResourceIds(taskDefinitionToUpdate)); taskDefinitionToUpdate.setUpdateTime(now); @@ -258,19 +311,28 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new 
ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); } + List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); + if (!taskRelationList.isEmpty()) { + long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + updateDag(loginUser, result, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionToUpdate)); + } else { + putMsg(result, Status.SUCCESS); + } result.put(Constants.DATA_LIST, taskCode); putMsg(result, Status.SUCCESS, update); return result; } /** - * update task definition + * switch task definition * * @param loginUser login user * @param projectCode project code * @param taskCode task code * @param version the version user want to switch */ + @Transactional(rollbackFor = RuntimeException.class) @Override public Map switchVersion(User loginUser, long projectCode, long taskCode, int version) { Project project = projectMapper.queryByCode(projectCode); @@ -284,17 +346,24 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe return result; } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); - if (taskDefinition == null) { + if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) { putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); return result; } - TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, version); - taskDefinitionLog.setUserId(loginUser.getId()); - taskDefinitionLog.setUpdateTime(new Date()); - int switchVersion = taskDefinitionMapper.updateById(taskDefinitionLog); + TaskDefinitionLog taskDefinitionUpdate = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, version); + taskDefinitionUpdate.setUserId(loginUser.getId()); + taskDefinitionUpdate.setUpdateTime(new Date()); + 
taskDefinitionUpdate.setId(taskDefinition.getId()); + int switchVersion = taskDefinitionMapper.updateById(taskDefinitionUpdate); if (switchVersion > 0) { - result.put(Constants.DATA_LIST, taskCode); - putMsg(result, Status.SUCCESS); + List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); + if (!taskRelationList.isEmpty()) { + long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); + List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + updateDag(loginUser, result, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionUpdate)); + } else { + putMsg(result, Status.SUCCESS); + } } else { putMsg(result, Status.SWITCH_TASK_DEFINITION_VERSION_ERROR); } @@ -318,7 +387,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); - IPage taskDefinitionVersionsPaging = taskDefinitionLogMapper.queryTaskDefinitionVersionsPaging(page, taskCode); + IPage taskDefinitionVersionsPaging = taskDefinitionLogMapper.queryTaskDefinitionVersionsPaging(page, taskCode, projectCode); List taskDefinitionLogs = taskDefinitionVersionsPaging.getRecords(); pageInfo.setTotalList(taskDefinitionLogs); @@ -341,6 +410,10 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe if (taskDefinition == null) { putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); } else { + if (taskDefinition.getVersion() == version) { + putMsg(result, Status.MAIN_TABLE_USING_VERSION); + return result; + } int delete = taskDefinitionLogMapper.deleteByCodeAndVersion(taskCode, version); if (delete > 0) { putMsg(result, Status.SUCCESS); @@ -361,7 +434,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); - if (taskDefinition 
== null) { + if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) { putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); } else { result.put(Constants.DATA_LIST, taskDefinition); @@ -421,9 +494,9 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskCodes = new ArrayList<>(); try { for (int i = 0; i < genNum; i++) { - taskCodes.add(SnowFlakeUtils.getInstance().nextId()); + taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } - } catch (SnowFlakeException e) { + } catch (CodeGenerateException e) { logger.error("Task code get error, ", e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code"); } @@ -432,4 +505,72 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe result.put(Constants.DATA_LIST, taskCodes); return result; } + + /** + * release task definition + * + * @param loginUser login user + * @param projectCode project code + * @param code task definition code + * @param releaseState releaseState + * @return update result code + */ + @Transactional(rollbackFor = RuntimeException.class) + @Override + public Map releaseTaskDefinition(User loginUser, long projectCode, long code, ReleaseState releaseState) { + Project project = projectMapper.queryByCode(projectCode); + //check user access for project + Map result = projectService.checkProjectAndAuth(loginUser, project, projectCode); + Status resultStatus = (Status) result.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return result; + } + if (null == releaseState) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(code); + if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) { + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, code); + return result; + } + TaskDefinitionLog taskDefinitionLog = 
taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, taskDefinition.getVersion()); + if (taskDefinitionLog == null) { + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, code); + return result; + } + switch (releaseState) { + case OFFLINE: + taskDefinition.setFlag(Flag.NO); + taskDefinitionLog.setFlag(Flag.NO); + break; + case ONLINE: + String resourceIds = taskDefinition.getResourceIds(); + if (StringUtils.isNotBlank(resourceIds)) { + Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new); + PermissionCheck permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger); + try { + permissionCheck.checkPermission(); + } catch (Exception e) { + logger.error(e.getMessage(),e); + putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION); + return result; + } + } + taskDefinition.setFlag(Flag.YES); + taskDefinitionLog.setFlag(Flag.YES); + break; + default: + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + int update = taskDefinitionMapper.updateById(taskDefinition); + int updateLog = taskDefinitionLogMapper.updateById(taskDefinitionLog); + if ((update == 0 && updateLog == 1) || (update == 1 && updateLog == 0)) { + putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); + throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); + } + putMsg(result, Status.SUCCESS); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java index 9f4b8cb8e5c69cd5a26b9934e51a67d2f612fb12..367af22e5a4118bce02e4b304cc38a175701a3ca 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java @@ -17,8 +17,9 @@ package org.apache.dolphinscheduler.api.service.impl; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.service.ProcessInstanceService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.service.TaskInstanceService; import org.apache.dolphinscheduler.api.service.UsersService; @@ -26,26 +27,19 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; - -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import java.util.*; +import java.util.stream.Collectors; /** * task instance service impl @@ -66,48 +60,48 @@ 
public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst TaskInstanceMapper taskInstanceMapper; @Autowired - ProcessInstanceService processInstanceService; + UsersService usersService; @Autowired - UsersService usersService; + TaskDefinitionMapper taskDefinitionMapper; /** * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param processInstanceId process instance id - * @param searchVal search value - * @param taskName task name - * @param stateType state type - * @param host host - * @param startDate start time - * @param endDate end time - * @param pageNo page number - * @param pageSize page size + * @param searchVal search value + * @param taskName task name + * @param stateType state type + * @param host host + * @param startDate start time + * @param endDate end time + * @param pageNo page number + * @param pageSize page size * @return task list page */ @Override public Result queryTaskListPaging(User loginUser, - long projectCode, - Integer processInstanceId, - String processInstanceName, - String taskName, - String executorName, - String startDate, - String endDate, - String searchVal, - ExecutionStatus stateType, - String host, - Integer pageNo, - Integer pageSize) { + long projectCode, + Integer processInstanceId, + String processInstanceName, + String taskName, + String executorName, + String startDate, + String endDate, + String searchVal, + ExecutionStatus stateType, + String host, + Integer pageNo, + Integer pageSize) { Result result = new Result(); Project project = projectMapper.queryByCode(projectCode); //check user access for project Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectCode); Status status = (Status) checkResult.get(Constants.STATUS); if (status != Status.SUCCESS) { - 
putMsg(result,status); + putMsg(result, status); return result; } int[] statusArray = null; @@ -117,7 +111,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst Map checkAndParseDateResult = checkAndParseDateParameters(startDate, endDate); status = (Status) checkAndParseDateResult.get(Constants.STATUS); if (status != Status.SUCCESS) { - putMsg(result,status); + putMsg(result, status); return result; } Date start = (Date) checkAndParseDateResult.get(Constants.START_TIME); @@ -126,7 +120,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst PageInfo> pageInfo = new PageInfo<>(pageNo, pageSize); int executorId = usersService.getUserIdByName(executorName); IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( - page, project.getCode(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end + page, project.getCode(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end ); Set exclusionSet = new HashSet<>(); exclusionSet.add(Constants.CLASS); @@ -150,8 +144,8 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst /** * change one task instance's state from failure to forced success * - * @param loginUser login user - * @param projectCode project code + * @param loginUser login user + * @param projectCode project code * @param taskInstanceId task instance id * @return the result code and msg */ @@ -171,12 +165,36 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst return result; } + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode()); + if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { + putMsg(result, Status.TASK_INSTANCE_NOT_FOUND, taskInstanceId); + return result; + } + // check whether the task instance state type is failure or cancel if 
(!task.getState().typeIsFailure() && !task.getState().typeIsCancel()) { putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString()); return result; } + ProcessInstance processInstance = processService.findProcessInstanceDetailById(task.getProcessInstanceId()); + if (processInstance != null && (processInstance.getState().typeIsFailure() || processInstance.getState().typeIsCancel())) { + List validTaskList = processService.findValidTaskListByProcessId(processInstance.getId()); + List instanceTaskCodeList = validTaskList.stream().map(TaskInstance::getTaskCode).collect(Collectors.toList()); + List taskRelations = processService.findRelationByCode(processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion()); + List taskDefinitionLogs = processService.genTaskDefineList(taskRelations); + List definiteTaskCodeList = taskDefinitionLogs.stream().filter(definitionLog -> definitionLog.getFlag() == Flag.YES) + .map(TaskDefinitionLog::getCode).collect(Collectors.toList()); + if (CollectionUtils.equalLists(instanceTaskCodeList, definiteTaskCodeList)) { + List failTaskList = validTaskList.stream().filter(instance -> instance.getState().typeIsFailure() || instance.getState().typeIsCancel()) + .map(TaskInstance::getId).collect(Collectors.toList()); + if (failTaskList.size() == 1 && failTaskList.contains(taskInstanceId)) { + processInstance.setState(ExecutionStatus.SUCCESS); + processService.updateProcessInstance(processInstance); + } + } + } // change the state of the task instance task.setState(ExecutionStatus.FORCED_SUCCESS); int changedNum = taskInstanceMapper.updateById(task); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index fc01bd7d20c5ddd10d4c64bdecedb37aea3cf93e..86845ad8d9484a22d42247b80ffcb62a556120b4 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -17,13 +17,14 @@ package org.apache.dolphinscheduler.api.service.impl; +import static org.apache.dolphinscheduler.common.Constants.TENANT_FULL_NAME_MAX_LENGTH; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.RegexUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; @@ -35,6 +36,7 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.Date; @@ -70,10 +72,10 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService /** * create tenant * - * @param loginUser login user + * @param loginUser login user * @param tenantCode tenant code - * @param queueId queue id - * @param desc description + * @param queueId queue id + * @param desc description * @return create result code * @throws Exception exception */ @@ -90,6 +92,11 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService return result; } + if(StringUtils.length(tenantCode) > TENANT_FULL_NAME_MAX_LENGTH){ + putMsg(result, Status.TENANT_FULL_NAME_TOO_LONG_ERROR); + return result; + } + if 
(!RegexUtils.isValidLinuxUserName(tenantCode)) { putMsg(result, Status.CHECK_OS_TENANT_CODE_ERROR); return result; @@ -116,6 +123,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService createTenantDirIfNotExists(tenantCode); } + result.put(Constants.DATA_LIST, tenant); putMsg(result, Status.SUCCESS); return result; @@ -135,7 +143,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService Result result = new Result(); if (!isAdmin(loginUser)) { - putMsg(result,Status.USER_NO_OPERATION_PERM); + putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -154,11 +162,11 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService /** * updateProcessInstance tenant * - * @param loginUser login user - * @param id tenant id + * @param loginUser login user + * @param id tenant id * @param tenantCode tenant code - * @param queueId queue id - * @param desc description + * @param queueId queue id + * @param desc description * @return update result code * @throws Exception exception */ @@ -271,6 +279,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService tenantMapper.deleteById(id); processInstanceMapper.updateProcessInstanceByTenantId(id, -1); + putMsg(result, Status.SUCCESS); return result; } @@ -320,8 +329,25 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService * @param tenantCode tenant code * @return ture if the tenant code exists, otherwise return false */ - private boolean checkTenantExists(String tenantCode) { + public boolean checkTenantExists(String tenantCode) { Boolean existTenant = tenantMapper.existTenant(tenantCode); return existTenant == Boolean.TRUE; } + + /** + * query tenant by tenant code + * + * @param tenantCode tenant code + * @return tenant detail information + */ + @Override + public Map queryByTenantCode(String tenantCode) { + Map result = new HashMap<>(); + Tenant tenant = 
tenantMapper.queryByTenantCode(tenantCode); + if (tenant != null) { + result.put(Constants.DATA_LIST, tenant); + putMsg(result, Status.SUCCESS); + } + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java index 0f1483970a3afeeab73cc1fdb28bf711c6ec5d96..0fe6e27e1c3fd75554a09a8d46dbeefd4e065525 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java @@ -17,21 +17,20 @@ package org.apache.dolphinscheduler.api.service.impl; +import org.apache.commons.collections.CollectionUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.UiPluginService; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.PluginType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.PluginDefine; import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - /** * ui plugin service impl */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java index 7e50a5416d10f52df50652beaa321390e19fae5c..15ae5e8ed95ef2b3c14bd1c51d11ced9ffa8e483 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java @@ -27,9 +27,7 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; @@ -54,7 +52,9 @@ import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; +import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.io.IOException; @@ -118,7 +118,6 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { @Autowired private ProjectMapper projectMapper; - /** * create user, only system admin have permission * @@ -174,13 +173,13 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { HadoopUtils.getInstance().mkdir(userPath); } + result.put(Constants.DATA_LIST, user); putMsg(result, Status.SUCCESS); return result; } @Override - @Transactional(rollbackFor = RuntimeException.class) public User createUser(String userName, String userPassword, String email, @@ -227,6 +226,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { user.setCreateTime(now); 
user.setUpdateTime(now); user.setQueue(""); + user.setState(Flag.YES.getCode()); // save user userMapper.insert(user); @@ -322,7 +322,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { public Result queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Result result = new Result(); if (!isAdmin(loginUser)) { - putMsg(result,Status.USER_NO_OPERATION_PERM); + putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -342,8 +342,6 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { /** * updateProcessInstance user * - * - * @param loginUser * @param userId user id * @param userName user name * @param userPassword user password @@ -409,6 +407,12 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone); return result; } + + if (state == 0 && user.getState() != state && loginUser.getId() == user.getId()) { + putMsg(result, Status.NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT); + return result; + } + user.setPhone(phone); user.setQueue(queue); user.setState(state); @@ -474,6 +478,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // updateProcessInstance user userMapper.updateById(user); + putMsg(result, Status.SUCCESS); return result; } @@ -521,8 +526,9 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } accessTokenMapper.deleteAccessTokenByUserId(id); - + userMapper.deleteById(id); + putMsg(result, Status.SUCCESS); return result; @@ -578,6 +584,90 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { return result; } + /** + * grant project by code + * + * @param loginUser login user + * @param userId user id + * @param projectCode project code + * @return grant result code + */ + @Override + public Map grantProjectByCode(final User loginUser, final int userId, final long projectCode) { + Map result = 
new HashMap<>(); + result.put(Constants.STATUS, false); + + // 1. check if user is existed + User tempUser = this.userMapper.selectById(userId); + if (tempUser == null) { + this.putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + + // 2. check if project is existed + Project project = this.projectMapper.queryByCode(projectCode); + if (project == null) { + this.putMsg(result, Status.PROJECT_NOT_FOUNT, projectCode); + return result; + } + + // 3. only project owner can operate + if (!this.hasPerm(loginUser, project.getUserId())) { + this.putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + // 4. maintain the relationship between project and user + final Date today = new Date(); + ProjectUser projectUser = new ProjectUser(); + projectUser.setUserId(userId); + projectUser.setProjectId(project.getId()); + projectUser.setPerm(7); + projectUser.setCreateTime(today); + projectUser.setUpdateTime(today); + this.projectUserMapper.insert(projectUser); + + this.putMsg(result, Status.SUCCESS); + return result; + } + + /** + * revoke the project permission for specified user. + * @param loginUser Login user + * @param userId User id + * @param projectCode Project Code + * @return + */ + @Override + public Map revokeProject(User loginUser, int userId, long projectCode) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + // 1. only admin can operate + if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { + return result; + } + + // 2. check if user is existed + User user = this.userMapper.selectById(userId); + if (user == null) { + this.putMsg(result, Status.USER_NOT_EXIST, userId); + return result; + } + + // 3. check if project is existed + Project project = this.projectMapper.queryByCode(projectCode); + if (project == null) { + this.putMsg(result, Status.PROJECT_NOT_FOUNT, projectCode); + return result; + } + + // 4. 
delete the relationship between project and user + this.projectUserMapper.deleteProjectRelation(project.getId(), user.getId()); + this.putMsg(result, Status.SUCCESS); + return result; + } + /** * grant resource * @@ -1066,6 +1156,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { Date now = new Date(); user.setUpdateTime(now); userMapper.updateById(user); + User responseUser = userMapper.queryByUserNameAccurately(userName); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, responseUser); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java index d665a1a184bb59c0ec64a648eca220fad146e4e4..1367de7d963f2ea5f36879215c72b37eebab1dc5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java @@ -177,6 +177,9 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF private Set querySourceWorkFlowCodes(long projectCode, long workFlowCode, List taskDefinitionList) { Set sourceWorkFlowCodes = new HashSet<>(); + if (taskDefinitionList == null || taskDefinitionList.isEmpty()) { + return sourceWorkFlowCodes; + } List taskDefinitionLogs = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitionList); for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { if (taskDefinitionLog.getProjectCode() == projectCode) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java index 8124ed96568556c59557398305002dfb5ea59d12..d7df710e34eca86ff3e88dc00015ca4f47c2efe4 
100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java @@ -20,25 +20,27 @@ package org.apache.dolphinscheduler.api.service.impl; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.WorkerGroupService; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.RegistryCenterUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.NodeType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.HeartBeat; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.service.registry.RegistryClient; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; +import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import org.slf4j.Logger; @@ -58,10 +60,13 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro private static final Logger logger = LoggerFactory.getLogger(WorkerGroupServiceImpl.class); @Autowired - WorkerGroupMapper workerGroupMapper; + private WorkerGroupMapper workerGroupMapper; @Autowired - ProcessInstanceMapper processInstanceMapper; + private ProcessInstanceMapper 
processInstanceMapper; + + @Autowired + private RegistryClient registryClient; /** * create or update a worker group @@ -139,7 +144,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } // check zookeeper String workerGroupPath = Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS + Constants.SINGLE_SLASH + workerGroup.getName(); - return RegistryCenterUtils.isNodeExisted(workerGroupPath); + return registryClient.exists(workerGroupPath); } /** @@ -149,7 +154,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro * @return boolean */ private String checkWorkerGroupAddrList(WorkerGroup workerGroup) { - Map serverMaps = RegistryCenterUtils.getServerMaps(NodeType.WORKER, true); + Map serverMaps = registryClient.getServerMaps(NodeType.WORKER, true); if (Strings.isNullOrEmpty(workerGroup.getAddrList())) { return null; } @@ -185,6 +190,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro List workerGroups = getWorkerGroups(true); List resultDataList = new ArrayList<>(); + int total = 0; if (CollectionUtils.isNotEmpty(workerGroups)) { List searchValDataList = new ArrayList<>(); @@ -198,7 +204,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } else { searchValDataList = workerGroups; } - + total = searchValDataList.size(); if (fromIndex < searchValDataList.size()) { if (toIndex > searchValDataList.size()) { toIndex = searchValDataList.size(); @@ -208,7 +214,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - pageInfo.setTotal(resultDataList.size()); + pageInfo.setTotal(total); pageInfo.setTotalList(resultDataList); result.setData(pageInfo); @@ -249,11 +255,11 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro List workerGroups = workerGroupMapper.queryAllWorkerGroup(); // worker groups from zookeeper String workerPath = 
Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; - List workerGroupList = null; + Collection workerGroupList = null; try { - workerGroupList = RegistryCenterUtils.getChildrenNodes(workerPath); + workerGroupList = registryClient.getChildrenKeys(workerPath); } catch (Exception e) { - logger.error("getWorkerGroups exception: {}, workerPath: {}, isPaging: {}", e.getMessage(), workerPath, isPaging); + logger.error("getWorkerGroups exception, workerPath: {}, isPaging: {}", workerPath, isPaging, e); } if (CollectionUtils.isEmpty(workerGroupList)) { @@ -267,9 +273,9 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro for (String workerGroup : workerGroupList) { String workerGroupPath = workerPath + Constants.SINGLE_SLASH + workerGroup; - List childrenNodes = null; + Collection childrenNodes = null; try { - childrenNodes = RegistryCenterUtils.getChildrenNodes(workerGroupPath); + childrenNodes = registryClient.getChildrenKeys(workerGroupPath); } catch (Exception e) { logger.error("getChildrenNodes exception: {}, workerGroupPath: {}", e.getMessage(), workerGroupPath); } @@ -280,9 +286,10 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro wg.setName(workerGroup); if (isPaging) { wg.setAddrList(String.join(Constants.COMMA, childrenNodes)); - String registeredValue = RegistryCenterUtils.getNodeData(workerGroupPath + Constants.SINGLE_SLASH + childrenNodes.get(0)); - wg.setCreateTime(DateUtils.stringToDate(registeredValue.split(Constants.COMMA)[6])); - wg.setUpdateTime(DateUtils.stringToDate(registeredValue.split(Constants.COMMA)[7])); + String registeredValue = registryClient.get(workerGroupPath + Constants.SINGLE_SLASH + childrenNodes.iterator().next()); + HeartBeat heartBeat = HeartBeat.decodeHeartBeat(registeredValue); + wg.setCreateTime(new Date(heartBeat.getStartupTime())); + wg.setUpdateTime(new Date(heartBeat.getReportTime())); wg.setSystemDefault(true); } workerGroups.add(wg); @@ -327,7 +334,7 @@ public class 
WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro @Override public Map getWorkerAddressList() { Map result = new HashMap<>(); - List serverNodeList = RegistryCenterUtils.getServerNodeList(NodeType.WORKER, true); + Set serverNodeList = registryClient.getServerNodeSet(NodeType.WORKER, true); result.put(Constants.DATA_LIST, serverNodeList); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java index 0341e0b95d24b3b332082c6fa5ba8efe1b7c41b4..389108744006b4b7d3382e9200f3cf471db7e0db 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java @@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.commons.lang.StringUtils; +import org.hibernate.validator.internal.constraintvalidators.bv.EmailValidator; import java.text.MessageFormat; import java.util.HashMap; @@ -59,11 +60,17 @@ public class CheckUtils { * @return true if email regex valid, otherwise return false */ public static boolean checkEmail(String email) { - if (StringUtils.isEmpty(email)) { + if (StringUtils.isBlank(email)) { return false; } - - return email.length() > 5 && email.length() <= 40 && regexChecks(email, Constants.REGEX_MAIL_NAME); + EmailValidator emailValidator = new EmailValidator(); + if (!emailValidator.isValid(email, null)) { + return false; + } + //Email is at least a second-level domain name + int indexDomain = email.lastIndexOf("@"); + String domainString = email.substring(indexDomain); + return domainString.contains("."); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java index 355f6c954eb51b3015ff7a34cd424600e171baa2..694bbaf69a58ee78dcf54d4ca010d99a4ec49861 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java @@ -16,19 +16,17 @@ */ package org.apache.dolphinscheduler.api.utils; +import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.io.Resource; import org.springframework.core.io.UrlResource; import org.springframework.web.multipart.MultipartFile; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; import java.net.MalformedURLException; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -39,23 +37,13 @@ public class FileUtils { private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); /** - * copy source file to target file - * - * @param file file - * @param destFilename destination file name + * copy source InputStream to target file + * @param file + * @param destFilename */ - - public static void copyFile(MultipartFile file, String destFilename) { + public static void copyInputStreamToFile(MultipartFile file, String destFilename) { try { - - File destFile = new File(destFilename); - File destParentDir = new File(destFile.getParent()); - - if (!destParentDir.exists()) { - org.apache.commons.io.FileUtils.forceMkdir(destParentDir); - } - - Files.copy(file.getInputStream(), Paths.get(destFilename)); + org.apache.commons.io.FileUtils.copyInputStreamToFile(file.getInputStream(), new File(destFilename)); } catch (IOException e) { logger.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e); } @@ -87,20 +75,12 @@ public class FileUtils { * @return file 
content string */ public static String file2String(MultipartFile file) { - StringBuilder strBuilder = new StringBuilder(); - - try (InputStreamReader inputStreamReader = new InputStreamReader(file.getInputStream(), StandardCharsets.UTF_8)) { - BufferedReader streamReader = new BufferedReader(inputStreamReader); - String inputStr; - - while ((inputStr = streamReader.readLine()) != null) { - strBuilder.append(inputStr); - } - + try { + return IOUtils.toString(file.getInputStream(), StandardCharsets.UTF_8); } catch (IOException e) { logger.error("file convert to string failed: {}", file.getName()); } - return strBuilder.toString(); + return ""; } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java deleted file mode 100644 index 340a389d1cb044f4f827120500190bc25c57f8f3..0000000000000000000000000000000000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.api.utils; - - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.net.SocketTimeoutException; -import java.util.Objects; - -public class FourLetterWordMain { - - private static final int DEFAULT_SOCKET_TIMEOUT = 5000; - protected static final Logger LOG = LoggerFactory.getLogger(FourLetterWordMain.class); - - private FourLetterWordMain() { - throw new IllegalStateException("FourLetterWordMain class"); - } - - /** - * Send the 4letterword - * @param host the destination host - * @param port the destination port - * @param cmd the 4letterword - * @return server response - * @throws java.io.IOException io exceptions - */ - public static String send4LetterWord(String host, int port, String cmd) - throws IOException { - return send4LetterWord(host, port, cmd, DEFAULT_SOCKET_TIMEOUT); - } - - /** - * Send the 4letterword - * @param host the destination host - * @param port the destination port - * @param cmd the 4letterword - * @param timeout in milliseconds, maximum time to wait while connecting/reading data - * @return server response - * @throws java.io.IOException io exceptions - */ - public static String send4LetterWord(String host, int port, String cmd, int timeout) - throws IOException { - Objects.requireNonNull(cmd, "cmd must not be null"); - LOG.info("connecting to {} {}", host, port); - InetSocketAddress hostaddress= host != null ? 
new InetSocketAddress(host, port) : - new InetSocketAddress(InetAddress.getByName(null), port); - - try (Socket sock = new Socket()) { - sock.setSoTimeout(timeout); - sock.connect(hostaddress, timeout); - OutputStream outstream = sock.getOutputStream(); - outstream.write(cmd.getBytes()); - outstream.flush(); - // this replicates NC - close the output stream before reading - sock.shutdownOutput(); - - try (BufferedReader reader = - new BufferedReader( - new InputStreamReader(sock.getInputStream()))) { - StringBuilder sb = new StringBuilder(); - String line; - while ((line = reader.readLine()) != null) { - sb.append(line + "\n"); - } - return sb.toString(); - } - } catch (SocketTimeoutException e) { - throw new IOException("Exception while executing four letter word: " + cmd, e); - } - } -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java index 95405a4f06a24b990cd2a2ab8e29f76323016a35..5b443dc3ed0483fc3a04f4b428705b203a3b89d5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/PageInfo.java @@ -51,6 +51,10 @@ public class PageInfo { */ private Integer pageNo; + public PageInfo() { + + } + public PageInfo(Integer currentPage, Integer pageSize) { if (currentPage == null) { currentPage = 1; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java index 2e43d2bdaf6ae22989ce5ef5697cceb5051aa83a..4ddf0738eddeb8188a9c196926c687799e2522c1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegexUtils.java @@ -17,7 +17,6 @@ package 
org.apache.dolphinscheduler.api.utils; -import java.util.regex.Matcher; import java.util.regex.Pattern; /** @@ -25,28 +24,11 @@ import java.util.regex.Pattern; */ public class RegexUtils { - /** - * check number regex expression - */ - private static final String CHECK_NUMBER = "^-?\\d+(\\.\\d+)?$"; - private static final String LINUX_USERNAME_PATTERN = "[a-z_][a-z\\d_]{0,30}"; private RegexUtils() { } - /** - * check if the input is number - * - * @param str input - * @return - */ - public static boolean isNumeric(String str) { - Pattern pattern = Pattern.compile(CHECK_NUMBER); - Matcher isNum = pattern.matcher(str); - return isNum.matches(); - } - /** * check if the input is a valid linux username * @param str input diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index 62d36156644511a208e8f5b32bf8f79f65962d36..928e6088809925533aec1197aece5e65d873f5bd 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -18,13 +18,14 @@ QUERY_SCHEDULE_LIST_NOTES=query schedule list EXECUTE_PROCESS_TAG=execute process related operation PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation -RUN_PROCESS_INSTANCE_NOTES=run process instance +RUN_PROCESS_INSTANCE_NOTES=run process instance +BATCH_RUN_PROCESS_INSTANCE_NOTES=batch run process instance START_NODE_LIST=start node list(node name) TASK_DEPEND_TYPE=task depend type COMMAND_TYPE=command type RUN_MODE=run mode TIMEOUT=timeout -EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=execute action to process instance +EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=execute action to process instance EXECUTE_TYPE=execute type START_CHECK_PROCESS_DEFINITION_NOTES=start check process definition GET_RECEIVER_CC_NOTES=query receiver cc @@ -140,10 +141,16 @@ DELETE_PROJECT_BY_ID_NOTES=delete project by id QUERY_UNAUTHORIZED_PROJECT_NOTES=query 
unauthorized project QUERY_ALL_PROJECT_LIST_NOTES=query all project list QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project +QUERY_AUTHORIZED_USER_NOTES=query authorized user TASK_RECORD_TAG=task record related operation QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging -CREATE_TOKEN_NOTES=create token ,note: please login first +CREATE_TOKEN_NOTES=create access token for specified user +UPDATE_TOKEN_NOTES=update access token for specified user +TOKEN=access token string, it will be automatically generated when it absent +EXPIRE_TIME=expire time for the token +TOKEN_ID=access token id QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging +QUERY_ACCESS_TOKEN_BY_USER_NOTES=query access token for specified user SCHEDULE=schedule WARNING_TYPE=warning type(sending strategy) WARNING_GROUP_ID=warning group id @@ -221,6 +228,9 @@ UPDATE_USER_NOTES=update user DELETE_USER_BY_ID_NOTES=delete user by id GRANT_PROJECT_NOTES=GRANT PROJECT PROJECT_IDS=project ids(string format, multiple projects separated by ",") +GRANT_PROJECT_BY_CODE_NOTES=GRANT PROJECT BY CODE +REVOKE_PROJECT_NOTES=REVOKE PROJECT FOR USER +PROJECT_CODE=project code GRANT_RESOURCE_NOTES=grant resource file RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") GET_USER_INFO_NOTES=get user info diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index db705be6abbd7256e3c2d034ff6aa0498316789e..7e9a7e62bc57570f52c030384ac0d430241c875c 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -17,7 +17,9 @@ QUERY_SCHEDULE_LIST_NOTES=query schedule list EXECUTE_PROCESS_TAG=execute process related operation PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation -RUN_PROCESS_INSTANCE_NOTES=run process instance +RUN_PROCESS_INSTANCE_NOTES=run 
process instance +BATCH_RUN_PROCESS_INSTANCE_NOTES=batch run process instance(If any processDefinitionCode cannot be found, the failure\ + \ information is returned and the status is set to failed. The successful task will run normally and will not stop) START_NODE_LIST=start node list(node name) TASK_DEPEND_TYPE=task depend type COMMAND_TYPE=command type @@ -156,10 +158,16 @@ QUERY_ALL_PROJECT_LIST_NOTES=query all project list DELETE_PROJECT_BY_ID_NOTES=delete project by id QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project +QUERY_AUTHORIZED_USER_NOTES=query authorized user TASK_RECORD_TAG=task record related operation QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging -CREATE_TOKEN_NOTES=create token ,note: please login first +CREATE_TOKEN_NOTES=create access token for specified user +UPDATE_TOKEN_NOTES=update access token for specified user +TOKEN=access token string, it will be automatically generated when it absent +EXPIRE_TIME=expire time for the token +TOKEN_ID=access token id QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging +QUERY_ACCESS_TOKEN_BY_USER_NOTES=query access token for specified user SCHEDULE=schedule WARNING_TYPE=warning type(sending strategy) WARNING_GROUP_ID=warning group id @@ -268,6 +276,9 @@ UPDATE_QUEUE_NOTES=update queue DELETE_USER_BY_ID_NOTES=delete user by id GRANT_PROJECT_NOTES=GRANT PROJECT PROJECT_IDS=project ids(string format, multiple projects separated by ",") +GRANT_PROJECT_BY_CODE_NOTES=GRANT PROJECT BY CODE +REVOKE_PROJECT_NOTES=REVOKE PROJECT FOR USER +PROJECT_CODE=project code GRANT_RESOURCE_NOTES=grant resource file RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") GET_USER_INFO_NOTES=get user info diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index 
ec88f74fb57fa35fb11a979224f5a0aeae9cda92..d5df6cd18b9502c1ec9422ada58c176285769323 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -19,6 +19,7 @@ PROCESS_INSTANCE_EXECUTOR_TAG=流程实例执行相关操作 UI_PLUGINS_TAG=UI插件相关操作 WORK_FLOW_LINEAGE_TAG=工作流血缘相关操作 RUN_PROCESS_INSTANCE_NOTES=运行流程实例 +BATCH_RUN_PROCESS_INSTANCE_NOTES=批量运行流程实例(其中有任意一个processDefinitionCode找不到,则返回失败信息并且状态置为失败,成功的任务会正常运行,不会停止) START_NODE_LIST=开始节点列表(节点name) TASK_DEPEND_TYPE=任务依赖类型 COMMAND_TYPE=指令类型 @@ -145,10 +146,16 @@ QUERY_ALL_PROJECT_LIST_NOTES=查询所有项目 DELETE_PROJECT_BY_ID_NOTES=通过ID删除项目 QUERY_UNAUTHORIZED_PROJECT_NOTES=查询未授权的项目 QUERY_AUTHORIZED_PROJECT_NOTES=查询授权项目 +QUERY_AUTHORIZED_USER_NOTES=查询拥有项目授权的用户 TASK_RECORD_TAG=任务记录相关操作 QUERY_TASK_RECORD_LIST_PAGING_NOTES=分页查询任务记录列表 -CREATE_TOKEN_NOTES=创建token,注意需要先登录 +CREATE_TOKEN_NOTES=为指定用户创建安全令牌 +UPDATE_TOKEN_NOTES=更新指定用户的安全令牌 +TOKEN=安全令牌字符串,若未显式指定将会自动生成 +EXPIRE_TIME=安全令牌的过期时间 +TOKEN_ID=安全令牌的ID QUERY_ACCESS_TOKEN_LIST_NOTES=分页查询access token列表 +QUERY_ACCESS_TOKEN_BY_USER_NOTES=查询指定用户的access token SCHEDULE=定时 WARNING_TYPE=发送策略 WARNING_GROUP_ID=发送组ID @@ -256,6 +263,9 @@ UPDATE_QUEUE_NOTES=更新队列 DELETE_USER_BY_ID_NOTES=删除用户通过ID GRANT_PROJECT_NOTES=授权项目 PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) +GRANT_PROJECT_BY_CODE_NOTES=授权项目 +REVOKE_PROJECT_NOTES=撤销用户的项目权限 +PROJECT_CODE=项目Code GRANT_RESOURCE_NOTES=授权资源文件 RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) GET_USER_INFO_NOTES=获取用户信息 diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/HttpClientTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/HttpClientTest.java deleted file mode 100644 index 6753d93d444816cb28c7c567bf1df7cf73911a98..0000000000000000000000000000000000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/HttpClientTest.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under 
one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.api; - -import org.apache.http.NameValuePair; -import org.apache.http.client.entity.UrlEncodedFormEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicNameValuePair; -import org.apache.http.util.EntityUtils; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.URI; -import java.util.ArrayList; -import java.util.List; - -public class HttpClientTest { - - private static final Logger logger = LoggerFactory.getLogger(HttpClientTest.class); - - @Test - public void doPOSTParam()throws Exception{ - // create HttpClient - CloseableHttpClient httpclient = HttpClients.createDefault(); - - // create http post request - HttpPost httpPost = new HttpPost("http://localhost:12345/dolphinscheduler/projects/create"); - httpPost.setHeader("token", "123"); - // set parameters - List parameters = new ArrayList(); - parameters.add(new BasicNameValuePair("projectName", 
"qzw")); - parameters.add(new BasicNameValuePair("desc", "qzw")); - - UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(parameters); - httpPost.setEntity(formEntity); - - - CloseableHttpResponse response = null; - try { - // execute - response = httpclient.execute(httpPost); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info(content); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - - /** - * do get param path variables chinese - * @throws Exception - */ - @Test - public void doGETParamPathVariableAndChinese()throws Exception{ - // create HttpClient - CloseableHttpClient httpclient = HttpClients.createDefault(); - - List parameters = new ArrayList(); - // parameters.add(new BasicNameValuePair("pageSize", "10")); - - // define the parameters of the request - URI uri = new URIBuilder("http://localhost:12345/dolphinscheduler/projects/%E5%85%A8%E9%83%A8%E6%B5%81%E7%A8%8B%E6%B5%8B%E8%AF%95/process/list") - .build(); - - // create http GET request - HttpGet httpGet = new HttpGet(uri); - httpGet.setHeader("token","10f5625a2a1cbf9aa710653796c5d764"); - //response object - CloseableHttpResponse response = null; - try { - // execute http get request - response = httpclient.execute(httpGet); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info("start--------------->"); - logger.info(content); - logger.info("end----------------->"); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - - /** - * - * do get param - * @throws Exception - */ - @Test - public void doGETParam()throws Exception{ - // create HttpClient - CloseableHttpClient httpclient = HttpClients.createDefault(); - - List parameters = new ArrayList(); - parameters.add(new 
BasicNameValuePair("startDate", "2018-04-22 19:30:08")); - parameters.add(new BasicNameValuePair("endDate", "2028-04-22 19:30:08")); - parameters.add(new BasicNameValuePair("projectId", "0")); - - // define the parameters of the request - URI uri = new URIBuilder("http://localhost:12345/dolphinscheduler/projects/analysis/queue-count") - .setParameters(parameters) - .build(); - - // create http GET request - HttpGet httpGet = new HttpGet(uri); - httpGet.setHeader("token","2aef24c052c212fab9eec78848c2258b"); - //response object - CloseableHttpResponse response = null; - try { - // execute http get request - response = httpclient.execute(httpGet); - // response status code 200 - if (response.getStatusLine().getStatusCode() == 200) { - String content = EntityUtils.toString(response.getEntity(), "UTF-8"); - logger.info("start--------------->"); - logger.info(content); - logger.info("end----------------->"); - } - } finally { - if (response != null) { - response.close(); - } - httpclient.close(); - } - } - -} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPluginTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspectTest.java similarity index 54% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPluginTest.java rename to dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspectTest.java index 5086352a46a6e4fdd9ba29ca6ce788a7148e4308..e272e4256e48012c375c705f3cb6df22bed58864 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/test/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPluginTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspectTest.java @@ -15,22 +15,28 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.alert.slack; - -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +package org.apache.dolphinscheduler.api.aspect; import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +/** + * @author Hua Jiang + */ -public class SlackAlertPluginTest { +public class AccessLogAspectTest { - private SlackAlertPlugin slackAlertPlugin = new SlackAlertPlugin(); + private AccessLogAspect accessLogAspect = new AccessLogAspect(); @Test - public void testGetAlertChannelFactorys() { - Iterable alertChannelFactorys = slackAlertPlugin.getAlertChannelFactorys(); - for (AlertChannelFactory alertChannelFactory : alertChannelFactorys) { - Assert.assertTrue(alertChannelFactory instanceof SlackAlertChannelFactory); - } + public void testHandleSensitiveData() { + String data = "userPassword='7ad2410b2f4c074479a8937a28a22b8f', email='xxx@qq.com', database='null', userName='root', password='root', other='null'"; + String expected = "userPassword='********************************', email='xxx@qq.com', database='null', userName='root', password='****', other='null'"; + + String actual = accessLogAspect.handleSensitiveData(data); + + Assert.assertEquals(expected, actual); + } -} \ No newline at end of file + +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java index 41a1cc351e934c2c2893f5c1331ff6472132fcde..867f34261bef5bcec2e95166d7ba9fced32162cd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AbstractControllerTest.java @@ -20,29 +20,30 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.ApiApplicationServer; import 
org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.SessionService; -import org.apache.dolphinscheduler.api.utils.RegistryCenterUtils; +import org.apache.dolphinscheduler.api.service.UsersService; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.ProfileType; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.service.registry.RegistryClient; import org.apache.commons.lang.StringUtils; +import org.apache.curator.test.TestingServer; import java.text.MessageFormat; import java.util.HashMap; import java.util.Map; +import javax.annotation.PostConstruct; + import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.Ignore; import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PowerMockIgnore; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.powermock.modules.junit4.PowerMockRunnerDelegate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Profile; +import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; @@ -51,11 +52,10 @@ import org.springframework.web.context.WebApplicationContext; /** * abstract controller test */ -@RunWith(PowerMockRunner.class) -@PowerMockRunnerDelegate(SpringRunner.class) +@ActiveProfiles(value = {ProfileType.H2}) +@RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) -@PrepareForTest({ 
RegistryCenterUtils.class, RegistryClient.class }) -@PowerMockIgnore({"javax.management.*"}) +@Ignore public class AbstractControllerTest { public static final String SESSION_ID = "sessionId"; @@ -68,18 +68,19 @@ public class AbstractControllerTest { @Autowired private SessionService sessionService; + @Autowired + private UsersService usersService; + protected User user; protected String sessionId; @Before public void setUp() { - PowerMockito.suppress(PowerMockito.constructor(RegistryClient.class)); - PowerMockito.mockStatic(RegistryCenterUtils.class); - mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); - createSession(); + user = usersService.queryUser(1); + createSession(user); } @After @@ -87,18 +88,14 @@ public class AbstractControllerTest { sessionService.signOut("127.0.0.1", user); } - private void createSession() { - - User loginUser = new User(); - loginUser.setId(1); - loginUser.setUserType(UserType.GENERAL_USER); + private void createSession(User loginUser) { user = loginUser; String session = sessionService.createSession(loginUser, "127.0.0.1"); sessionId = session; - Assert.assertTrue(!StringUtils.isEmpty(session)); + Assert.assertFalse(StringUtils.isEmpty(session)); } public Map success() { @@ -115,4 +112,14 @@ public class AbstractControllerTest { result.put(Constants.MSG, status.getMsg()); } } + + @Configuration + @Profile(ProfileType.H2) + public static class RegistryServer { + @PostConstruct + public void startEmbedRegistryServer() throws Exception { + final TestingServer server = new TestingServer(true); + System.setProperty("registry.servers", server.getConnectString()); + } + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java index a13d773abf6a397a7b04168a80df62b72bb3db6f..1426c19726b5d7cc4f668d32dfdc280e1a39776a 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.api.controller; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -59,6 +60,26 @@ public class AccessTokenControllerTest extends AbstractControllerTest { logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testCreateTokenIfAbsent() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userId", "4"); + paramsMap.add("expireTime", "2019-12-18 00:00:00"); + paramsMap.add("token", null); + + MvcResult mvcResult = this.mockMvc + .perform(post("/access-tokens") + .header("sessionId", this.sessionId) + .params(paramsMap)) + .andExpect(status().isCreated()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + @Test public void testExceptionHandler() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -109,6 +130,19 @@ public class AccessTokenControllerTest extends AbstractControllerTest { logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testQueryAccessTokenByUser() throws Exception { + MvcResult mvcResult = 
this.mockMvc + .perform(get("/access-tokens/user/1") + .header("sessionId", this.sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + @Test public void testDelAccessTokenById() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -142,4 +176,26 @@ public class AccessTokenControllerTest extends AbstractControllerTest { logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testUpdateTokenIfAbsent() throws Exception { + this.testCreateTokenIfAbsent(); + + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userId", "4"); + paramsMap.add("expireTime", "2019-12-20 00:00:00"); + paramsMap.add("token", null); + + MvcResult mvcResult = this.mockMvc + .perform(put("/access-tokens/2") + .header("sessionId", this.sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + Assert.assertNotNull(result.getData()); + logger.info(mvcResult.getResponse().getContentAsString()); + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java index e7863739ded1a9e9f7cff326ccfa329d5a52064b..f412ee6e90feeabe8d63607d77dc3bd6a1492b10 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java @@ -24,8 +24,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceParamDTO; import java.util.HashMap; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/EnvironmentControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/EnvironmentControllerTest.java index 0b7233be9220b367663eca0e3e66d08267c0ae82..98426346c2fae3bdd15709d8569f612b49ee2a5f 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/EnvironmentControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/EnvironmentControllerTest.java @@ -40,12 +40,8 @@ import org.springframework.util.MultiValueMap; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; -/** - * environment controller test - */ public class EnvironmentControllerTest extends AbstractControllerTest { - - private static Logger logger = LoggerFactory.getLogger(EnvironmentControllerTest.class); + private static final Logger logger = LoggerFactory.getLogger(EnvironmentControllerTest.class); private String environmentCode; @@ -60,6 +56,7 @@ public class EnvironmentControllerTest extends AbstractControllerTest { testCreateEnvironment(); } + @Override @After public void after() throws Exception { testDeleteEnvironment(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java index 057a73adfe2a324e8139959bc5ccff1b191adb2a..0730ce8cf9583ca62871028495829c4cb7ab19b0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/MonitorControllerTest.java @@ -75,23 +75,7 @@ public class MonitorControllerTest extends AbstractControllerTest { @Test public void testQueryDatabaseState() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/database") - .header(SESSION_ID, sessionId) - /* .param("type", ResourceType.FILE.name())*/) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - result.getCode().equals(Status.SUCCESS.getCode()); - - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } - - @Test - public void testQueryZookeeperState() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/monitor/zookeeper/list") + MvcResult mvcResult = mockMvc.perform(get("/monitor/databases") .header(SESSION_ID, sessionId) /* .param("type", ResourceType.FILE.name())*/) .andExpect(status().isOk()) diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java index 4737c2f8f8627ae24c41181891340ca657196956..d141a56d65d905826c669ea2dbdfd53e7038dec7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -339,7 +339,7 @@ public class ProcessDefinitionControllerTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result); + Mockito.when(processDefinitionService.viewTree(projectCode, processId, limit)).thenReturn(result); Result response = processDefinitionController.viewTree(user, projectCode, processId, limit); Assert.assertTrue(response != null && response.isSuccess()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java index 7d6fabc12721fe2fcee44d388a74cf917d820e79..27665ae4b211bbee4d80c9f32cc088ea271221ff 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java @@ -23,12 +23,19 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. 
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessInstanceService; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import java.util.HashMap; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; +import org.powermock.api.mockito.PowerMockito; +import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; @@ -39,6 +46,9 @@ import org.springframework.util.MultiValueMap; */ public class ProcessInstanceControllerTest extends AbstractControllerTest { + @MockBean(name = "processInstanceService") + private ProcessInstanceService processInstanceService; + @Test public void testQueryProcessInstanceList() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -146,13 +156,14 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest { @Test public void testViewVariables() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/instance/view-variables", "cxc_1113") - .header(SESSION_ID, sessionId) - .param("processInstanceId", "1204")) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - + Map mockResult = new HashMap<>(); + mockResult.put(Constants.STATUS, Status.SUCCESS); + PowerMockito.when(processInstanceService.viewVariables(1113L,123)).thenReturn(mockResult); + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-instances/{id}/view-variables", "1113", "123") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + 
.andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertNotNull(result); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..ffb478ff7ecf57435575d6f3db7c3a2badc3c1a5 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessTaskRelationControllerTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.controller; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessTaskRelationService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +/** + * process task relation controller test + */ +public class ProcessTaskRelationControllerTest extends AbstractControllerTest { + + @MockBean + private ProcessTaskRelationService processTaskRelationService; + + @Test + public void testQueryDownstreamRelation() throws Exception { + Map mockResult = new HashMap<>(); + mockResult.put(Constants.STATUS, Status.SUCCESS); + PowerMockito.when(processTaskRelationService.queryDownstreamRelation(Mockito.any(), Mockito.anyLong(), Mockito.anyLong())) + .thenReturn(mockResult); + + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/downstream", "1113", "123") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertNotNull(result); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + } + + 
@Test + public void testQueryUpstreamRelation() throws Exception { + Map mockResult = new HashMap<>(); + mockResult.put(Constants.STATUS, Status.SUCCESS); + PowerMockito.when(processTaskRelationService.queryUpstreamRelation(Mockito.any(), Mockito.anyLong(), Mockito.anyLong())) + .thenReturn(mockResult); + + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectCode}/process-task-relation/{taskCode}/upstream", "1113", "123") + .header(SESSION_ID, sessionId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertNotNull(result); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java index 0bce72d9f96b0f7f7bb6f8be6327676fba8aded7..118484c4c974a2149293615fc07d504ab92cc300 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java @@ -125,6 +125,16 @@ public class ProjectControllerTest { Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); } + @Test + public void testQueryAuthorizedUser() { + Map result = new HashMap<>(); + this.putMsg(result, Status.SUCCESS); + + Mockito.when(this.projectService.queryAuthorizedUser(this.user, 3682329499136L)).thenReturn(result); + Result response = this.projectController.queryAuthorizedUser(this.user, 3682329499136L); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); + } + @Test public void testQueryAllProjectList() { Map result = new HashMap<>(); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java index 81bde4d11920b3ef85dc7a6694cde7d1f7db543c..fd46f48c6f034de59aad537fadcabb0226434c3a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java @@ -25,7 +25,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.utils.JSONUtils; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java index 62022d8b40534b9c9d012b22ec4482cb056fc698..a0a1772d6aadd76df9dbcb2d48a67db4cabc4601 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/SchedulerControllerTest.java @@ -48,9 +48,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -/** - * scheduler controller test - */ public class SchedulerControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java index 486707cf9f0be6983e3c072de8d90a5d834bb7b4..a61e7d38ec663438ff610cf535b3d26de3485905 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java @@ -50,9 +50,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -/** - * task instance controller test - */ public class TaskInstanceControllerTest extends AbstractControllerTest { @InjectMocks diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java index 14cd52fbe1bca597e7d957b352da539805a79f2d..7f675a5b3eb87bb65a564d2a3f9fbfe2416c31ad 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java @@ -37,9 +37,6 @@ import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -/** - * tenant controller test - */ public class TenantControllerTest extends AbstractControllerTest { private static Logger logger = LoggerFactory.getLogger(TenantControllerTest.class); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java index 
fb4b0cee4ae47265b5df025eec96c65f10254470..248cfea2a9cd4d61e9f26d014d62e53e55a85842 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -108,6 +108,43 @@ public class UsersControllerTest extends AbstractControllerTest { logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testGrantProjectByCode() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userId", "32"); + paramsMap.add("projectCode", "3682329499136"); + + MvcResult mvcResult = this.mockMvc + .perform(post("/users/grant-project-by-code") + .header(SESSION_ID, this.sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.USER_NOT_EXIST.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void testRevokeProject() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userId", "32"); + paramsMap.add("projectCode", "3682329499136"); + + MvcResult mvcResult = this.mockMvc.perform(post("/users/revoke-project") + .header(SESSION_ID, this.sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.USER_NOT_EXIST.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + @Test public void testGrantResource() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java index f6e79bf77068869c64e586e426ed6b94e415bda8..873236f55126834ef71a62d7a7936f6caf18cabc 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/WorkerGroupControllerTest.java @@ -22,7 +22,6 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import org.apache.dolphinscheduler.api.utils.RegistryCenterUtils; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.NodeType; @@ -30,6 +29,7 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.service.registry.RegistryClient; import java.util.HashMap; import java.util.Map; @@ -59,12 +59,15 @@ public class WorkerGroupControllerTest extends AbstractControllerTest { @MockBean private ProcessInstanceMapper processInstanceMapper; + @MockBean + private RegistryClient registryClient; + @Test public void testSaveWorkerGroup() throws Exception { Map serverMaps = new HashMap<>(); serverMaps.put("192.168.0.1", "192.168.0.1"); serverMaps.put("192.168.0.2", "192.168.0.2"); - PowerMockito.when(RegistryCenterUtils.getServerMaps(NodeType.WORKER, true)).thenReturn(serverMaps); + 
PowerMockito.when(registryClient.getServerMaps(NodeType.WORKER, true)).thenReturn(serverMaps); MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("name","cxc_work_group"); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java index 4e31a71e9d4959e5a027e9132b97eaf24cfcd710..d0c1de12057656e43aa8c07ec9bfcdcde0290b44 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java @@ -21,6 +21,7 @@ import org.junit.Test; import org.springframework.context.i18n.LocaleContextHolder; import java.util.Locale; +import java.util.Optional; import static org.junit.Assert.*; @@ -41,4 +42,15 @@ public class StatusTest { Assert.assertEquals("成功", Status.SUCCESS.getMsg()); } + @Test + public void testGetStatusByCode() { + // FAILURE + Optional optional = Status.findStatusBy(1); + Assert.assertFalse(optional.isPresent()); + + // SUCCESS + optional = Status.findStatusBy(10018); + Assert.assertTrue(optional.isPresent()); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, optional.get()); + } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java index 9b6814815cbbdfa39c2fb4fb582ef21b0ad8c38d..9fa7aa2c14bb4ea396decd2c9504ec380581e254 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java @@ -79,6 +79,7 @@ public class LdapAuthenticatorTest extends AbstractControllerTest { private String ip = "127.0.0.1"; private UserType userType = 
UserType.GENERAL_USER; + @Override @Before public void setUp() { ldapAuthenticator = new LdapAuthenticator(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java index 8cd435f9545382e38858ef203260a1211cea3f5e..4cfdb0e50a5e224dc64b3cc82213a86d73235cf0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java @@ -18,18 +18,23 @@ package org.apache.dolphinscheduler.api.security.impl.ldap; import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.common.enums.ProfileType; import org.apache.dolphinscheduler.common.enums.UserType; import org.junit.Assert; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringRunner; +@Ignore +@ActiveProfiles(ProfileType.H2) @RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) @TestPropertySource( @@ -78,4 +83,4 @@ public class LdapServiceTest { String email2 = ldapService.ldapLogin("tesla", "error password"); Assert.assertNull(email2); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java index 2ccc802ecf4c5bfabedc22d5d75801be9c7770ab..b58a4b59dfff7ee57824d06b0b1dbfce8f9f2bef 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java @@ -57,6 +57,7 @@ public class PasswordAuthenticatorTest extends AbstractControllerTest { private User mockUser; private Session mockSession; + @Override @Before public void setUp() { authenticator = new PasswordAuthenticator(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java index 3b8ef6b792c0ea29c5f2e5cff9cf0603710137c0..a9276a5f4010cb55601cf702301dba91b83e68b5 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java @@ -38,11 +38,13 @@ import java.util.Date; import java.util.List; import java.util.Map; +import org.assertj.core.util.Lists; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; +import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,12 +82,36 @@ public class AccessTokenServiceTest { } @Test - public void testCreateToken() { + public void testQueryAccessTokenByUser() { + List accessTokenList = Lists.newArrayList(this.getEntity()); + Mockito.when(this.accessTokenMapper.queryAccessTokenByUser(1)).thenReturn(accessTokenList); + + // USER_NO_OPERATION_PERM + User user = this.getLoginUser(); + 
user.setUserType(UserType.GENERAL_USER); + Map result = this.accessTokenService.queryAccessTokenByUser(user, 1); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); + + // SUCCESS + user.setUserType(UserType.ADMIN_USER); + result = this.accessTokenService.queryAccessTokenByUser(user, 1); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + @Test + public void testCreateToken() { + // Given Token when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2); Map result = accessTokenService.createToken(getLoginUser(), 1, getDate(), "AccessTokenServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + + // Token is absent + result = this.accessTokenService.createToken(getLoginUser(), 1, getDate(), null); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test @@ -121,16 +147,23 @@ public class AccessTokenServiceTest { @Test public void testUpdateToken() { - + // Given Token when(accessTokenMapper.selectById(1)).thenReturn(getEntity()); Map result = accessTokenService.updateToken(getLoginUser(), 1,Integer.MAX_VALUE,getDate(),"token"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - // not exist + Assert.assertNotNull(result.get(Constants.DATA_LIST)); + + // Token is absent + result = accessTokenService.updateToken(getLoginUser(), 1, Integer.MAX_VALUE,getDate(),null); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + Assert.assertNotNull(result.get(Constants.DATA_LIST)); + + // ACCESS_TOKEN_NOT_EXIST result = accessTokenService.updateToken(getLoginUser(), 2,Integer.MAX_VALUE,getDate(),"token"); logger.info(result.toString()); Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS)); - } private User 
getLoginUser() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java index eea323e6f625f703358d86a41e890941db438ecf..443b68e064e5dc1fc8075070e753741f76c6ed01 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java @@ -26,11 +26,11 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; -import org.apache.dolphinscheduler.dao.vo.AlertGroupVo; + +import org.apache.commons.collections.CollectionUtils; import java.util.ArrayList; import java.util.List; @@ -78,10 +78,10 @@ public class AlertGroupServiceTest { @Test public void testListPaging() { - IPage page = new Page<>(1, 10); + IPage page = new Page<>(1, 10); page.setTotal(1L); - page.setRecords(getAlertGroupVoList()); - Mockito.when(alertGroupMapper.queryAlertGroupVo(any(Page.class), eq(groupName))).thenReturn(page); + page.setRecords(getList()); + Mockito.when(alertGroupMapper.queryAlertGroupPage(any(Page.class), eq(groupName))).thenReturn(page); User user = new User(); // no operate Result result = alertGroupService.listPaging(user, groupName, 1, 10); @@ -91,7 +91,7 @@ public class AlertGroupServiceTest { user.setUserType(UserType.ADMIN_USER); result = alertGroupService.listPaging(user, groupName, 1, 10); logger.info(result.toString()); - PageInfo pageInfo = (PageInfo) result.getData(); + 
PageInfo pageInfo = (PageInfo) result.getData(); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); } @@ -217,23 +217,4 @@ public class AlertGroupServiceTest { return alertGroup; } - /** - * get AlertGroupVo list - */ - private List getAlertGroupVoList() { - List alertGroupVos = new ArrayList<>(); - alertGroupVos.add(getAlertGroupVoEntity()); - return alertGroupVos; - } - - /** - * get AlertGroupVo entity - */ - private AlertGroupVo getAlertGroupVoEntity() { - AlertGroupVo alertGroupVo = new AlertGroupVo(); - alertGroupVo.setId(1); - alertGroupVo.setGroupName(groupName); - return alertGroupVo; - } - } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index f115370415fe3f94445582c97b179d39611d6073..292bf71aaf604e09d1ded61a1310774594a5b886 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -17,11 +17,6 @@ package org.apache.dolphinscheduler.api.service; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyLong; - import org.apache.dolphinscheduler.api.dto.CommandStateCount; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl; @@ -30,26 +25,9 @@ import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.entity.CommandCount; -import 
org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; -import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; -import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.CommandMapper; -import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.service.process.ProcessService; - -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -61,6 +39,12 @@ import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.modules.junit4.PowerMockRunner; +import java.text.MessageFormat; +import java.util.*; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.*; + /** * data analysis service test */ @@ -190,10 +174,10 @@ public class DataAnalysisServiceTest { Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(any(), any(), any())).thenReturn( Collections.emptyList()); result = dataAnalysisServiceImpl.countTaskStateByProject(user, 1, null, null); - assertThat(result.get(Constants.DATA_LIST)).extracting("totalCount").first().isEqualTo(0); - assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList().hasSameSizeAs( + assertThat(result.get(Constants.DATA_LIST)).extracting("totalCount").isEqualTo(0); + 
assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").asList().hasSameSizeAs( ExecutionStatus.values()); - assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList().extracting( + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").asList().extracting( "count").allMatch(count -> count.equals(0)); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java index 022ea24db20a733c41e897b68fad7355c3b2b502..68d02c2a0edc61b7ab721f9014ae35eeb76c5059 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java @@ -21,22 +21,24 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.DataSourceServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.datasource.DatasourceUtil; -import org.apache.dolphinscheduler.common.datasource.hive.HiveDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbConnectType; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import 
org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive.HiveDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle.OracleDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql.PostgreSqlDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbConnectType; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; import java.sql.Connection; import java.util.ArrayList; @@ -60,7 +62,7 @@ import org.powermock.modules.junit4.PowerMockRunner; */ @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) -@PrepareForTest({DatasourceUtil.class, CommonUtils.class}) +@PrepareForTest({DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class DataSourceServiceTest { @InjectMocks @@ -307,13 +309,14 @@ public class DataSourceServiceTest { oracleDatasourceParamDTO.setConnectType(DbConnectType.ORACLE_SERVICE_NAME); ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(oracleDatasourceParamDTO); - String expected = 
"{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\"," - + "\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"connectType\":\"ORACLE_SERVICE_NAME\"}"; + String expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\"," + + "\"driverClassName\":\"oracle.jdbc.OracleDriver\",\"validationQuery\":\"select 1 from dual\",\"connectType\":\"ORACLE_SERVICE_NAME\"}"; Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam)); PowerMockito.mockStatic(CommonUtils.class); + PowerMockito.mockStatic(PasswordUtils.class); PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(true); - PowerMockito.when(CommonUtils.encodePassword(Mockito.anyString())).thenReturn("test"); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO(); hiveDataSourceParamDTO.setHost("192.168.9.1"); hiveDataSourceParamDTO.setPort(10000); @@ -325,8 +328,9 @@ public class DataSourceServiceTest { hiveDataSourceParamDTO.setLoginUserKeytabPath("/opt/hdfs.headless.keytab"); hiveDataSourceParamDTO.setLoginUserKeytabUsername("test2/hdfs-mycluster@ESZ.COM"); connectionParam = DatasourceUtil.buildConnectionParams(hiveDataSourceParamDTO); - expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:hive2://192.168.9.1:10000\"," - + "\"database\":\"im\",\"jdbcUrl\":\"jdbc:hive2://192.168.9.1:10000/im;principal=hive/hdfs-mycluster@ESZ.COM\",\"principal\":\"hive/hdfs-mycluster@ESZ.COM\"," + + expected = "{\"user\":\"test\",\"password\":\"test\",\"address\":\"jdbc:hive2://192.168.9.1:10000\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:hive2://192.168.9.1:10000/im\"," + + "\"driverClassName\":\"org.apache.hive.jdbc.HiveDriver\",\"validationQuery\":\"select 
1\",\"principal\":\"hive/hdfs-mycluster@ESZ.COM\"," + "\"javaSecurityKrb5Conf\":\"/opt/krb5.conf\",\"loginUserKeytabUsername\":\"test2/hdfs-mycluster@ESZ.COM\",\"loginUserKeytabPath\":\"/opt/hdfs.headless.keytab\"}"; Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam)); @@ -346,8 +350,8 @@ public class DataSourceServiceTest { mysqlDatasourceParamDTO.setPassword("123456"); mysqlDatasourceParamDTO.setOther(other); ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO); - String expected = "{\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\",\"address\":\"jdbc:mysql://192.168.9.1:1521\"," - + "\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\"}"; + String expected = "{\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\",\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/" + + "im\",\"driverClassName\":\"com.mysql.jdbc.Driver\",\"validationQuery\":\"select 1\",\"props\":{\"autoDeserialize\":\"yes\",\"allowUrlInLocalInfile\":\"true\"}}"; Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam)); PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false"); @@ -358,8 +362,8 @@ public class DataSourceServiceTest { mysqlDatasourceParamDTO.setUserName("test"); mysqlDatasourceParamDTO.setPassword("123456"); connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO); - expected = "{\"user\":\"test\",\"password\":\"123456\",\"address\":\"jdbc:mysql://192.168.9.1:1521\"," - + "\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\"}"; + expected = "{\"user\":\"test\",\"password\":\"123456\",\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\"," + + "\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"driverClassName\":\"com.mysql.jdbc.Driver\",\"validationQuery\":\"select 1\"}"; Assert.assertEquals(expected, JSONUtils.toJsonString(connectionParam)); } @@ 
-396,12 +400,15 @@ public class DataSourceServiceTest { ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(postgreSqlDatasourceParam); PowerMockito.mockStatic(DatasourceUtil.class); + PowerMockito.mockStatic(DataSourceClientProvider.class); + DataSourceClientProvider clientProvider = PowerMockito.mock(DataSourceClientProvider.class); + PowerMockito.when(DataSourceClientProvider.getInstance()).thenReturn(clientProvider); Result result = dataSourceService.checkConnection(dataSourceType, connectionParam); Assert.assertEquals(Status.CONNECTION_TEST_FAILURE.getCode(), result.getCode().intValue()); Connection connection = PowerMockito.mock(Connection.class); - PowerMockito.when(DatasourceUtil.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection); + PowerMockito.when(clientProvider.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection); result = dataSourceService.checkConnection(dataSourceType, connectionParam); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/EnvironmentServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/EnvironmentServiceTest.java index b9b95ecae8e30c11f20e7010a0e2408bac6e4dc8..81ba83476dc5223ddfa017d8f59501cdb9956a33 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/EnvironmentServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/EnvironmentServiceTest.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Environment; import 
org.apache.dolphinscheduler.dao.entity.EnvironmentWorkerGroupRelation; import org.apache.dolphinscheduler.dao.entity.User; @@ -31,6 +30,8 @@ import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper; import org.apache.dolphinscheduler.dao.mapper.EnvironmentWorkerGroupRelationMapper; import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java index e308f58443bd046c3ca92a20a116fc9d504347e0..90fb173017321d8a81fa74a41ccae308d7df5ff5 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -32,6 +34,8 @@ import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; @@ -58,12 +62,15 @@ import 
org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * executor service 2 test */ @RunWith(MockitoJUnitRunner.Silent.class) public class ExecutorServiceTest { + private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class); @InjectMocks private ExecutorServiceImpl executorService; @@ -119,6 +126,7 @@ public class ExecutorServiceTest { processDefinition.setUserId(userId); processDefinition.setVersion(1); processDefinition.setCode(1L); + processDefinition.setProjectCode(projectCode); // processInstance processInstance.setId(processInstanceId); @@ -276,9 +284,9 @@ public class ExecutorServiceTest { @Test public void testStartCheckByProcessDefinedCode() { - List ids = new ArrayList<>(); - ids.add(1); - Mockito.doNothing().when(processService).recurseFindSubProcessId(1, ids); + List ids = new ArrayList<>(); + ids.add(1L); + Mockito.doNothing().when(processService).recurseFindSubProcess(1, ids); List processDefinitionList = new ArrayList<>(); ProcessDefinition processDefinition = new ProcessDefinition(); @@ -326,4 +334,45 @@ public class ExecutorServiceTest { result.put(Constants.STATUS, Status.SUCCESS); return result; } + + @Test + public void testCreateComplementToParallel() { + List result = new ArrayList<>(); + int expectedParallelismNumber = 3; + LinkedList listDate = new LinkedList<>(); + listDate.add(0); + listDate.add(1); + listDate.add(2); + listDate.add(3); + listDate.add(4); + + int listDateSize = listDate.size(); + int createCount = Math.min(listDate.size(), expectedParallelismNumber); + logger.info("In parallel mode, current expectedParallelismNumber:{}", createCount); + + int itemsPerCommand = (listDateSize / createCount); + int remainingItems = (listDateSize % createCount); + int startDateIndex = 0; + int endDateIndex = 0; + + for (int i = 1; i <= createCount; i++) { + int extra = (i <= 
remainingItems) ? 1 : 0; + int singleCommandItems = (itemsPerCommand + extra); + + if (i == 1) { + endDateIndex += singleCommandItems - 1; + } else { + startDateIndex = endDateIndex + 1; + endDateIndex += singleCommandItems; + } + + logger.info("startDate:{}, endDate:{}", listDate.get(startDateIndex), listDate.get(endDateIndex)); + result.add(listDate.get(startDateIndex) + "," + listDate.get(endDateIndex)); + } + + Assert.assertEquals("0,1", result.get(0)); + Assert.assertEquals("2,3", result.get(1)); + Assert.assertEquals("4,4", result.get(2)); + } + } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index bd8aa72fefdc2390569dda40fc2c3c6947e08e4e..bc46d248be086e691b3ac4c634dcc094da2ab550 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -20,9 +20,20 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.service.process.ProcessService; +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + import org.junit.After; import org.junit.Assert; 
import org.junit.Before; @@ -51,6 +62,15 @@ public class LoggerServiceTest { @Mock private ProcessService processService; + @Mock + private ProjectMapper projectMapper; + + @Mock + private ProjectService projectService; + + @Mock + private TaskDefinitionMapper taskDefinitionMapper; + @Before public void init() { this.loggerService.init(); @@ -113,9 +133,87 @@ public class LoggerServiceTest { } + @Test + public void testQueryLogInSpecifiedProject() { + long projectCode = 1L; + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + Project project = getProject(projectCode); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + TaskInstance taskInstance = new TaskInstance(); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + taskDefinition.setCode(1L); + //SUCCESS + taskInstance.setTaskCode(1L); + taskInstance.setId(1); + taskInstance.setHost("127.0.0.1:8080"); + taskInstance.setLogPath("/temp/log"); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); + Mockito.when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); + result = loggerService.queryLog(loginUser, projectCode, 1, 1, 1); + Assert.assertEquals(Status.SUCCESS.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); + } + + @Test + public void testGetLogBytesInSpecifiedProject() { + long projectCode = 1L; + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + Project project = getProject(projectCode); + + User loginUser = new User(); + loginUser.setId(-1); + 
loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + TaskInstance taskInstance = new TaskInstance(); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + taskDefinition.setCode(1L); + //SUCCESS + taskInstance.setTaskCode(1L); + taskInstance.setId(1); + taskInstance.setHost("127.0.0.1:8080"); + taskInstance.setLogPath("/temp/log"); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); + Mockito.when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); + loggerService.getLogBytes(loginUser, projectCode, 1); + } + + @After public void close() { this.loggerService.close(); } + /** + * get mock Project + * + * @param projectCode projectCode + * @return Project + */ + private Project getProject(long projectCode) { + Project project = new Project(); + project.setCode(projectCode); + project.setId(1); + project.setName("test"); + project.setUserId(1); + return project; + } + + private void putMsg(Map result, Status status, Object... 
statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java index dc04cd06bef4c3bfdb092d31ecae65c6c2dd0762..344ea8968ea483585c30885e4333940c063944a0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/MonitorServiceTest.java @@ -20,11 +20,12 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.MonitorServiceImpl; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.MonitorDBDao; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.apache.commons.collections.CollectionUtils; import java.util.ArrayList; import java.util.List; @@ -81,13 +82,6 @@ public class MonitorServiceTest { /*Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));*/ } - @Test - public void testGetServerListFromZK() { - //TODO need zk - /*List serverList = monitorService.getServerListFromZK(true);*/ - /*logger.info(serverList.toString());*/ - } - private List getList() { List monitorRecordList = new ArrayList<>(); monitorRecordList.add(getEntity()); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index 506bba3f92bf29af4d7e9c728d550ac50b1cf19a..5d7e87ac136f3f1afcf216caaaaa413743fa5d41 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -30,11 +30,13 @@ import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; @@ -82,6 +84,14 @@ public class ProcessDefinitionServiceTest { + "\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"},{\"name\":\"\",\"preTaskCode\":123456789," + "\"preTaskVersion\":1,\"postTaskCode\":123451234,\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"}]"; + private static final String taskDefinitionJson = "[{\"code\":123456789,\"name\":\"test1\",\"version\":1,\"description\":\"\",\"delayTime\":0,\"taskType\":\"SHELL\"," + + "\"taskParams\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 
1\",\"dependence\":{},\"conditionResult\":{\"successNode\":[],\"failedNode\":[]},\"waitStartTimeout\":{}," + + "\"switchResult\":{}},\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":1,\"timeoutFlag\":\"CLOSE\"," + + "\"timeoutNotifyStrategy\":null,\"timeout\":0,\"environmentCode\":-1},{\"code\":123451234,\"name\":\"test2\",\"version\":1,\"description\":\"\",\"delayTime\":0,\"taskType\":\"SHELL\"," + + "\"taskParams\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 2\",\"dependence\":{},\"conditionResult\":{\"successNode\":[],\"failedNode\":[]},\"waitStartTimeout\":{}," + + "\"switchResult\":{}},\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":1,\"timeoutFlag\":\"CLOSE\"," + + "\"timeoutNotifyStrategy\":\"WARN\",\"timeout\":0,\"environmentCode\":-1}]"; + @InjectMocks private ProcessDefinitionServiceImpl processDefinitionService; @@ -289,7 +299,7 @@ public class ProcessDefinitionServiceTest { processDefinitionList.add(definition); Set definitionCodes = Arrays.stream("46".split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet()); Mockito.when(processDefineMapper.queryByCodes(definitionCodes)).thenReturn(processDefinitionList); - Mockito.when(processService.saveProcessDefine(loginUser, definition, true)).thenReturn(2); + Mockito.when(processService.saveProcessDefine(loginUser, definition, Boolean.TRUE, Boolean.TRUE)).thenReturn(2); Map map3 = processDefinitionService.batchCopyProcessDefinition( loginUser, projectCode, "46", 1L); Assert.assertEquals(Status.SUCCESS, map3.get(Constants.STATUS)); @@ -321,7 +331,7 @@ public class ProcessDefinitionServiceTest { processDefinitionList.add(definition); Set definitionCodes = Arrays.stream("46".split(Constants.COMMA)).map(Long::parseLong).collect(Collectors.toSet()); Mockito.when(processDefineMapper.queryByCodes(definitionCodes)).thenReturn(processDefinitionList); - 
Mockito.when(processService.saveProcessDefine(loginUser, definition, true)).thenReturn(2); + Mockito.when(processService.saveProcessDefine(loginUser, definition, Boolean.TRUE, Boolean.TRUE)).thenReturn(2); Mockito.when(processTaskRelationMapper.queryByProcessCode(projectCode, 46L)).thenReturn(getProcessTaskRelation(projectCode)); putMsg(result, Status.SUCCESS); @@ -377,32 +387,28 @@ public class ProcessDefinitionServiceTest { Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); - List schedules = new ArrayList<>(); - schedules.add(getSchedule()); - schedules.add(getSchedule()); - Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules); + Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule()); + Mockito.when(scheduleMapper.deleteById(46)).thenReturn(1); + Mockito.when(processDefineMapper.deleteById(processDefinition.getId())).thenReturn(1); + Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1); Map schedulerGreaterThanOneRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L); - Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR, schedulerGreaterThanOneRes.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, schedulerGreaterThanOneRes.get(Constants.STATUS)); //scheduler online - schedules.clear(); Schedule schedule = getSchedule(); schedule.setReleaseState(ReleaseState.ONLINE); - schedules.add(schedule); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); - Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules); + 
Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedule); Map schedulerOnlineRes = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L); Assert.assertEquals(Status.SCHEDULE_CRON_STATE_ONLINE, schedulerOnlineRes.get(Constants.STATUS)); //delete success - schedules.clear(); schedule.setReleaseState(ReleaseState.OFFLINE); - schedules.add(schedule); Mockito.when(processDefineMapper.deleteById(46)).thenReturn(1); Mockito.when(scheduleMapper.deleteById(schedule.getId())).thenReturn(1); Mockito.when(processTaskRelationMapper.deleteByCode(project.getCode(), processDefinition.getCode())).thenReturn(1); - Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(schedules); + Mockito.when(scheduleMapper.queryByProcessDefinitionCode(46L)).thenReturn(getSchedule()); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); Map deleteSuccess = processDefinitionService.deleteProcessDefinitionByCode(loginUser, projectCode, 46L); @@ -430,13 +436,18 @@ public class ProcessDefinitionServiceTest { // project check auth success, processs definition online putMsg(result, Status.SUCCESS, projectCode); Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(getProcessDefinition()); + List processTaskRelationList = new ArrayList<>(); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(projectCode); + processTaskRelation.setProcessDefinitionCode(46L); + processTaskRelation.setPostTaskCode(123L); + processTaskRelationList.add(processTaskRelation); + Mockito.when(processService.findRelationByCode(46L, 1)).thenReturn(processTaskRelationList); Map onlineRes = processDefinitionService.releaseProcessDefinition( loginUser, projectCode, 46, ReleaseState.ONLINE); Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS)); // project check auth success, processs 
definition online - ProcessDefinition processDefinition1 = getProcessDefinition(); - processDefinition1.setResourceIds("1,2"); Map onlineWithResourceRes = processDefinitionService.releaseProcessDefinition( loginUser, projectCode, 46, ReleaseState.ONLINE); Assert.assertEquals(Status.SUCCESS, onlineWithResourceRes.get(Constants.STATUS)); @@ -482,10 +493,12 @@ public class ProcessDefinitionServiceTest { @Test public void testCheckProcessNodeList() { - Map dataNotValidRes = processDefinitionService.checkProcessNodeList(null); + Map dataNotValidRes = processDefinitionService.checkProcessNodeList(null, null); Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS)); - Map taskEmptyRes = processDefinitionService.checkProcessNodeList(taskRelationJson); + List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); + + Map taskEmptyRes = processDefinitionService.checkProcessNodeList(taskRelationJson, taskDefinitionLogs); Assert.assertEquals(Status.PROCESS_DAG_IS_EMPTY, taskEmptyRes.get(Constants.STATUS)); } @@ -513,6 +526,7 @@ public class ProcessDefinitionServiceTest { putMsg(result, Status.SUCCESS, projectCode); Mockito.when(processService.genDagData(Mockito.any())).thenReturn(new DagData(processDefinition, null, null)); Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); + Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId())).thenReturn(Lists.newArrayList(project)); Map dataNotValidRes = processDefinitionService.getTaskNodeListByDefinitionCode(loginUser, projectCode, 46L); Assert.assertEquals(Status.SUCCESS, dataNotValidRes.get(Constants.STATUS)); } @@ -542,8 +556,13 @@ public class ProcessDefinitionServiceTest { ProcessDefinition processDefinition = getProcessDefinition(); List processDefinitionList = new ArrayList<>(); processDefinitionList.add(processDefinition); + 
Mockito.when(processDefineMapper.queryByCodes(defineCodeSet)).thenReturn(processDefinitionList); Mockito.when(processService.genDagData(Mockito.any())).thenReturn(new DagData(processDefinition, null, null)); + Project project1 = getProject(projectCode); + List projects = new ArrayList<>(); + projects.add(project1); + Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId())).thenReturn(projects); Map successRes = processDefinitionService.getNodeListMapByDefinitionCodes(loginUser, projectCode, defineCodes); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); @@ -572,17 +591,17 @@ public class ProcessDefinitionServiceTest { public void testViewTree() { //process definition not exist ProcessDefinition processDefinition = getProcessDefinition(); - Map processDefinitionNullRes = processDefinitionService.viewTree(46, 10); + Map processDefinitionNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10); Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS)); //task instance not exist Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); Mockito.when(processService.genDagGraph(processDefinition)).thenReturn(new DAG<>()); - Map taskNullRes = processDefinitionService.viewTree(46, 10); + Map taskNullRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10); Assert.assertEquals(Status.SUCCESS, taskNullRes.get(Constants.STATUS)); //task instance exist - Map taskNotNuLLRes = processDefinitionService.viewTree(46, 10); + Map taskNotNuLLRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10); Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); } @@ -591,7 +610,7 @@ public class ProcessDefinitionServiceTest { ProcessDefinition processDefinition = getProcessDefinition(); Mockito.when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); 
Mockito.when(processService.genDagGraph(processDefinition)).thenReturn(new DAG<>()); - Map taskNotNuLLRes = processDefinitionService.viewTree(46, 10); + Map taskNotNuLLRes = processDefinitionService.viewTree(processDefinition.getProjectCode(), 46, 10); Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); } @@ -660,6 +679,7 @@ public class ProcessDefinitionServiceTest { processDefinition.setTenantId(1); processDefinition.setDescription(""); processDefinition.setCode(46L); + processDefinition.setVersion(1); return processDefinition; } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index ccce682e55cac940caef6a94890a37fd3c671c12..248ca6860877b873daf867fd87b1cc5f9388e24d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -36,10 +36,13 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; @@ -48,6 +51,7 @@ import 
org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -111,6 +115,8 @@ public class ProcessInstanceServiceTest { @Mock TenantMapper tenantMapper; + @Mock + TaskDefinitionMapper taskDefinitionMapper; private String shellJson = "[{\"name\":\"\",\"preTaskCode\":0,\"preTaskVersion\":0,\"postTaskCode\":123456789," + "\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":\"{}\"},{\"name\":\"\",\"preTaskCode\":123456789," @@ -124,6 +130,17 @@ public class ProcessInstanceServiceTest { + ":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}},\"flag\":\"NORMAL\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\"," + "\"failRetryTimes\":\"0\",\"failRetryInterval\":\"1\",\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"\",\"timeout\":null,\"delayTime\":\"0\"}]"; + private String taskRelationJson = "[{\"name\":\"\",\"preTaskCode\":4254865123776,\"preTaskVersion\":1,\"postTaskCode\":4254862762304,\"postTaskVersion\":1,\"conditionType\":0," + + "\"conditionParams\":{}},{\"name\":\"\",\"preTaskCode\":0,\"preTaskVersion\":0,\"postTaskCode\":4254865123776,\"postTaskVersion\":1,\"conditionType\":0,\"conditionParams\":{}}]"; + + private String taskDefinitionJson = "[{\"code\":4254862762304,\"name\":\"test1\",\"version\":1,\"description\":\"\",\"delayTime\":0,\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[]," + + "\"localParams\":[],\"rawScript\":\"echo 
1\",\"dependence\":{},\"conditionResult\":{\"successNode\":[],\"failedNode\":[]},\"waitStartTimeout\":{},\"switchResult\":{}},\"flag\":\"YES\"," + + "\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":1,\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":null,\"timeout\":0," + + "\"environmentCode\":-1},{\"code\":4254865123776,\"name\":\"test2\",\"version\":1,\"description\":\"\",\"delayTime\":0,\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[]," + + "\"localParams\":[],\"rawScript\":\"echo 2\",\"dependence\":{},\"conditionResult\":{\"successNode\":[],\"failedNode\":[]},\"waitStartTimeout\":{},\"switchResult\":{}},\"flag\":\"YES\"," + + "\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":1,\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"WARN\",\"timeout\":0," + + "\"environmentCode\":-1}]"; + @Test public void testQueryProcessInstanceList() { long projectCode = 1L; @@ -255,6 +272,7 @@ public class ProcessInstanceServiceTest { ProcessInstance processInstance = getProcessInstance(); putMsg(result, Status.SUCCESS, projectCode); ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setProjectCode(projectCode); when(projectMapper.queryByCode(projectCode)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); when(processService.findProcessInstanceDetailById(processInstance.getId())).thenReturn(processInstance); @@ -347,6 +365,9 @@ public class ProcessInstanceServiceTest { taskInstance.setProcessInstanceId(1); putMsg(result, Status.SUCCESS, projectCode); when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + when(taskDefinitionMapper.queryByCode(taskInstance.getTaskCode())).thenReturn(taskDefinition); Map notSubprocessRes = 
processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectCode, 1); Assert.assertEquals(Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, notSubprocessRes.get(Constants.STATUS)); @@ -410,24 +431,28 @@ public class ProcessInstanceServiceTest { ProcessDefinition processDefinition = getProcessDefinition(); processDefinition.setId(1); processDefinition.setUserId(1); + processDefinition.setProjectCode(projectCode); Tenant tenant = getTenant(); when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); when(tenantMapper.queryByTenantCode("root")).thenReturn(tenant); when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant); when(processService.updateProcessInstance(processInstance)).thenReturn(1); - when(processService.saveProcessDefine(loginUser, processDefinition, false)).thenReturn(1); - when(processDefinitionService.checkProcessNodeList(shellJson)).thenReturn(result); + when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.FALSE)).thenReturn(1); + + List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); + when(processDefinitionService.checkProcessNodeList(taskRelationJson, taskDefinitionLogs)).thenReturn(result); putMsg(result, Status.SUCCESS, projectCode); Map processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1, - shellJson, taskJson,"2020-02-21 00:00:00", true, "", "", 0, "root"); + taskRelationJson, taskDefinitionJson,"2020-02-21 00:00:00", true, "", "", 0, "root"); Assert.assertEquals(Status.SUCCESS, processInstanceFinishRes.get(Constants.STATUS)); //success when(processDefineMapper.queryByCode(46L)).thenReturn(processDefinition); putMsg(result, Status.SUCCESS, projectCode); + when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.FALSE, Boolean.FALSE)).thenReturn(1); Map successRes = processInstanceService.updateProcessInstance(loginUser, projectCode, 1, - 
shellJson, taskJson,"2020-02-21 00:00:00", false, "", "", 0, "root"); + taskRelationJson, taskDefinitionJson,"2020-02-21 00:00:00", Boolean.FALSE, "", "", 0, "root"); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -497,7 +522,7 @@ public class ProcessInstanceServiceTest { processInstance.setScheduleTime(new Date()); processInstance.setGlobalParams(""); when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance); - Map successRes = processInstanceService.viewVariables(1); + Map successRes = processInstanceService.viewVariables(1L,1); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -522,7 +547,7 @@ public class ProcessInstanceServiceTest { when(processService.genDagGraph(Mockito.any(ProcessDefinition.class))) .thenReturn(graph); - Map successRes = processInstanceService.viewGantt(1); + Map successRes = processInstanceService.viewGantt(0L, 1); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -628,5 +653,4 @@ public class ProcessInstanceServiceTest { result.put(Constants.MSG, status.getMsg()); } } - -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..f21de2aed4baed4ddcba2cc07ed87d22f432e1c1 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessTaskRelationServiceTest.java @@ -0,0 +1,529 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProcessTaskRelationServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationLogMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessTaskRelationMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import 
org.apache.commons.collections.CollectionUtils; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +import com.google.common.collect.Lists; + +/** + * process task instance relation service test + */ +@RunWith(MockitoJUnitRunner.Silent.class) +public class ProcessTaskRelationServiceTest { + + @InjectMocks + ProcessTaskRelationServiceImpl processTaskRelationService; + + @Mock + private ProjectMapper projectMapper; + + @Mock + private ProjectServiceImpl projectService; + + @Mock + private ProcessTaskRelationMapper processTaskRelationMapper; + + @Mock + private TaskDefinitionLogMapper taskDefinitionLogMapper; + + @Mock + private ProcessDefinitionMapper processDefinitionMapper; + + @Mock + private TaskDefinitionMapper taskDefinitionMapper; + + @Mock + private ProcessTaskRelationLogMapper processTaskRelationLogMapper; + + @Mock + private ProcessService processService; + + /** + * get Mock Admin User + * + * @return admin user + */ + private User getAdminUser() { + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserName("admin"); + loginUser.setUserType(UserType.GENERAL_USER); + return loginUser; + } + + /** + * get mock Project + * + * @param projectCode projectCode + * @return Project + */ + private Project getProject(long projectCode) { + Project project = new Project(); + project.setCode(projectCode); + project.setId(1); + project.setName("project_test1"); + project.setUserId(1); + return project; + } + + private void putMsg(Map result, Status status, Object... 
statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + private TaskDefinitionLog buildTaskDefinitionLog(long projectCode, long code, int version) { + + TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog() { + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof TaskDefinitionLog)) { + return false; + } + TaskDefinitionLog that = (TaskDefinitionLog) o; + return getCode() == that.getCode() + && getVersion() == that.getVersion() + && getProjectCode() == that.getProjectCode(); + } + + @Override + public int hashCode() { + return Objects.hash(getCode(), getVersion(), getProjectCode()); + } + }; + taskDefinitionLog.setProjectCode(projectCode); + taskDefinitionLog.setCode(code); + taskDefinitionLog.setVersion(version); + return taskDefinitionLog; + } + + private TaskDefinition buildTaskDefinition(long projectCode, long code, int version) { + + TaskDefinition taskDefinition = new TaskDefinition() { + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof TaskDefinition)) { + return false; + } + TaskDefinition that = (TaskDefinition) o; + return getCode() == that.getCode() + && getVersion() == that.getVersion() + && getProjectCode() == that.getProjectCode(); + } + + @Override + public int hashCode() { + return Objects.hash(getCode(), getVersion(), getProjectCode()); + } + }; + taskDefinition.setProjectCode(projectCode); + taskDefinition.setCode(code); + taskDefinition.setVersion(version); + return taskDefinition; + } + + private List getProcessTaskUpstreamRelationList(long projectCode, long taskCode) { + ProcessTaskRelation processTaskRelationUpstream0 = new ProcessTaskRelation(); + processTaskRelationUpstream0.setPostTaskCode(taskCode); + 
processTaskRelationUpstream0.setPreTaskVersion(1); + processTaskRelationUpstream0.setProjectCode(projectCode); + processTaskRelationUpstream0.setPreTaskCode(123); + processTaskRelationUpstream0.setProcessDefinitionCode(123); + ProcessTaskRelation processTaskRelationUpstream1 = new ProcessTaskRelation(); + processTaskRelationUpstream1.setPostTaskCode(taskCode); + processTaskRelationUpstream1.setPreTaskVersion(1); + processTaskRelationUpstream1.setPreTaskCode(123); + processTaskRelationUpstream1.setProcessDefinitionCode(124); + processTaskRelationUpstream1.setProjectCode(projectCode); + ProcessTaskRelation processTaskRelationUpstream2 = new ProcessTaskRelation(); + processTaskRelationUpstream2.setPostTaskCode(taskCode); + processTaskRelationUpstream2.setPreTaskVersion(2); + processTaskRelationUpstream2.setPreTaskCode(123); + processTaskRelationUpstream2.setProcessDefinitionCode(125); + processTaskRelationUpstream2.setProjectCode(projectCode); + List processTaskRelationList = new ArrayList<>(); + processTaskRelationList.add(processTaskRelationUpstream0); + processTaskRelationList.add(processTaskRelationUpstream1); + processTaskRelationList.add(processTaskRelationUpstream2); + return processTaskRelationList; + } + + private List getProcessTaskDownstreamRelationList(long projectCode,long taskCode) { + ProcessTaskRelation processTaskRelationDownstream0 = new ProcessTaskRelation(); + processTaskRelationDownstream0.setPreTaskCode(taskCode); + processTaskRelationDownstream0.setPostTaskCode(456); + processTaskRelationDownstream0.setPostTaskVersion(1); + processTaskRelationDownstream0.setProjectCode(projectCode); + ProcessTaskRelation processTaskRelationDownstream1 = new ProcessTaskRelation(); + processTaskRelationDownstream1.setPreTaskCode(taskCode); + processTaskRelationDownstream1.setPostTaskCode(456); + processTaskRelationDownstream1.setPostTaskVersion(1); + processTaskRelationDownstream1.setProjectCode(projectCode); + ProcessTaskRelation processTaskRelationDownstream2 = 
new ProcessTaskRelation(); + processTaskRelationDownstream2.setPreTaskCode(taskCode); + processTaskRelationDownstream2.setPostTaskCode(4567); + processTaskRelationDownstream2.setPostTaskVersion(1); + processTaskRelationDownstream2.setProjectCode(projectCode); + List processTaskRelationList = new ArrayList<>(); + processTaskRelationList.add(processTaskRelationDownstream0); + processTaskRelationList.add(processTaskRelationDownstream1); + processTaskRelationList.add(processTaskRelationDownstream2); + return processTaskRelationList; + } + + private ProcessDefinition getProcessDefinition() { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(1); + processDefinition.setProjectCode(1L); + processDefinition.setName("test_pdf"); + processDefinition.setTenantId(1); + processDefinition.setDescription(""); + processDefinition.setCode(1L); + processDefinition.setVersion(1); + return processDefinition; + } + + private TaskDefinition getTaskDefinition() { + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(1L); + taskDefinition.setCode(1L); + taskDefinition.setVersion(1); + taskDefinition.setTaskType(TaskType.SHELL.getDesc()); + return taskDefinition; + } + + @Test + public void testCreateProcessTaskRelation() { + long projectCode = 1L; + long processDefinitionCode = 1L; + long preTaskCode = 0L; + long postTaskCode = 1L; + + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition()); + Mockito.when(processTaskRelationMapper.queryByCode(projectCode, 
processDefinitionCode, preTaskCode, postTaskCode)).thenReturn(Lists.newArrayList()); + Mockito.when(taskDefinitionMapper.queryByCode(postTaskCode)).thenReturn(getTaskDefinition()); + List processTaskRelationList = Lists.newArrayList(); + ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(); + processTaskRelationLog.setProjectCode(projectCode); + processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode); + processTaskRelationLog.setPreTaskCode(0L); + processTaskRelationLog.setPreTaskVersion(0); + processTaskRelationLog.setPostTaskCode(postTaskCode); + processTaskRelationLog.setPostTaskVersion(1); + processTaskRelationList.add(processTaskRelationLog); + Mockito.when(processTaskRelationMapper.batchInsert(processTaskRelationList)).thenReturn(1); + Mockito.when(processTaskRelationLogMapper.batchInsert(processTaskRelationList)).thenReturn(1); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + + @Test + public void testQueryDownstreamRelation() { + long projectCode = 1L; + long taskCode = 2L; + + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + + List processTaskRelationList = getProcessTaskDownstreamRelationList(projectCode,taskCode); + + Mockito.when(processTaskRelationMapper.queryDownstreamByCode(projectCode,taskCode)) + .thenReturn(processTaskRelationList); + + if (CollectionUtils.isNotEmpty(processTaskRelationList)) { + Set taskDefinitions = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinition taskDefinition = buildTaskDefinition( + processTaskRelation.getProjectCode(), + processTaskRelation.getPostTaskCode(), + 
processTaskRelation.getPostTaskVersion()); + return taskDefinition; + }) + .collect(Collectors.toSet()); + + Set taskDefinitionLogSet = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog( + processTaskRelation.getProjectCode(), + processTaskRelation.getPostTaskCode(), + processTaskRelation.getPostTaskVersion() + ); + return taskDefinitionLog; + }) + .collect(Collectors.toSet()); + List taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList()); + Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions)) + .thenReturn(taskDefinitionLogList); + } + Map relation = processTaskRelationService + .queryDownstreamRelation(loginUser, projectCode, taskCode); + Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); + Assert.assertEquals(2, ((List) relation.get("data")).size()); + } + + @Test + public void testQueryUpstreamRelation() { + long projectCode = 1L; + long taskCode = 2L; + + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + List processTaskRelationList = getProcessTaskUpstreamRelationList(projectCode,taskCode); + + Mockito.when(processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode)).thenReturn(processTaskRelationList); + + if (CollectionUtils.isNotEmpty(processTaskRelationList)) { + Set taskDefinitions = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinition taskDefinition = buildTaskDefinition( + processTaskRelation.getProjectCode(), + processTaskRelation.getPreTaskCode(), + processTaskRelation.getPreTaskVersion()); + return 
taskDefinition; + }) + .collect(Collectors.toSet()); + + Set taskDefinitionLogSet = processTaskRelationList + .stream() + .map(processTaskRelation -> { + TaskDefinitionLog taskDefinitionLog = buildTaskDefinitionLog( + processTaskRelation.getProjectCode(), + processTaskRelation.getPreTaskCode(), + processTaskRelation.getPreTaskVersion()); + return taskDefinitionLog; + }) + .collect(Collectors.toSet()); + List taskDefinitionLogList = taskDefinitionLogSet.stream().collect(Collectors.toList()); + Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitions)) + .thenReturn(taskDefinitionLogList); + } + Map relation = processTaskRelationService + .queryUpstreamRelation(loginUser, projectCode, taskCode); + Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); + Assert.assertEquals(2, ((List) relation.get("data")).size()); + } + + @Test + public void testDeleteDownstreamRelation() { + long projectCode = 1L; + long taskCode = 2L; + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + List processTaskRelationList = new ArrayList<>(); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(projectCode); + processTaskRelation.setProcessDefinitionCode(1L); + processTaskRelation.setPreTaskCode(taskCode); + processTaskRelation.setPostTaskCode(123L); + processTaskRelationList.add(processTaskRelation); + Mockito.when(processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode)).thenReturn(processTaskRelationList); + ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation); + 
Mockito.when(processTaskRelationMapper.deleteRelation(processTaskRelationLog)).thenReturn(1); + Mockito.when(processTaskRelationLogMapper.deleteRelation(processTaskRelationLog)).thenReturn(1); + ProcessDefinition processDefinition = getProcessDefinition(); + Mockito.when(processDefinitionMapper.queryByCode(1L)).thenReturn(processDefinition); + Mockito.when(processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE)).thenReturn(1); + Map result1 = processTaskRelationService.deleteDownstreamRelation(loginUser, projectCode, "123", taskCode); + Assert.assertEquals(Status.SUCCESS, result1.get(Constants.STATUS)); + } + + @Test + public void testDeleteUpstreamRelation() { + long projectCode = 1L; + long taskCode = 2L; + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + List processTaskRelationList = Lists.newArrayList(); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(projectCode); + processTaskRelation.setProcessDefinitionCode(1L); + processTaskRelation.setPreTaskCode(0L); + processTaskRelation.setPreTaskVersion(0); + processTaskRelation.setPostTaskCode(taskCode); + processTaskRelation.setPostTaskVersion(1); + processTaskRelationList.add(processTaskRelation); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode)).thenReturn(processTaskRelationList); + Mockito.when(processDefinitionMapper.queryByCode(1L)).thenReturn(getProcessDefinition()); + Mockito.when(processTaskRelationMapper.queryByProcessCode(projectCode, 1L)).thenReturn(processTaskRelationList); + List relationLogs = 
processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); + Mockito.when(processService.saveTaskRelation(loginUser, 1L, 1L, + 1, relationLogs, Lists.newArrayList(), Boolean.TRUE)).thenReturn(0); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + + @Test + public void testDeleteTaskProcessRelation() { + long projectCode = 1L; + long taskCode = 1L; + long processDefinitionCode = 1L; + long preTaskCode = 4L; + long postTaskCode = 5L; + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processTaskRelationMapper.queryByCode(projectCode, processDefinitionCode, preTaskCode, postTaskCode)).thenReturn(Lists.newArrayList()); + Mockito.when(processDefinitionMapper.queryByCode(processDefinitionCode)).thenReturn(getProcessDefinition()); + Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition()); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setTaskType(TaskType.CONDITIONS.getDesc()); + Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(taskDefinition); + List processTaskRelationList = Lists.newArrayList(); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(projectCode); + processTaskRelation.setProcessDefinitionCode(1L); + processTaskRelation.setPreTaskCode(0L); + processTaskRelation.setPreTaskVersion(0); + processTaskRelation.setPostTaskCode(taskCode); + processTaskRelation.setPostTaskVersion(1); + processTaskRelationList.add(processTaskRelation); + Mockito.when(processTaskRelationMapper.queryByProcessCode(projectCode, 
processDefinitionCode)).thenReturn(processTaskRelationList); + List relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); + Mockito.when(processService.saveTaskRelation(loginUser, 1L, 1L, + 1, relationLogs, Lists.newArrayList(), Boolean.TRUE)).thenReturn(0); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + + @Test + public void testDeleteEdge() { + long projectCode = 1L; + long processDefinitionCode = 3L; + long preTaskCode = 0L; + long postTaskCode = 5L; + Project project = getProject(projectCode); + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + processTaskRelation.setProjectCode(projectCode); + processTaskRelation.setProcessDefinitionCode(processDefinitionCode); + processTaskRelation.setProcessDefinitionVersion(1); + processTaskRelation.setPreTaskCode(preTaskCode); + processTaskRelation.setPostTaskCode(postTaskCode); + ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(processTaskRelation); + processTaskRelationLog.setOperator(loginUser.getId()); + List processTaskRelationList = new ArrayList<>(); + processTaskRelationList.add(processTaskRelation); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); + Mockito.when(processTaskRelationMapper.queryByProcessCode(projectCode, 1L)).thenReturn(processTaskRelationList); + List relationLogs = processTaskRelationList.stream().map(ProcessTaskRelationLog::new).collect(Collectors.toList()); + Mockito.when(processService.saveTaskRelation(loginUser, 1L, 1L, + 1, relationLogs, 
Lists.newArrayList(), Boolean.TRUE)).thenReturn(0); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java index 61c8a1d8a2e0aab6f1caf85e5803e9d3ffe33aa2..de782df08744205ca5db77336e936bf630c3d569 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.ProjectUser; @@ -33,6 +32,8 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -275,6 +276,38 @@ public class ProjectServiceTest { Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } + @Test + public void testQueryAuthorizedUser() { + final User loginUser = this.getLoginUser(); + + // Failure 1: PROJECT_NOT_FOUND + Map result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L); + logger.info("FAILURE 1: {}", result.toString()); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); + + // Failure 2: USER_NO_OPERATION_PROJECT_PERM + 
loginUser.setId(100); + Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(this.getProject()); + result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L); + logger.info("FAILURE 2: {}", result.toString()); + Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS)); + + // SUCCESS + loginUser.setUserType(UserType.ADMIN_USER); + Mockito.when(this.userMapper.queryAuthedUserListByProjectId(1)).thenReturn(this.getUserList()); + result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L); + logger.info("SUCCESS 1: {}", result.toString()); + List users = (List) result.get(Constants.DATA_LIST); + Assert.assertTrue(CollectionUtils.isNotEmpty(users)); + + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + result = this.projectService.queryAuthorizedUser(loginUser, 3682329499136L); + logger.info("SUCCESS 2: {}", result.toString()); + users = (List) result.get(Constants.DATA_LIST); + Assert.assertTrue(CollectionUtils.isNotEmpty(users)); + } + @Test public void testQueryCreatedProject() { @@ -364,6 +397,28 @@ public class ProjectServiceTest { return loginUser; } + /** + * Get general user + * @return + */ + private User getGeneralUser() { + User user = new User(); + user.setUserType(UserType.GENERAL_USER); + user.setUserName("userTest0001"); + user.setUserPassword("userTest0001"); + return user; + } + + /** + * Get user list + * @return + */ + private List getUserList() { + List userList = new ArrayList<>(); + userList.add(this.getGeneralUser()); + return userList; + } + /** * get project user */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java index 7d69d6598c0bf437c09b496ae60bb8a1ea34380b..f3167a560a62df9863cdfafa2d3bb5781a2697b4 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/QueueServiceTest.java @@ -23,12 +23,13 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Queue; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.QueueMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -72,7 +73,7 @@ public class QueueServiceTest { } @After - public void after(){ + public void after() { } @Test @@ -81,7 +82,7 @@ public class QueueServiceTest { Mockito.when(queueMapper.selectList(null)).thenReturn(getQueueList()); Map result = queueService.queryList(getLoginUser()); logger.info(result.toString()); - List queueList = (List) result.get(Constants.DATA_LIST); + List queueList = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(queueList)); } @@ -89,13 +90,13 @@ public class QueueServiceTest { @Test public void testQueryListPage() { - IPage page = new Page<>(1,10); + IPage page = new Page<>(1, 10); page.setTotal(1L); page.setRecords(getQueueList()); Mockito.when(queueMapper.queryQueuePaging(Mockito.any(Page.class), Mockito.eq(queueName))).thenReturn(page); - Result result = queueService.queryList(getLoginUser(),queueName,1,10); + Result result = queueService.queryList(getLoginUser(), queueName, 1, 10); logger.info(result.toString()); - PageInfo pageInfo = (PageInfo) result.getData(); + PageInfo pageInfo = (PageInfo) result.getData(); 
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); } @@ -103,17 +104,17 @@ public class QueueServiceTest { public void testCreateQueue() { // queue is null - Map result = queueService.createQueue(getLoginUser(),null,queueName); + Map result = queueService.createQueue(getLoginUser(), null, queueName); logger.info(result.toString()); - Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS)); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); // queueName is null - result = queueService.createQueue(getLoginUser(),queueName,null); + result = queueService.createQueue(getLoginUser(), queueName, null); logger.info(result.toString()); - Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS)); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); // correct - result = queueService.createQueue(getLoginUser(),queueName,queueName); + result = queueService.createQueue(getLoginUser(), queueName, queueName); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @@ -125,25 +126,25 @@ public class QueueServiceTest { Mockito.when(queueMapper.existQueue(null, "test")).thenReturn(true); // not exist - Map result = queueService.updateQueue(getLoginUser(),0,"queue",queueName); + Map result = queueService.updateQueue(getLoginUser(), 0, "queue", queueName); logger.info(result.toString()); - Assert.assertEquals(Status.QUEUE_NOT_EXIST.getCode(),((Status)result.get(Constants.STATUS)).getCode()); + Assert.assertEquals(Status.QUEUE_NOT_EXIST.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); //no need update - result = queueService.updateQueue(getLoginUser(),1,queueName,queueName); + result = queueService.updateQueue(getLoginUser(), 1, queueName, queueName); logger.info(result.toString()); - 
Assert.assertEquals(Status.NEED_NOT_UPDATE_QUEUE.getCode(),((Status)result.get(Constants.STATUS)).getCode()); + Assert.assertEquals(Status.NEED_NOT_UPDATE_QUEUE.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); //queue exist - result = queueService.updateQueue(getLoginUser(),1,"test",queueName); + result = queueService.updateQueue(getLoginUser(), 1, "test", queueName); logger.info(result.toString()); - Assert.assertEquals(Status.QUEUE_VALUE_EXIST.getCode(),((Status)result.get(Constants.STATUS)).getCode()); + Assert.assertEquals(Status.QUEUE_VALUE_EXIST.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); // queueName exist - result = queueService.updateQueue(getLoginUser(),1,"test1","test"); + result = queueService.updateQueue(getLoginUser(), 1, "test1", "test"); logger.info(result.toString()); - Assert.assertEquals(Status.QUEUE_NAME_EXIST.getCode(),((Status)result.get(Constants.STATUS)).getCode()); + Assert.assertEquals(Status.QUEUE_NAME_EXIST.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); //success - result = queueService.updateQueue(getLoginUser(),1,"test1","test1"); + result = queueService.updateQueue(getLoginUser(), 1, "test1", "test1"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getCode(),((Status)result.get(Constants.STATUS)).getCode()); + Assert.assertEquals(Status.SUCCESS.getCode(), ((Status) result.get(Constants.STATUS)).getCode()); } @@ -154,27 +155,27 @@ public class QueueServiceTest { Mockito.when(queueMapper.existQueue(null, queueName)).thenReturn(true); //queue null - Result result = queueService.verifyQueue(null,queueName); + Result result = queueService.verifyQueue(null, queueName); logger.info(result.toString()); Assert.assertEquals(result.getCode().intValue(), Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode()); //queueName null - result = queueService.verifyQueue(queueName,null); + result = queueService.verifyQueue(queueName, null); logger.info(result.toString()); 
Assert.assertEquals(result.getCode().intValue(), Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode()); //exist queueName - result = queueService.verifyQueue(queueName,queueName); + result = queueService.verifyQueue(queueName, queueName); logger.info(result.toString()); Assert.assertEquals(result.getCode().intValue(), Status.QUEUE_NAME_EXIST.getCode()); //exist queue - result = queueService.verifyQueue(queueName,"test"); + result = queueService.verifyQueue(queueName, "test"); logger.info(result.toString()); Assert.assertEquals(result.getCode().intValue(), Status.QUEUE_VALUE_EXIST.getCode()); // success - result = queueService.verifyQueue("test","test"); + result = queueService.verifyQueue("test", "test"); logger.info(result.toString()); Assert.assertEquals(result.getCode().intValue(), Status.SUCCESS.getCode()); @@ -182,7 +183,6 @@ public class QueueServiceTest { /** * create admin user - * @return */ private User getLoginUser() { @@ -200,7 +200,6 @@ public class QueueServiceTest { /** * get queue - * @return */ private Queue getQueue() { Queue queue = new Queue(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java index 65a087286b8211d2dc844986c80bfa5290262a43..141f9de15397517979432e573965bef361c749b3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java @@ -17,14 +17,16 @@ package org.apache.dolphinscheduler.api.service; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.google.common.io.Files; +import org.apache.commons.collections.CollectionUtils; import org.apache.dolphinscheduler.api.enums.Status; import 
org.apache.dolphinscheduler.api.service.impl.ResourcesServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; @@ -32,19 +34,8 @@ import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; -import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; -import org.apache.dolphinscheduler.dao.mapper.TenantMapper; -import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -60,15 +51,20 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockMultipartFile; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * resources service test */ @RunWith(PowerMockRunner.class) 
@PowerMockIgnore({"sun.security.*", "javax.net.*"}) -@PrepareForTest({HadoopUtils.class, PropertyUtils.class, FileUtils.class, org.apache.dolphinscheduler.api.utils.FileUtils.class}) +@PrepareForTest({HadoopUtils.class, PropertyUtils.class, + FileUtils.class, org.apache.dolphinscheduler.api.utils.FileUtils.class, + Files.class}) public class ResourcesServiceTest { private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); @@ -99,9 +95,9 @@ public class ResourcesServiceTest { @Before public void setUp() { - PowerMockito.mockStatic(HadoopUtils.class); PowerMockito.mockStatic(FileUtils.class); + PowerMockito.mockStatic(Files.class); PowerMockito.mockStatic(org.apache.dolphinscheduler.api.utils.FileUtils.class); try { // new HadoopUtils @@ -132,15 +128,15 @@ public class ResourcesServiceTest { //RESOURCE_SUFFIX_FORBID_CHANGE mockMultipartFile = new MockMultipartFile("test.pdf", "test.pdf", "pdf", "test".getBytes()); - PowerMockito.when(FileUtils.suffix("test.pdf")).thenReturn("pdf"); - PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); + PowerMockito.when(Files.getFileExtension("test.pdf")).thenReturn("pdf"); + PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar"); result = resourcesService.createResource(user, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, mockMultipartFile, -1, "/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(), result.getMsg()); //UDF_RESOURCE_SUFFIX_NOT_JAR mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf", "ResourcesServiceTest.pdf", "pdf", "test".getBytes()); - PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.pdf")).thenReturn("pdf"); + PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.pdf")).thenReturn("pdf"); result = resourcesService.createResource(user, "ResourcesServiceTest.pdf", "ResourcesServiceTest", ResourceType.UDF, 
mockMultipartFile, -1, "/"); logger.info(result.toString()); Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(), result.getMsg()); @@ -268,10 +264,10 @@ public class ResourcesServiceTest { resourcePage.setRecords(getResourceList()); Mockito.when(resourcesMapper.queryResourcePaging(Mockito.any(Page.class), - Mockito.eq(0), Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"), Mockito.any())).thenReturn(resourcePage); + Mockito.eq(0), Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"), Mockito.any())).thenReturn(resourcePage); Result result = resourcesService.queryResourceListPaging(loginUser, -1, ResourceType.FILE, "test", 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getCode(), (int)result.getCode()); + Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); PageInfo pageInfo = (PageInfo) result.getData(); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getTotalList())); @@ -407,7 +403,7 @@ public class ResourcesServiceTest { //USER_NOT_EXIST PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); - PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); + PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar"); result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); @@ -508,7 +504,7 @@ public class ResourcesServiceTest { //USER_NOT_EXIST PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); - PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); + PowerMockito.when(Files.getFileExtension("ResourcesServiceTest.jar")).thenReturn("jar"); result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); @@ -717,4 +713,4 @@ public class ResourcesServiceTest { resources.add(resource); return 
resources; } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java index 238d1d08fcca5808b2bfa07e85aa7f071f73f619..2f8079c55891dfb790578217ab4e1485fec9e328 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java @@ -103,6 +103,7 @@ public class SchedulerServiceTest { Project project = getProject(projectName, projectCode); ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectCode(projectCode); Schedule schedule = new Schedule(); schedule.setId(1); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java index 1651950c523967a5a5fb475677ca85efa238e059..42ad000fd0cc39d61970ca4720d5fea8da3a2bd4 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskDefinitionServiceImplTest.java @@ -21,6 +21,8 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.service.impl.TaskDefinitionServiceImpl; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; 
@@ -71,7 +73,6 @@ public class TaskDefinitionServiceImplTest { @Mock private ProcessTaskRelationMapper processTaskRelationMapper; - ; @Test public void createTaskDefinition() { @@ -96,7 +97,7 @@ public class TaskDefinitionServiceImplTest { + "\"workerGroup\":\"default\",\"failRetryTimes\":0,\"failRetryInterval\":0,\"timeoutFlag\":0," + "\"timeoutNotifyStrategy\":0,\"timeout\":0,\"delayTime\":0,\"resourceIds\":\"\"}]"; List taskDefinitions = JSONUtils.toList(createTaskDefinitionJson, TaskDefinitionLog.class); - Mockito.when(processService.saveTaskDefine(loginUser, projectCode, taskDefinitions)).thenReturn(1); + Mockito.when(processService.saveTaskDefine(loginUser, projectCode, taskDefinitions, Boolean.TRUE)).thenReturn(1); Map relation = taskDefinitionService .createTaskDefinition(loginUser, projectCode, createTaskDefinitionJson); Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); @@ -163,6 +164,7 @@ public class TaskDefinitionServiceImplTest { @Test public void deleteTaskDefinitionByCode() { long projectCode = 1L; + long taskCode = 1L; Project project = getProject(projectCode); Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(project); @@ -174,13 +176,14 @@ public class TaskDefinitionServiceImplTest { Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, projectCode); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectCode)).thenReturn(result); - Mockito.when(processTaskRelationMapper.queryByTaskCode(Mockito.anyLong())) + Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(getTaskDefinition()); + Mockito.when(processTaskRelationMapper.queryDownstreamByTaskCode(taskCode)) .thenReturn(new ArrayList<>()); - Mockito.when(taskDefinitionMapper.deleteByCode(Mockito.anyLong())) + Mockito.when(taskDefinitionMapper.deleteByCode(taskCode)) .thenReturn(1); Map relation = taskDefinitionService - .deleteTaskDefinitionByCode(loginUser, projectCode, Mockito.anyLong()); + 
.deleteTaskDefinitionByCode(loginUser, projectCode, taskCode); Assert.assertEquals(Status.SUCCESS, relation.get(Constants.STATUS)); } @@ -204,9 +207,10 @@ public class TaskDefinitionServiceImplTest { Mockito.when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, version)) .thenReturn(new TaskDefinitionLog()); - + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); Mockito.when(taskDefinitionMapper.queryByCode(taskCode)) - .thenReturn(new TaskDefinition()); + .thenReturn(taskDefinition); Mockito.when(taskDefinitionMapper.updateById(new TaskDefinitionLog())).thenReturn(1); Map relation = taskDefinitionService .switchVersion(loginUser, projectCode, taskCode, version); @@ -238,6 +242,15 @@ public class TaskDefinitionServiceImplTest { return project; } + private TaskDefinition getTaskDefinition() { + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(1L); + taskDefinition.setCode(1L); + taskDefinition.setVersion(1); + taskDefinition.setTaskType(TaskType.SHELL.getDesc()); + return taskDefinition; + } + @Test public void checkJson() { String taskDefinitionJson = "[{\"name\":\"detail_up\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":" @@ -273,4 +286,47 @@ public class TaskDefinitionServiceImplTest { Map genTaskCodeList = taskDefinitionService.genTaskCodeList(10); Assert.assertEquals(Status.SUCCESS, genTaskCodeList.get(Constants.STATUS)); } + + @Test + public void testReleaseTaskDefinition() { + long projectCode = 1L; + long taskCode = 1L; + Mockito.when(projectMapper.queryByCode(projectCode)).thenReturn(getProject(projectCode)); + + Project project = getProject(projectCode); + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + + // check task dose not exist + Map result = new HashMap<>(); + putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCode); + Mockito.when(projectService.checkProjectAndAuth(loginUser, 
project, projectCode)).thenReturn(result); + Map map = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.OFFLINE); + Assert.assertEquals(Status.TASK_DEFINE_NOT_EXIST, map.get(Constants.STATUS)); + + // process definition offline + putMsg(result, Status.SUCCESS); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + taskDefinition.setVersion(1); + taskDefinition.setCode(taskCode); + String params = "{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo 1\",\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"dependence\":{}}"; + taskDefinition.setTaskParams(params); + taskDefinition.setTaskType(TaskType.SHELL.getDesc()); + Mockito.when(taskDefinitionMapper.queryByCode(taskCode)).thenReturn(taskDefinition); + TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog(taskDefinition); + Mockito.when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, taskDefinition.getVersion())).thenReturn(taskDefinitionLog); + + Map offlineTaskResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.OFFLINE); + Assert.assertEquals(Status.SUCCESS, offlineTaskResult.get(Constants.STATUS)); + + // process definition online, resource exist + Map onlineTaskResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.ONLINE); + Assert.assertEquals(Status.SUCCESS, onlineTaskResult.get(Constants.STATUS)); + + // release error code + Map failResult = taskDefinitionService.releaseTaskDefinition(loginUser, projectCode, taskCode, ReleaseState.getEnum(2)); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failResult.get(Constants.STATUS)); + } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index 08fdf5fe51927afaa3329a220d1b61ff9f99499e..5544a18ee036200147dd5d45a6d61eec2cd3aed9 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -32,9 +32,11 @@ import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -81,6 +83,9 @@ public class TaskInstanceServiceTest { @Mock UsersService usersService; + @Mock + TaskDefinitionMapper taskDefinitionMapper; + @Test public void queryTaskListPaging() { long projectCode = 1L; @@ -249,6 +254,9 @@ public class TaskInstanceServiceTest { // test task not found when(projectService.checkProjectAndAuth(user, project, projectCode)).thenReturn(mockSuccess); when(taskInstanceMapper.selectById(Mockito.anyInt())).thenReturn(null); + TaskDefinition taskDefinition = new TaskDefinition(); + taskDefinition.setProjectCode(projectCode); + when(taskDefinitionMapper.queryByCode(task.getTaskCode())).thenReturn(taskDefinition); Map taskNotFoundRes = taskInstanceService.forceTaskSuccess(user, projectCode, taskId); Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND, taskNotFoundRes.get(Constants.STATUS)); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java index 7f662dc7e8954e4f663bdce8a7cafb5838d229f7..e1c00d2e2e3e8b12faf718ed6b3055c03e394406 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -23,7 +23,6 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; @@ -33,6 +32,8 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -83,7 +84,7 @@ public class TenantServiceTest { try { //check tenantCode Map result = - tenantService.createTenant(getLoginUser(), "%!1111", 1, "TenantServiceTest"); + tenantService.createTenant(getLoginUser(), "%!1111", 1, "TenantServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.CHECK_OS_TENANT_CODE_ERROR, result.get(Constants.STATUS)); @@ -111,7 +112,7 @@ public class TenantServiceTest { page.setRecords(getList()); page.setTotal(1L); Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest"))) - .thenReturn(page); + .thenReturn(page); Result result = tenantService.queryTenantList(getLoginUser(), 
"TenantServiceTest", 1, 10); logger.info(result.toString()); PageInfo pageInfo = (PageInfo) result.getData(); @@ -126,7 +127,7 @@ public class TenantServiceTest { try { // id not exist Map result = - tenantService.updateTenant(getLoginUser(), 912222, tenantCode, 1, "desc"); + tenantService.updateTenant(getLoginUser(), 912222, tenantCode, 1, "desc"); logger.info(result.toString()); // success Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); @@ -145,7 +146,7 @@ public class TenantServiceTest { Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES)) - .thenReturn(getInstanceList()); + .thenReturn(getInstanceList()); Mockito.when(processDefinitionMapper.queryDefinitionListByTenant(2)).thenReturn(getDefinitionsList()); Mockito.when(userMapper.queryUserListByTenant(3)).thenReturn(getUserList()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java index 23ac7b07eed8efc4e2065ad6264e52d96d67d505..632c7a7bed6f75878cb9ecf2944e04dbcd220218 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java @@ -24,7 +24,6 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.UdfFunc; @@ -33,6 +32,8 @@ import 
org.apache.dolphinscheduler.dao.mapper.ResourceMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.Date; import java.util.List; @@ -244,4 +245,4 @@ public class UdfFuncServiceTest { udfFunc.setType(UdfType.HIVE); return udfFunc; } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index 72526416022600fc0933f6587058bf8bc1da09cc..5e0eb6b9c12b0c09c1d345b94f425c0e8ecf9547 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -26,9 +26,7 @@ import org.apache.dolphinscheduler.api.service.impl.UsersServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.Project; @@ -45,6 +43,9 @@ import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import org.apache.commons.collections.CollectionUtils; import java.util.ArrayList; import 
java.util.List; @@ -69,7 +70,7 @@ import com.google.common.collect.Lists; /** * users service test */ -@RunWith(MockitoJUnitRunner.class) +@RunWith(MockitoJUnitRunner.Silent.class) public class UsersServiceTest { private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); @@ -337,6 +338,74 @@ public class UsersServiceTest { Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } + @Test + public void testGrantProjectByCode() { + // Mock Project, User + final long projectCode = 1L; + final int projectCreator = 1; + final int authorizer = 100; + Mockito.when(this.userMapper.selectById(authorizer)).thenReturn(this.getUser()); + Mockito.when(this.userMapper.selectById(projectCreator)).thenReturn(this.getUser()); + Mockito.when(this.projectMapper.queryByCode(projectCode)).thenReturn(this.getProject()); + + // ERROR: USER_NOT_EXIST + User loginUser = new User(); + Map result = this.usersService.grantProjectByCode(loginUser, 999, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); + + // ERROR: PROJECT_NOT_FOUNT + result = this.usersService.grantProjectByCode(loginUser, authorizer, 999); + logger.info(result.toString()); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); + + // ERROR: USER_NO_OPERATION_PERM + loginUser.setId(999); + loginUser.setUserType(UserType.GENERAL_USER); + result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); + + // SUCCESS: USER IS PROJECT OWNER + loginUser.setId(projectCreator); + loginUser.setUserType(UserType.GENERAL_USER); + result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + + // SUCCESS: USER IS ADMINISTRATOR + 
loginUser.setId(999); + loginUser.setUserType(UserType.ADMIN_USER); + result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + + @Test + public void testRevokeProject() { + Mockito.when(this.userMapper.selectById(1)).thenReturn(this.getUser()); + + final long projectCode = 3682329499136L; + + // user no permission + User loginUser = new User(); + Map result = this.usersService.revokeProject(loginUser, 1, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); + + // user not exist + loginUser.setUserType(UserType.ADMIN_USER); + result = this.usersService.revokeProject(loginUser, 2, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); + + // success + Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(new Project()); + result = this.usersService.revokeProject(loginUser, 1, projectCode); + logger.info(result.toString()); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } + @Test public void testGrantResources() { String resourceIds = "100000,120000"; @@ -617,6 +686,22 @@ public class UsersServiceTest { return user; } + /** + * Get project + * @return + */ + private Project getProject() { + Project project = new Project(); + project.setId(1); + project.setCode(1L); + project.setUserId(1); + project.setName("PJ-001"); + project.setPerm(7); + project.setDefCount(0); + project.setInstRunningCount(0); + return project; + } + /** * get user */ @@ -680,4 +765,4 @@ public class UsersServiceTest { return alertGroups; } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java index 0c7683554a319f291ca3667f57526e686cab9ef3..1dfcc64c413b5c1a7bf5607e86b2fe59daf6057f 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.service; +import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.WorkerGroupServiceImpl; import org.apache.dolphinscheduler.common.Constants; @@ -33,105 +34,32 @@ import java.util.List; import java.util.Map; import org.junit.Assert; -import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PowerMockIgnore; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -/** - * worker group service test - */ -@RunWith(PowerMockRunner.class) -@PrepareForTest({ RegistryClient.class }) -@PowerMockIgnore({"javax.management.*"}) +@RunWith(SpringJUnit4ClassRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) public class WorkerGroupServiceTest { + @MockBean + private RegistryClient registryClient; - @InjectMocks + @Autowired private WorkerGroupServiceImpl workerGroupService; - @Mock + @MockBean private WorkerGroupMapper workerGroupMapper; - @Mock + @MockBean 
private ProcessInstanceMapper processInstanceMapper; - private String groupName = "groupName000001"; - /* @Before - public void init() { - ZookeeperConfig zookeeperConfig = new ZookeeperConfig(); - zookeeperConfig.setDsRoot("/dolphinscheduler_qzw"); - Mockito.when(zookeeperCachedOperator.getZookeeperConfig()).thenReturn(zookeeperConfig); - - String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; - - List workerGroupStrList = new ArrayList<>(); - workerGroupStrList.add("default"); - workerGroupStrList.add("test"); - Mockito.when(zookeeperCachedOperator.getChildrenNodes(workerPath)).thenReturn(workerGroupStrList); - - List defaultAddressList = new ArrayList<>(); - defaultAddressList.add("192.168.220.188:1234"); - defaultAddressList.add("192.168.220.189:1234"); - - Mockito.when(zookeeperCachedOperator.getChildrenNodes(workerPath + "/default")).thenReturn(defaultAddressList); - - Mockito.when(zookeeperCachedOperator.get(workerPath + "/default" + "/" + defaultAddressList.get(0))).thenReturn("0.01,0.17,0.03,25.83,8.0,1.0,2020-07-21 11:17:59,2020-07-21 14:39:20,0,13238"); - } - -*//** - * create or update a worker group - *//* - @Test - public void testSaveWorkerGroup() { - // worker server maps - Map serverMaps = new HashMap<>(); - serverMaps.put("127.0.0.1:1234", "0.3,0.07,4.4,7.42,16.0,0.3,2021-03-19 20:17:58,2021-03-19 20:25:29,0,79214"); - Mockito.when(zookeeperMonitor.getServerMaps(ZKNodeType.WORKER, true)).thenReturn(serverMaps); - - User user = new User(); - // general user add - user.setUserType(UserType.GENERAL_USER); - Map result = workerGroupService.saveWorkerGroup(user, 0, groupName, "127.0.0.1:1234"); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), result.get(Constants.MSG)); - - // success - user.setUserType(UserType.ADMIN_USER); - result = workerGroupService.saveWorkerGroup(user, 0, groupName, "127.0.0.1:1234"); - Assert.assertEquals(Status.SUCCESS.getMsg(), 
result.get(Constants.MSG)); - // group name exist - Mockito.when(workerGroupMapper.selectById(2)).thenReturn(getWorkerGroup(2)); - Mockito.when(workerGroupMapper.queryWorkerGroupByName(groupName)).thenReturn(getList()); - result = workerGroupService.saveWorkerGroup(user, 2, groupName, "127.0.0.1:1234"); - Assert.assertEquals(Status.NAME_EXIST, result.get(Constants.STATUS)); - }*/ - - /** - * query worker group paging - */ - /* @Test - public void testQueryAllGroupPaging() { - User user = new User(); - // general user add - user.setUserType(UserType.ADMIN_USER); - Map result = workerGroupService.queryAllGroupPaging(user, 1, 10, null); - PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); - Assert.assertEquals(pageInfo.getLists().size(), 1); - }*/ - - @Before - public void before() { - PowerMockito.suppress(PowerMockito.constructor(RegistryClient.class)); - } - @Test public void testQueryAllGroup() { Map result = workerGroupService.queryAllGroup(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java index 2f159292400ec8a26d1a39046925c3533c260fac..71b0f929cdffcbb658da59f565f4f5dbbabeb530 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java @@ -128,7 +128,7 @@ public class CheckUtilsTest { taskNode.setType(TaskType.SUB_PROCESS.getDesc()); assertFalse(CheckUtils.checkTaskNodeParameters(taskNode)); - subProcessParameters.setProcessDefinitionId(1234); + subProcessParameters.setProcessDefinitionCode(1234L); taskNode.setParams(JSONUtils.toJsonString(subProcessParameters)); taskNode.setType(TaskType.SUB_PROCESS.getDesc()); assertTrue(CheckUtils.checkTaskNodeParameters(taskNode)); diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java index 228eaf9b9a6f4448f14366cbaa85ef412dd50900..06aecc80f8a4bf27465f2e85a1f92788253ec38a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FileUtilsTest.java @@ -63,19 +63,20 @@ public class FileUtilsTest { public void testCopyFile() throws IOException { //Define dest file path + String src = rootPath + System.getProperty("file.separator") + "src.txt"; String destFilename = rootPath + System.getProperty("file.separator") + "data.txt"; logger.info("destFilename: "+destFilename); //Define InputStream for MultipartFile String data = "data text"; - InputStream targetStream = new ByteArrayInputStream(data.getBytes()); + org.apache.commons.io.FileUtils.writeStringToFile(new File(src), data); //Use Mockito to mock MultipartFile - MultipartFile file = Mockito.mock(MultipartFile.class); - Mockito.when(file.getInputStream()).thenReturn(targetStream); + MultipartFile file = Mockito.mock(MultipartFile.class, Mockito.RETURNS_DEEP_STUBS); + Mockito.when(file.getResource().getFile()).thenReturn(new File(src)); //Invoke copyFile - FileUtils.copyFile(file,destFilename); + FileUtils.copyInputStreamToFile(file,destFilename); //Test file exists File destFile = new File(destFilename); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java deleted file mode 100644 index 69d1f21c373aefc3c41470ce1f5ea3d53a9dfc0c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.api.utils; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.InputStream; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.net.SocketTimeoutException; - -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.when; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({FourLetterWordMain.class, Socket.class}) -public class FourLetterWordMainTest { - - private static final Logger logger = - LoggerFactory.getLogger(FourLetterWordMainTest.class); - private static final String NEW_LINE = "\n"; - - @InjectMocks - private FourLetterWordMain fourLetterWord; - 
@Mock - private Socket socket; - @Mock - private InetSocketAddress socketAddress; - - private final String localHost = "127.0.0.1"; - private final int zkPort = 2181; - private ByteArrayOutputStream byteArrayOutputStream; - private InputStream inputStream; - - private String cmd; - private String testResult; - private String expectedStr; - - @Before - public void setUp() { - // mock socket class - PowerMockito.mockStatic(Socket.class); - try { - PowerMockito.whenNew(Socket.class).withNoArguments() - .thenReturn(socket); - } catch (Exception e) { - e.printStackTrace(); - } - } - - /** - * None mock test method, just to check zookeeper status. - * Comment @Before notation to run this test. - * Zookeeper status will be as: - * Zookeeper version: 3.4.11 ... - * Received: 6739707 - * Sent: 6739773 - * Connections: 20 - * Outstanding: 0 - * Zxid: 0x9ba - * Mode: standalone - * Node count: 263 - */ - public void testCmd() { - // "192.168.64.11" - // final String zkHost = localHost; - final String zkHost = "192.168.64.11"; - cmd = "srvr"; - try { - // Change localhost to right zk host ip. 
- final String result = FourLetterWordMain - .send4LetterWord(zkHost, zkPort, cmd); - logger.info(cmd + ": " + result + "<<<"); - } catch (Exception e) { - e.printStackTrace(); - } - } - - @Test - public void testEmptyCmd() { - cmd = ""; - expectedStr = ""; - testSend4LetterWord(cmd, expectedStr); - } - - @Test - public void testNullCmd() { - cmd = null; - - try { - testResult = FourLetterWordMain - .send4LetterWord(localHost, zkPort, cmd); - } catch (Exception e) { - testResult = e.getMessage(); - } - - logger.info("testNullCmd result: " + testResult); - assertEquals("cmd must not be null", testResult); - } - - @Test - public void testNullSocketOutput() { - cmd = "test null socket output"; - expectedStr = null; - testSend4LetterWord(cmd, expectedStr); - } - - @Test - public void testOneLineOutput() { - cmd = "line 1"; - - // line end without \n - expectedStr = "line 1" + NEW_LINE; - testSend4LetterWord(cmd, expectedStr); - - // line end with \n - expectedStr = "line 1\n" + NEW_LINE; - testSend4LetterWord(cmd, expectedStr); - } - - @Test - public void testMultiline() { - cmd = "line 1 " + NEW_LINE + - "line 2 " + NEW_LINE + - "line 3 " + NEW_LINE; - - expectedStr = cmd + NEW_LINE; - testSend4LetterWord(cmd, expectedStr); - - expectedStr = NEW_LINE + NEW_LINE + NEW_LINE; - testSend4LetterWord(cmd, expectedStr); - } - - @Test - public void testSocketTimeOut() { - cmd = "test socket time out"; - - try { - doThrow(new SocketTimeoutException()) - .when(socket) - .connect(any(InetSocketAddress.class), Mockito.anyInt()); - testResult = FourLetterWordMain - .send4LetterWord(localHost, zkPort, cmd); - } catch (Exception e) { - testResult = e.getMessage(); - } - - logger.info("testSocketTimeOut result: " + testResult); - assertEquals( - "Exception while executing four letter word: " + cmd, - testResult - ); - } - - /** - * Test FourLetterWordMain.send4LetterWord() with input cmd and output - * string. 
- * @param cmd - * @param expectedStr - */ - public void testSend4LetterWord(String cmd, String expectedStr) { - try { - final byte[] strBytes = cmd.getBytes(); - byteArrayOutputStream = new ByteArrayOutputStream(strBytes.length); - byteArrayOutputStream.write(strBytes, 0, strBytes.length); - - inputStream = new ByteArrayInputStream(expectedStr.getBytes()); - - when(socket.getOutputStream()) - .thenReturn(byteArrayOutputStream); - when(socket.getInputStream()).thenReturn(inputStream); - - final String result = FourLetterWordMain - .send4LetterWord(localHost, zkPort, cmd); - logger.info( - "testSend4LetterWord: " + - "cmd: " + cmd + - ", expectedStr: " + expectedStr + - ", result: " + result + "." - ); - Assert.assertEquals(expectedStr, result); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java index 067dfeb17dc410e8de9f2264d9a580933155b5d6..2a339fdea86c8cf175ac793b077664aa205dd90c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegexUtilsTest.java @@ -25,17 +25,6 @@ import org.junit.Test; */ public class RegexUtilsTest { - @Test - public void testIsNumeric() { - String num1 = "123467854678"; - boolean numeric = RegexUtils.isNumeric(num1); - Assert.assertTrue(numeric); - - String num2 = "0.0.01"; - boolean numeric2 = RegexUtils.isNumeric(num2); - Assert.assertFalse(numeric2); - } - @Test public void testIsValidLinuxUserName() { String name1 = "10000"; @@ -72,4 +61,4 @@ public class RegexUtilsTest { Assert.assertNull(result4); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index 
481e00cc3dd113fde323d080d594bf3381c45f54..db007a0fda93fb1e7e1d36a2c7c527f5fd400f64 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -21,16 +21,12 @@ org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT dolphinscheduler-common dolphinscheduler-common jar - - UTF-8 - 3.1.0 - @@ -42,14 +38,6 @@ org.apache.httpcomponents httpclient - - io.airlift.resolver - resolver - - - org.sonatype.aether - aether-api - org.ow2.asm asm @@ -66,16 +54,9 @@ - - - junit - junit - test - org.mockito mockito-core - jar test @@ -89,19 +70,6 @@ org.powermock powermock-api-mockito2 test - - - org.mockito - mockito-core - - - - - - org.jacoco - org.jacoco.agent - runtime - test @@ -624,11 +592,6 @@ compile - - org.codehaus.janino - janino - ${codehaus.janino.version} - com.github.rholder guava-retrying @@ -646,7 +609,6 @@ io.netty netty-all - compile diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 2d695046ba40cc2d461e5f9a475a52a2d598a3ac..70fab13f76550daf9fdec6513a50a590c63dcc3c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.common; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.SystemUtils; import java.util.regex.Pattern; @@ -53,7 +53,7 @@ public final class Constants { public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock"; public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource"; public static final String 
ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class"; - + public static final String ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT = "org.quartz.scheduler.batchTriggerAcquisitionMaxCount"; /** * quartz config default value */ @@ -66,6 +66,7 @@ public final class Constants { public static final String QUARTZ_INSTANCENAME = "DolphinScheduler"; public static final String QUARTZ_INSTANCEID = "AUTO"; public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true"; + public static final String QUARTZ_BATCHTRIGGERACQUISTITIONMAXCOUNT = "100"; /** * common properties path @@ -90,9 +91,8 @@ public final class Constants { public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters"; public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers"; public static final String REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters"; - public static final String REGISTRY_PLUGIN_BINDING = "registry.plugin.binding"; - public static final String REGISTRY_PLUGIN_DIR = "registry.plugin.dir"; public static final String REGISTRY_SERVERS = "registry.servers"; + public static final String FOLDER_SEPARATOR = "/"; /** * fs.defaultFS @@ -309,12 +309,7 @@ public final class Constants { * user name regex */ public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$"); - - /** - * email regex - */ - public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$"); - + /** * default display rows */ @@ -342,6 +337,11 @@ public final class Constants { */ public static final int DEFAULT_ADMIN_PERMISSION = 7; + /** + * default hash map size + */ + public static final int DEFAULT_HASH_MAP_SIZE = 16; + /** * all permissions @@ -404,7 +404,6 @@ public final class Constants { /** * datasource configuration path 
*/ - public static final String DATASOURCE_PROPERTIES = "/datasource.properties"; public static final String COMMON_TASK_TYPE = "common"; @@ -431,10 +430,12 @@ public final class Constants { public static final String CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; - public static final String CMD_PARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; + public static final String CMD_PARAM_SUB_PROCESS_DEFINE_CODE = "processDefinitionCode"; public static final String CMD_PARAM_START_NODE_NAMES = "StartNodeNameList"; + public static final String CMD_PARAM_START_NODES = "StartNodeList"; + public static final String CMD_PARAM_START_PARAMS = "StartParams"; public static final String CMD_PARAM_FATHER_PARAMS = "fatherParams"; @@ -454,11 +455,6 @@ public final class Constants { */ public static final String DEFAULT_CRON_STRING = "0 0 0 * * ? *"; - - /** - * data source config - */ - public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; @@ -467,39 +463,37 @@ public final class Constants { public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password"; - public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout"; - - public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize"; + public static final String SPRING_DATASOURCE_CONNECTION_TIMEOUT = "spring.datasource.connectionTimeout"; public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; - public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait"; - - public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis"; + public static final String 
SPRING_DATASOURCE_IDLE_TIMEOUT = "spring.datasource.idleTimeout"; - public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis"; + public static final String SPRING_DATASOURCE_MAX_LIFE_TIME = "spring.datasource.maxLifetime"; - public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis"; + public static final String SPRING_DATASOURCE_VALIDATION_TIMEOUT = "spring.datasource.validationTimeout"; public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery"; - public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle"; + public static final String SPRING_DATASOURCE_LEAK_DETECTION_THRESHOLD = "spring.datasource.leakDetectionThreshold"; - public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; + public static final String SPRING_DATASOURCE_INITIALIZATION_FAIL_TIMEOUT = "spring.datasource.initializationFailTimeout"; - public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn"; + public static final String SPRING_DATASOURCE_IS_AUTOCOMMIT = "spring.datasource.isAutoCommit"; - public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements"; + public static final String SPRING_DATASOURCE_CACHE_PREP_STMTS = "spring.datasource.cachePrepStmts"; - public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit"; + public static final String SPRING_DATASOURCE_PREP_STMT_CACHE_SIZE = "spring.datasource.prepStmtCacheSize"; - public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive"; + public static final String SPRING_DATASOURCE_PREP_STMT_CACHE_SQL_LIMIT = "spring.datasource.prepStmtCacheSqlLimit"; - public static final String 
SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize"; + public static final String CACHE_PREP_STMTS = "cachePrepStmts"; - public static final String DEVELOPMENT = "development"; + public static final String PREP_STMT_CACHE_SIZE = "prepStmtCacheSize"; + + public static final String PREP_STMT_CACHE_SQL_LIMIT = "prepStmtCacheSqlLimit"; public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties"; @@ -521,8 +515,7 @@ public final class Constants { /** * heartbeat for zk info length */ - public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10; - public static final int HEARTBEAT_WITH_WEIGHT_FOR_ZOOKEEPER_INFO_LENGTH = 11; + public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 14; /** * jar @@ -667,6 +660,10 @@ public final class Constants { * SUCCEEDED */ public static final String SUCCEEDED = "SUCCEEDED"; + /** + * ENDED + */ + public static final String ENDED = "ENDED"; /** * NEW */ @@ -719,7 +716,7 @@ public final class Constants { * application regex */ public static final String APPLICATION_REGEX = "application_\\d+_\\d+"; - public static final String PID = OSUtils.isWindows() ? "handle" : "pid"; + public static final String PID = SystemUtils.IS_OS_WINDOWS ? 
"handle" : "pid"; /** * month_begin */ @@ -751,7 +748,7 @@ public final class Constants { public static final char LEFT_BRACE_CHAR = '('; public static final char RIGHT_BRACE_CHAR = ')'; public static final String ADD_STRING = "+"; - public static final String MULTIPLY_STRING = "*"; + public static final String STAR = "*"; public static final String DIVISION_STRING = "/"; public static final String LEFT_BRACE_STRING = "("; public static final char P = 'P'; @@ -769,6 +766,7 @@ public final class Constants { public static final String DEPENDENCE = "dependence"; public static final String TASK_TYPE = "taskType"; public static final String TASK_LIST = "taskList"; + public static final String WARNING_GROUP_NAME="warningGroupName"; public static final String RWXR_XR_X = "rwxr-xr-x"; public static final String QUEUE = "queue"; public static final String QUEUE_NAME = "queueName"; @@ -786,6 +784,8 @@ public final class Constants { public static final String CONTENT = "content"; public static final String DEPENDENT_SPLIT = ":||"; public static final String DEPENDENT_ALL = "ALL"; + public static final long DEPENDENT_ALL_TASK_CODE = 0; + /** @@ -935,6 +935,12 @@ public final class Constants { */ public static final String LOCALE_LANGUAGE = "language"; + /** + * temporary parameter prefix + */ + public static final String START_UP_PARAMS_PREFIX = "startup-"; + public static final String GLOBAL_PARAMS_PREFIX = "global-"; + /** * driver */ @@ -947,6 +953,19 @@ public final class Constants { public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver"; public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver"; + + /** + * validation Query + */ + public static final String POSTGRESQL_VALIDATION_QUERY = "select version()"; + public static final String MYSQL_VALIDATION_QUERY = "select 1"; + public static final String HIVE_VALIDATION_QUERY = "select 1"; + public static final String CLICKHOUSE_VALIDATION_QUERY = "select 1"; + public 
static final String ORACLE_VALIDATION_QUERY = "select 1 from dual"; + public static final String SQLSERVER_VALIDATION_QUERY = "select 1"; + public static final String DB2_VALIDATION_QUERY = "select 1 from sysibm.sysdummy1"; + public static final String PRESTO_VALIDATION_QUERY = "select 1"; + /** * database type */ @@ -1021,14 +1040,9 @@ public final class Constants { */ public static final int AUTHORIZE_READABLE_PERM = 4; - - /** - * plugin configurations - */ - public static final String PLUGIN_JAR_SUFFIX = ".jar"; - public static final int NORMAL_NODE_STATUS = 0; public static final int ABNORMAL_NODE_STATUS = 1; + public static final int BUSY_NODE_STATUE = 2; public static final String START_TIME = "start time"; public static final String END_TIME = "end time"; @@ -1039,9 +1053,6 @@ public final class Constants { */ public static final String SYSTEM_LINE_SEPARATOR = System.getProperty("line.separator"); - - public static final String EXCEL_SUFFIX_XLS = ".xls"; - /** * datasource encryption salt */ @@ -1078,21 +1089,7 @@ public final class Constants { * docker & kubernetes */ public static final boolean DOCKER_MODE = !StringUtils.isEmpty(System.getenv("DOCKER")); - public static final boolean KUBERNETES_MODE = !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_HOST")) && !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_PORT")); - - /** - * task parameter keys - */ - public static final String TASK_PARAMS = "params"; - public static final String TASK_PARAMS_DATASOURCE = "datasource"; - public static final String TASK_PARAMS_DATASOURCE_NAME = "datasourceName"; - public static final String TASK_DEPENDENCE = "dependence"; - public static final String TASK_DEPENDENCE_DEPEND_TASK_LIST = "dependTaskList"; - public static final String TASK_DEPENDENCE_DEPEND_ITEM_LIST = "dependItemList"; - public static final String TASK_DEPENDENCE_PROJECT_ID = "projectId"; - public static final String TASK_DEPENDENCE_PROJECT_NAME = "projectName"; - public static final String 
TASK_DEPENDENCE_DEFINITION_ID = "definitionId"; - public static final String TASK_DEPENDENCE_DEFINITION_NAME = "definitionName"; + public static final Boolean KUBERNETES_MODE = !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_HOST")) && !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_PORT")); /** * dry run flag @@ -1100,4 +1097,10 @@ public final class Constants { public static final int DRY_RUN_FLAG_NO = 0; public static final int DRY_RUN_FLAG_YES = 1; + public static final String CACHE_KEY_VALUE_ALL = "'all'"; + + /** + * tenant + */ + public static final int TENANT_FULL_NAME_MAX_LENGTH = 30; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceProcessor.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceProcessor.java deleted file mode 100644 index e76a08800197304b40325ee31817579b2a672e58..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceProcessor.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.datasource; - -import org.apache.dolphinscheduler.common.enums.DbType; - -import java.io.IOException; -import java.sql.Connection; -import java.sql.SQLException; - -public interface DatasourceProcessor { - - /** - * check datasource param is valid - */ - void checkDatasourceParam(BaseDataSourceParamDTO datasourceParam); - - /** - * create BaseDataSourceParamDTO by connectionJson - * - * @param connectionJson see{@link org.apache.dolphinscheduler.dao.entity.Datasource} - * @return {@link BaseDataSourceParamDTO} - */ - BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson); - - /** - * create datasource connection parameter which will be stored at DataSource - *

- * see {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams} - */ - ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam); - - /** - * deserialize json to datasource connection param - * - * @param connectionJson {@code org.apache.dolphinscheduler.dao.entity.DataSource.connectionParams} - * @return {@link BaseConnectionParam} - */ - ConnectionParam createConnectionParams(String connectionJson); - - /** - * get datasource Driver - */ - String getDatasourceDriver(); - - /** - * get jdbcUrl by connection param, the jdbcUrl is different with ConnectionParam.jdbcUrl, this method will inject - * other to jdbcUrl - * - * @param connectionParam connection param - */ - String getJdbcUrl(ConnectionParam connectionParam); - - /** - * get connection by connectionParam - * - * @param connectionParam connectionParam - * @return {@link Connection} - */ - Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException, IOException; - - /** - * @return {@link DbType} - */ - DbType getDbType(); -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceParamDTO.java deleted file mode 100644 index f4168fce5f15e3484148559499d390ff5385126d..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceParamDTO.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.clickhouse; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class ClickHouseDatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "ClickHouseDatasourceParamDTO{" - + "host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.CLICKHOUSE; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceParamDTO.java deleted file mode 100644 index c9894728323677ab01b486b50f24156b8852bb69..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceParamDTO.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.db2; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class Db2DatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "Db2DatasourceParamDTO{" - + "name='" + name + '\'' - + ", note='" + note + '\'' - + ", host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.DB2; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveConnectionParam.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveConnectionParam.java deleted file mode 100644 index e982211df76cbbdd086d61a344841fd075f305a8..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveConnectionParam.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.hive; - -import org.apache.dolphinscheduler.common.datasource.BaseHdfsConnectionParam; - -public class HiveConnectionParam extends BaseHdfsConnectionParam { - @Override - public String toString() { - return "HiveConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + ", principal='" + principal + '\'' - + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' - + ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\'' - + ", loginUserKeytabPath='" + loginUserKeytabPath + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDataSourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDataSourceParamDTO.java deleted file mode 100644 index 816d08889a4241d3743564d606aef8507159b72b..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDataSourceParamDTO.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.hive; - -import org.apache.dolphinscheduler.common.datasource.BaseHdfsDatasourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class HiveDataSourceParamDTO extends BaseHdfsDatasourceParamDTO { - - @Override - public String toString() { - return "HiveDataSourceParamDTO{" - + "host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", principal='" + principal + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' - + ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\'' - + ", loginUserKeytabPath='" + loginUserKeytabPath + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.HIVE; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlConnectionParam.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlConnectionParam.java deleted file mode 100644 index 3c5117c2e25f556e6dfc9e337d3a7bc6eabe5c29..0000000000000000000000000000000000000000 --- 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.mysql; - -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; - -public class MysqlConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "MysqlConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceParamDTO.java deleted file mode 100644 index 3e8f0ed91874f801aed420e0577721b00744cdd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceParamDTO.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.mysql; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class MysqlDatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "MysqlDatasourceParamDTO{" - + "name='" + name + '\'' - + ", note='" + note + '\'' - + ", host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.MYSQL; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleConnectionParam.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleConnectionParam.java deleted file mode 100644 index a59f50badb79587fa4c6a63fde469f7086b5e762..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleConnectionParam.java +++ /dev/null @@ 
-1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.oracle; - -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbConnectType; - -public class OracleConnectionParam extends BaseConnectionParam { - protected DbConnectType connectType; - - public DbConnectType getConnectType() { - return connectType; - } - - public void setConnectType(DbConnectType connectType) { - this.connectType = connectType; - } - - @Override - public String toString() { - return "OracleConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + ", connectType=" + connectType - + '}'; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceParamDTO.java deleted file mode 100644 index 
5d81233a7a847e48e733dac8d7af51d1b59f370a..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceParamDTO.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.datasource.postgresql; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class PostgreSqlDatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "PostgreSqlDatasourceParamDTO{" - + "host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.POSTGRESQL; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceParamDTO.java deleted file mode 100644 index 9477362b03ffd2aabe91b26c254482b838dd3d47..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceParamDTO.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.datasource.presto; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class PrestoDatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "PrestoDatasourceParamDTO{" - + "name='" + name + '\'' - + ", note='" + note + '\'' - + ", host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.PRESTO; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceParamDTO.java deleted file mode 100644 index 784f7d01d907a1d57e8adaf6bd54d0d065ff4f44..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceParamDTO.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.spark; - -import org.apache.dolphinscheduler.common.datasource.BaseHdfsDatasourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class SparkDatasourceParamDTO extends BaseHdfsDatasourceParamDTO { - - @Override - public String toString() { - return "SparkDatasourceParamDTO{" - + "host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", principal='" + principal + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' - + ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\'' - + ", loginUserKeytabPath='" + loginUserKeytabPath + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.SPARK; - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceParamDTO.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceParamDTO.java deleted file mode 100644 index 62c757638f71dcd6594b0f15ad32c3a2fddedc77..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceParamDTO.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.datasource.sqlserver; - -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; - -public class SqlServerDatasourceParamDTO extends BaseDataSourceParamDTO { - - @Override - public String toString() { - return "SqlServerDatasourceParamDTO{" - + "name='" + name + '\'' - + ", note='" + note + '\'' - + ", host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", other='" + other + '\'' - + '}'; - } - - @Override - public DbType getType() { - return DbType.SQLSERVER; - } -} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CacheType.java similarity index 62% rename from dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java rename to dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CacheType.java index d71eb54f3cb411f7faa21074b77517ef121973ff..f845b2083f137b18a498f4b526f99d9c404c6db1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CacheType.java @@ -15,16 +15,25 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.server.utils; +package org.apache.dolphinscheduler.common.enums; -public class ArgsUtils { +public enum CacheType { + TENANT("tenant"), + USER("user"), + QUEUE("queue"), + PROCESS_DEFINITION("processDefinition"), + PROCESS_TASK_RELATION("processTaskRelation"), + TASK_DEFINITION("taskDefinition"), + WORKER_GROUP("workerGroup"), + SCHEDULE("schedule"); - private ArgsUtils() throws IllegalStateException { - throw new IllegalStateException("Utility class"); + CacheType(String cacheName) { + this.cacheName = cacheName; } - public static String escape(String arg) { - return arg.replace(" ", "\\ ").replace("\"", "\\\"").replace("'", "\\'"); - } + private final String cacheName; + public String getCacheName() { + return cacheName; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java index 9cec2766f162e9025841b12ca213d034c54aa72c..1f1d9335afdd6dbdd76119f14d73c00382d31da1 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java @@ -19,5 +19,8 @@ package org.apache.dolphinscheduler.common.enums; public enum Event { ACK, - RESULT; + RESULT, + ACTION_STOP, + WORKER_REJECT, + REALLOCATE } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java index 637eab2a4c1b94c08e6eec2d851d8b3cffe7f676..b3fac4094fb25a6643466848f10dfca9991384cd 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java @@ -99,8 +99,7 @@ public enum ExecutionStatus { * 
@return status */ public boolean typeIsFinished() { - return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause() - || typeIsStop(); + return typeIsSuccess() || typeIsFailure() || typeIsCancel(); } /** @@ -113,12 +112,12 @@ public enum ExecutionStatus { } /** - * status is pause + * status is ready pause * * @return status */ - public boolean typeIsPause() { - return this == PAUSE; + public boolean typeIsReadyPause() { + return this == READY_PAUSE; } /** @@ -145,7 +144,16 @@ public enum ExecutionStatus { * @return status */ public boolean typeIsCancel() { - return this == KILL || this == STOP; + return this == KILL || this == STOP || this == PAUSE; + } + + /** + * status is ready cancel + * + * @return status + */ + public boolean typeIsReadyCancel() { + return this == READY_PAUSE || this == READY_STOP; } public int getCode() { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/NodeType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/NodeType.java index acc3c02378a46e20e068d92bc037a16403088618..cb247cf2f4fe2856acac6d93dd3ea46863f9cced 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/NodeType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/NodeType.java @@ -16,15 +16,6 @@ */ package org.apache.dolphinscheduler.common.enums; -/** - * zk node type - */ public enum NodeType { - - /** - * 0 master node; - * 1 worker node; - * 2 dead_server node; - */ - MASTER, WORKER, DEAD_SERVER; + MASTER, WORKER, DEAD_SERVER } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/DataChangeEvent.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ProfileType.java similarity index 67% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/DataChangeEvent.java rename to 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ProfileType.java index a6aa32d8f40506ee01aed55acbb227766328aa6c..42144ba90da444d9e285c61f7a3b07e2ab3f1eef 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/DataChangeEvent.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ProfileType.java @@ -15,23 +15,20 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.spi.register; +package org.apache.dolphinscheduler.common.enums; -/** - * Monitor the type of data changes - */ -public enum DataChangeEvent { +import com.google.common.collect.Lists; + +import java.util.List; + +public enum ProfileType { + ; - ADD("ADD", 1), - REMOVE("REMOVE", 2), - UPDATE("UPDATE",3); + public static final String H2 = "h2"; - private String type; + public static final String MYSQL = "mysql"; - private int value; + public static final String POSTGRESQL = "postgresql"; - DataChangeEvent(String type, int value) { - this.type = type; - this.value = value; - } + public static final List DATASOURCE_PROFILE = Lists.newArrayList(H2, MYSQL, POSTGRESQL); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEvent.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEvent.java index f24b3c154662a298830b4fa0d0b82f023e58737e..06dc9b6dfaa03c1e37f8a7058aefe7e5345ca377 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEvent.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/StateEvent.java @@ -85,7 +85,7 @@ public class StateEvent { public String toString() { return "State Event :" + "key: " + key - + " type: " + type.toString() + + " type: " + type + " executeStatus: " + executionStatus + " task instance id: " + taskInstanceId + " process instance id: " + processInstanceId diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java index 397f32ed6ea2b5d36673a10b6c3a7fc58e5c9335..c9217d05fb71b5b6b9f7f36f949f32d430ed47ad 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java @@ -16,14 +16,23 @@ */ package org.apache.dolphinscheduler.common.graph; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * analysis of DAG * Node: node diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java index 196ac0bb55533de52a18531ecb202d3d061244e2..a9dab84d4377ed320dbe41a5df2940b71c385de2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java @@ -26,16 +26,16 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; public class DependentItem { private long projectCode; private long definitionCode; - private String depTasks; + private long depTaskCode; private String cycle; private String 
dateValue; private DependResult dependResult; private ExecutionStatus status; public String getKey() { - return String.format("%d-%s-%s-%s", + return String.format("%d-%d-%s-%s", getDefinitionCode(), - getDepTasks(), + getDepTaskCode(), getCycle(), getDateValue()); } @@ -56,12 +56,12 @@ public class DependentItem { this.definitionCode = definitionCode; } - public String getDepTasks() { - return depTasks; + public long getDepTaskCode() { + return depTaskCode; } - public void setDepTasks(String depTasks) { - this.depTasks = depTasks; + public void setDepTaskCode(long depTaskCode) { + this.depTaskCode = depTaskCode; } public String getCycle() { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java index 26f3a659a7fd74fda8aa946879da9bbde52cf13c..5714c8a96b6a4bb7e291c0bd967409b8a5c7ad3d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java @@ -19,9 +19,9 @@ package org.apache.dolphinscheduler.common.task; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.ResourceInfo; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java index 
b136ae295d7c2e3570201252b123fa409355982a..0b592901f9ce85de127d4f9e5559529c99cd60f6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java @@ -32,10 +32,10 @@ public class ConditionsParameters extends AbstractParameters { private DependentRelation dependRelation; // node list to run when success - private List successNode; + private List successNode; // node list to run when failed - private List failedNode; + private List failedNode; @Override public boolean checkParameters() { @@ -63,19 +63,19 @@ public class ConditionsParameters extends AbstractParameters { this.dependRelation = dependRelation; } - public List getSuccessNode() { + public List getSuccessNode() { return successNode; } - public void setSuccessNode(List successNode) { + public void setSuccessNode(List successNode) { this.successNode = successNode; } - public List getFailedNode() { + public List getFailedNode() { return failedNode; } - public void setFailedNode(List failedNode) { + public void setFailedNode(List failedNode) { this.failedNode = failedNode; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java index 5f2e0e1853f0aabea2dd7d4231ddde0391d1417e..72cf790074e0696d2a38dcf44f59464f88ecb2cc 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.task.dependent; +import org.apache.commons.collections.CollectionUtils; import 
org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.model.DependentTaskModel; import org.apache.dolphinscheduler.common.process.ResourceInfo; @@ -33,7 +34,7 @@ public class DependentParameters extends AbstractParameters { @Override public boolean checkParameters() { - return true; + return CollectionUtils.isNotEmpty(dependTaskList) && relation != null; } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java index c7a341d561e48adf63fd7b8c07c3b6667338524c..bd3825d1314539a95252a883b713979f0f9f8e92 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java @@ -21,9 +21,9 @@ import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java index 46f0e8510ce37a9c4ec43d7a36e0c606baf41b12..21a4f3e52a261b3b3098dd4a7de859adec350a1f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.dolphinscheduler.common.task.subprocess; + import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; @@ -27,19 +28,19 @@ public class SubProcessParameters extends AbstractParameters { /** * process definition id */ - private Integer processDefinitionId; + private long processDefinitionCode; - public void setProcessDefinitionId(Integer processDefinitionId){ - this.processDefinitionId = processDefinitionId; + public void setProcessDefinitionCode(long processDefinitionCode) { + this.processDefinitionCode = processDefinitionCode; } - public Integer getProcessDefinitionId(){ - return this.processDefinitionId; + public long getProcessDefinitionCode() { + return this.processDefinitionCode; } @Override public boolean checkParameters() { - return this.processDefinitionId != null && this.processDefinitionId != 0; + return this.processDefinitionCode != 0; } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchParameters.java index dc597953087b5921bd263b5bb09ecc38afdc1cfd..f7b4f7bea8c4f1baf0f39f32d526de4ce50c04e6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchParameters.java @@ -21,6 +21,8 @@ import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.commons.collections.CollectionUtils; + import 
java.util.ArrayList; import java.util.List; @@ -28,7 +30,9 @@ public class SwitchParameters extends AbstractParameters { private DependentRelation dependRelation; private String relation; - private List nextNode; + private Long nextNode; + private int resultConditionLocation; + private List dependTaskList; @Override public boolean checkParameters() { @@ -40,9 +44,6 @@ public class SwitchParameters extends AbstractParameters { return new ArrayList<>(); } - private int resultConditionLocation; - private List dependTaskList; - public DependentRelation getDependRelation() { return dependRelation; } @@ -75,17 +76,18 @@ public class SwitchParameters extends AbstractParameters { this.dependTaskList = dependTaskList; } - public List getNextNode() { + public Long getNextNode() { return nextNode; } public void setNextNode(Object nextNode) { - if (nextNode instanceof String) { - List nextNodeList = new ArrayList<>(); - nextNodeList.add(String.valueOf(nextNode)); - this.nextNode = nextNodeList; + if (nextNode instanceof Long) { + this.nextNode = (Long) nextNode; } else { - this.nextNode = (ArrayList) nextNode; + List nextNodes = (ArrayList) nextNode; + if (CollectionUtils.isNotEmpty(nextNodes)) { + this.nextNode = Long.parseLong(nextNodes.get(0)); + } } } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchResultVo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchResultVo.java index 558a6f1b83ef022bea6109d4fc44390ea616a82d..58090c05be6cf5f0dc39e790e516b6f11d3ae03c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchResultVo.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/switchtask/SwitchResultVo.java @@ -17,13 +17,15 @@ package org.apache.dolphinscheduler.common.task.switchtask; +import org.apache.commons.collections.CollectionUtils; + import 
java.util.ArrayList; import java.util.List; public class SwitchResultVo { private String condition; - private List nextNode; + private Long nextNode; public String getCondition() { return condition; @@ -33,17 +35,18 @@ public class SwitchResultVo { this.condition = condition; } - public List getNextNode() { + public Long getNextNode() { return nextNode; } public void setNextNode(Object nextNode) { - if (nextNode instanceof String) { - List nextNodeList = new ArrayList<>(); - nextNodeList.add(String.valueOf(nextNode)); - this.nextNode = nextNodeList; + if (nextNode instanceof Long) { + this.nextNode = (Long) nextNode; } else { - this.nextNode = (ArrayList) nextNode; + List nextNodes = (ArrayList) nextNode; + if (CollectionUtils.isNotEmpty(nextNodes)) { + this.nextNode = Long.parseLong(nextNodes.get(0)); + } } } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ArrayUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ArrayUtils.java deleted file mode 100644 index 60fcbcef370592e70059c2e56fb6f5eee05afb9b..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ArrayUtils.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.utils; - -public class ArrayUtils { - - public static byte[] clone(byte[] array) { - return array == null ? null : (byte[])((byte[])array.clone()); - } - - public static byte[] addAll(byte[] array1, byte[] array2) { - if (array1 == null) { - return clone(array2); - } else if (array2 == null) { - return clone(array1); - } else { - byte[] joinedArray = new byte[array1.length + array2.length]; - System.arraycopy(array1, 0, joinedArray, 0, array1.length); - System.arraycopy(array2, 0, joinedArray, array1.length, array2.length); - return joinedArray; - } - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..ffea87be517ee51d23589bd0797885a0d0cbf8c1 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtils.java @@ -0,0 +1,74 @@ +/** Copyright 2010-2012 Twitter, Inc.*/ + +package org.apache.dolphinscheduler.common.utils; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Objects; + +/** + * Rewriting based on Twitter snowflake algorithm + */ +public class CodeGenerateUtils { + // start timestamp + private static final long START_TIMESTAMP = 1609430400000L; //2021-01-01 00:00:00 + // Each machine generates 32 in the same millisecond + private static final long LOW_DIGIT_BIT = 5L; 
+ private static final long MIDDLE_BIT = 2L; + private static final long MAX_LOW_DIGIT = ~(-1L << LOW_DIGIT_BIT); + // The displacement to the left + private static final long MIDDLE_LEFT = LOW_DIGIT_BIT; + private static final long HIGH_DIGIT_LEFT = LOW_DIGIT_BIT + MIDDLE_BIT; + private final long machineHash; + private long lowDigit = 0L; + private long recordMillisecond = -1L; + + private static final long SYSTEM_TIMESTAMP = System.currentTimeMillis(); + private static final long SYSTEM_NANOTIME = System.nanoTime(); + + private CodeGenerateUtils() throws CodeGenerateException { + try { + this.machineHash = Math.abs(Objects.hash(InetAddress.getLocalHost().getHostName())) % (2 << (MIDDLE_BIT - 1)); + } catch (UnknownHostException e) { + throw new CodeGenerateException(e.getMessage()); + } + } + + private static CodeGenerateUtils instance = null; + + public static synchronized CodeGenerateUtils getInstance() throws CodeGenerateException { + if (instance == null) { + instance = new CodeGenerateUtils(); + } + return instance; + } + + public synchronized long genCode() throws CodeGenerateException { + long nowtMillisecond = systemMillisecond(); + if (nowtMillisecond < recordMillisecond) { + throw new CodeGenerateException("New code exception because time is set back."); + } + if (nowtMillisecond == recordMillisecond) { + lowDigit = (lowDigit + 1) & MAX_LOW_DIGIT; + if (lowDigit == 0L) { + while (nowtMillisecond <= recordMillisecond) { + nowtMillisecond = systemMillisecond(); + } + } + } else { + lowDigit = 0L; + } + recordMillisecond = nowtMillisecond; + return (nowtMillisecond - START_TIMESTAMP) << HIGH_DIGIT_LEFT | machineHash << MIDDLE_LEFT | lowDigit; + } + + private long systemMillisecond() { + return SYSTEM_TIMESTAMP + (System.nanoTime() - SYSTEM_NANOTIME) / 1000000; + } + + public static class CodeGenerateException extends Exception { + public CodeGenerateException(String message) { + super(message); + } + } +} diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java index 9801beb6000771218ef52080f06d28e454d53c7d..0de217f9bb2f165fed99f65dfe01b6007688dc7a 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java @@ -18,18 +18,14 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.commons.beanutils.BeanMap; -import org.apache.commons.lang.StringUtils; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; /** * Provides utility methods and decorators for {@link Collection} instances. @@ -49,175 +45,6 @@ public class CollectionUtils { throw new UnsupportedOperationException("Construct CollectionUtils"); } - /** - * The load factor used when none specified in constructor. - */ - static final float DEFAULT_LOAD_FACTOR = 0.75f; - - /** - * Returns a new {@link Collection} containing a minus a subset of - * b. Only the elements of b that satisfy the predicate - * condition, p are subtracted from a. - * - *

The cardinality of each element e in the returned {@link Collection} - * that satisfies the predicate condition will be the cardinality of e in a - * minus the cardinality of e in b, or zero, whichever is greater.

- *

The cardinality of each element e in the returned {@link Collection} that does not - * satisfy the predicate condition will be equal to the cardinality of e in a.

- * - * @param a the collection to subtract from, must not be null - * @param b the collection to subtract, must not be null - * @param T - * @return a new collection with the results - * @see Collection#removeAll - */ - public static Collection subtract(Set a, Set b) { - return org.apache.commons.collections4.CollectionUtils.subtract(a, b); - } - - public static boolean isNotEmpty(Collection coll) { - return !isEmpty(coll); - } - - public static boolean isEmpty(Collection coll) { - return coll == null || coll.isEmpty(); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @return string to map - */ - public static Map stringToMap(String str, String separator) { - return stringToMap(str, separator, ""); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @param keyPrefix prefix - * @return string to map - */ - public static Map stringToMap(String str, String separator, String keyPrefix) { - - Map emptyMap = new HashMap<>(0); - if (StringUtils.isEmpty(str)) { - return emptyMap; - } - if (StringUtils.isEmpty(separator)) { - return emptyMap; - } - String[] strings = str.split(separator); - int initialCapacity = (int)(strings.length / DEFAULT_LOAD_FACTOR) + 1; - Map map = new HashMap<>(initialCapacity); - for (int i = 0; i < strings.length; i++) { - String[] strArray = strings[i].split("="); - if (strArray.length != 2) { - return emptyMap; - } - //strArray[0] KEY strArray[1] VALUE - if (StringUtils.isEmpty(keyPrefix)) { - map.put(strArray[0], strArray[1]); - } else { - map.put(keyPrefix + strArray[0], strArray[1]); - } - } - return map; - } - - /** - * Transform item in collection - * - * @param collection origin collection - * @param transformFunc transform function - * @param origin item type - * @param target type - * @return transform list - */ - public static List transformToList(Collection collection, Function transformFunc) { - if (isEmpty(collection)) { - return new ArrayList<>(); - } 
- return collection.stream().map(transformFunc).collect(Collectors.toList()); - } - - /** - * Collect collection to map - * - * @param collection origin collection - * @param keyTransformFunction key transform function - * @param target k type - * @param value - * @return map - */ - public static Map collectionToMap(Collection collection, Function keyTransformFunction) { - if (isEmpty(collection)) { - return new HashMap<>(); - } - return collection.stream().collect(Collectors.toMap(keyTransformFunction, Function.identity())); - } - - /** - * Helper class to easily access cardinality properties of two collections. - * - * @param the element type - */ - private static class CardinalityHelper { - - /** - * Contains the cardinality for each object in collection A. - */ - final Map cardinalityA; - - /** - * Contains the cardinality for each object in collection B. - */ - final Map cardinalityB; - - /** - * Create a new CardinalityHelper for two collections. - * - * @param a the first collection - * @param b the second collection - */ - public CardinalityHelper(final Iterable a, final Iterable b) { - cardinalityA = CollectionUtils.getCardinalityMap(a); - cardinalityB = CollectionUtils.getCardinalityMap(b); - } - - /** - * Returns the frequency of this object in collection A. - * - * @param obj the object - * @return the frequency of the object in collection A - */ - public int freqA(final Object obj) { - return getFreq(obj, cardinalityA); - } - - /** - * Returns the frequency of this object in collection B. 
- * - * @param obj the object - * @return the frequency of the object in collection B - */ - public int freqB(final Object obj) { - return getFreq(obj, cardinalityB); - } - - private int getFreq(final Object obj, final Map freqMap) { - final Integer count = freqMap.get(obj); - if (count != null) { - return count; - } - return 0; - } - } - /** * returns {@code true} iff the given {@link Collection}s contain * exactly the same elements with exactly the same cardinalities. @@ -236,55 +63,7 @@ public class CollectionUtils { return false; } - return isEqualCollection(a, b); - } - - /** - * Returns {@code true} iff the given {@link Collection}s contain - * exactly the same elements with exactly the same cardinalities. - *

- * That is, iff the cardinality of e in a is - * equal to the cardinality of e in b, - * for each element e in a or b. - * - * @param a the first collection, must not be null - * @param b the second collection, must not be null - * @return true iff the collections contain the same elements with the same cardinalities. - */ - public static boolean isEqualCollection(final Collection a, final Collection b) { - if (a.size() != b.size()) { - return false; - } - final CardinalityHelper helper = new CardinalityHelper<>(a, b); - if (helper.cardinalityA.size() != helper.cardinalityB.size()) { - return false; - } - for (final Object obj : helper.cardinalityA.keySet()) { - if (helper.freqA(obj) != helper.freqB(obj)) { - return false; - } - } - return true; - } - - /** - * Returns a {@link Map} mapping each unique element in the given - * {@link Collection} to an {@link Integer} representing the number - * of occurrences of that element in the {@link Collection}. - *

- * Only those elements present in the collection will appear as - * keys in the map. - * - * @param the type of object in the returned {@link Map}. This is a super type of O - * @param coll the collection to get the cardinality map for, must not be null - * @return the populated cardinality map - */ - public static Map getCardinalityMap(final Iterable coll) { - final Map count = new HashMap<>(); - for (final O obj : coll) { - count.put(obj, count.getOrDefault(obj, 0) + 1); - } - return count; + return org.apache.commons.collections.CollectionUtils.isEqualCollection(a, b); } /** diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java index 093383263213f9abc77cd9e0846efa135e19f014..ffed1d13f4869df7c96753b0177f81b029829750 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java @@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.dolphinscheduler.spi.enums.ResUploadType; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java index b8b6c1d13e1724ddd32acc0e2f261757d8bc726d..f3fde77dae77ad7d9df5f6e10c8fdeaefaf1dc19 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DependentUtils.java @@ -84,7 +84,7 @@ public class DependentUtils { 
result = DependentDateUtils.getLastHoursInterval(businessDate, 3); break; case "last24Hours": - result = DependentDateUtils.getSpecialLastDayInterval(businessDate); + result = DependentDateUtils.getLastHoursInterval(businessDate, 24); break; case "today": result = DependentDateUtils.getTodayInterval(businessDate); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumUtils.java deleted file mode 100644 index 10963b486a6a1c7f6a96c2461fe942f2c141033c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/EnumUtils.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.utils; - -public class EnumUtils { - - private EnumUtils() { - throw new UnsupportedOperationException("Construct EnumUtils"); - } - - public static > E getEnum(final Class enumClass, final String enumName) { - if (enumName == null) { - return null; - } - try { - return Enum.valueOf(enumClass, enumName); - } catch (final IllegalArgumentException ex) { - return null; - } - } - - public static > boolean isValidEnum(final Class enumClass, final String enumName) { - if (enumName == null) { - return false; - } - try { - Enum.valueOf(enumClass, enumName); - return true; - } catch (final IllegalArgumentException ex) { - return false; - } - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java index 9fa9b5fb38df7017a1b59de2697ed0f30b4869c7..ef80a8af638bc09ff50fa3fb1d603d8caad44d75 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -17,28 +17,14 @@ package org.apache.dolphinscheduler.common.utils; -import static org.apache.dolphinscheduler.common.Constants.DATA_BASEDIR_PATH; -import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS; -import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE; -import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; - import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStreamWriter; 
-import java.io.StringReader; +import java.io.*; import java.nio.charset.StandardCharsets; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import static org.apache.dolphinscheduler.common.Constants.*; /** * file utils @@ -49,30 +35,10 @@ public class FileUtils { public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH, "/tmp/dolphinscheduler"); - public static final ThreadLocal taskLoggerThreadLocal = new ThreadLocal<>(); - private FileUtils() { throw new UnsupportedOperationException("Construct FileUtils"); } - /** - * get file suffix - * - * @param filename file name - * @return file suffix - */ - public static String suffix(String filename) { - - String fileSuffix = ""; - if (!StringUtils.isEmpty(filename)) { - int lastIndex = filename.lastIndexOf('.'); - if (lastIndex > 0) { - fileSuffix = filename.substring(lastIndex + 1); - } - } - return fileSuffix; - } - /** * get download file absolute path and name * @@ -152,8 +118,7 @@ public class FileUtils { //create work dir org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); String mkdirLog = "create dir success " + execLocalPath; - LoggerUtils.logInfo(Optional.ofNullable(logger), mkdirLog); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), mkdirLog); + logger.info(mkdirLog); } /** @@ -164,30 +129,16 @@ public class FileUtils { * @return true if write success */ public static boolean writeContent2File(String content, String filePath) { - BufferedReader bufferedReader = null; - BufferedWriter bufferedWriter = null; try { File distFile = new File(filePath); if (!distFile.getParentFile().exists() && !distFile.getParentFile().mkdirs()) { - FileUtils.logger.error("mkdir parent failed"); + logger.error("mkdir parent failed"); return false; } - bufferedReader = new BufferedReader(new StringReader(content)); - bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(distFile), StandardCharsets.UTF_8)); - 
char[] buf = new char[1024]; - int len; - while ((len = bufferedReader.read(buf)) != -1) { - bufferedWriter.write(buf, 0, len); - } - bufferedWriter.flush(); - bufferedReader.close(); - bufferedWriter.close(); + IOUtils.write(content, new FileOutputStream(filePath), StandardCharsets.UTF_8); } catch (IOException e) { - FileUtils.logger.error(e.getMessage(), e); + logger.error(e.getMessage(), e); return false; - } finally { - IOUtils.closeQuietly(bufferedWriter); - IOUtils.closeQuietly(bufferedReader); } return true; } @@ -203,13 +154,9 @@ public class FileUtils { * * * @param filename file name - * @throws IOException in case deletion is unsuccessful */ - public static void deleteFile(String filename) throws IOException { - File file = new File(filename); - if (file.exists()) { - org.apache.commons.io.FileUtils.forceDelete(file); - } + public static void deleteFile(String filename) { + org.apache.commons.io.FileUtils.deleteQuietly(new File(filename)); } /** @@ -253,4 +200,25 @@ public class FileUtils { } } + /** + * Check whether the given string type of path can be traversal or not, return true if path could + * traversal, and return false if it is not. 
+ * + * @param filename String type of filename + * @return whether file path could be traversal or not + */ + public static boolean directoryTraversal(String filename){ + if (filename.contains(FOLDER_SEPARATOR)) { + return true; + } + File file = new File(filename); + try { + File canonical = file.getCanonicalFile(); + File absolute = file.getAbsoluteFile(); + return !canonical.equals(absolute); + } catch (IOException e) { + return true; + } + } + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java index 8b03913d82039832618b1bd59435ddb5a02acafd..508ca31985a68803f1a26ca36c799638cd56576a 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java @@ -22,7 +22,7 @@ import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ResUploadType; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.dolphinscheduler.common.exception.BaseException; import org.apache.commons.io.IOUtils; @@ -92,8 +92,9 @@ public class HadoopUtils implements Closeable { private FileSystem fs; private HadoopUtils() { - init(); - initHdfsPath(); + if(init()) { + initHdfsPath(); + } } public static HadoopUtils getInstance() { @@ -120,7 +121,7 @@ public class HadoopUtils implements Closeable { /** * init hadoop configuration */ - private void init() { + private boolean init() { try { configuration = new HdfsConfiguration(); @@ -168,11 +169,13 @@ public class HadoopUtils implements Closeable { 
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); fs = FileSystem.get(configuration); + } else { + return false; } - } catch (Exception e) { logger.error(e.getMessage(), e); } + return true; } /** @@ -182,6 +185,13 @@ public class HadoopUtils implements Closeable { return configuration; } + /** + * @return DefaultFS + */ + public String getDefaultFS() { + return getConfiguration().get(Constants.FS_DEFAULTFS); + } + /** * get application url * @@ -446,6 +456,7 @@ public class HadoopUtils implements Closeable { case Constants.ACCEPTED: return ExecutionStatus.SUBMITTED_SUCCESS; case Constants.SUCCEEDED: + case Constants.ENDED: return ExecutionStatus.SUCCESS; case Constants.NEW: case Constants.NEW_SAVING: diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HeartBeat.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HeartBeat.java new file mode 100644 index 0000000000000000000000000000000000000000..e9830c52e47875945fe165769d9aedb3b1bf2ee6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HeartBeat.java @@ -0,0 +1,262 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HeartBeat { + + private static final Logger logger = LoggerFactory.getLogger(HeartBeat.class); + public static final String COMMA = ","; + + private long startupTime; + private long reportTime; + private double cpuUsage; + private double memoryUsage; + private double loadAverage; + private double availablePhysicalMemorySize; + private double maxCpuloadAvg; + private double reservedMemory; + private int serverStatus; + private int processId; + + private int workerHostWeight; // worker host weight + private int workerWaitingTaskCount; // worker waiting task count + private int workerExecThreadCount; // worker thread pool thread count + + private double diskAvailable; + + public double getDiskAvailable() { + return diskAvailable; + } + + public void setDiskAvailable(double diskAvailable) { + this.diskAvailable = diskAvailable; + } + + public long getStartupTime() { + return startupTime; + } + + public void setStartupTime(long startupTime) { + this.startupTime = startupTime; + } + + public long getReportTime() { + return reportTime; + } + + public void setReportTime(long reportTime) { + this.reportTime = reportTime; + } + + public double getCpuUsage() { + return cpuUsage; + } + + public void setCpuUsage(double cpuUsage) { + this.cpuUsage = cpuUsage; + } + + public double getMemoryUsage() { + return memoryUsage; + } + + public void setMemoryUsage(double memoryUsage) { + 
this.memoryUsage = memoryUsage; + } + + public double getLoadAverage() { + return loadAverage; + } + + public void setLoadAverage(double loadAverage) { + this.loadAverage = loadAverage; + } + + public double getAvailablePhysicalMemorySize() { + return availablePhysicalMemorySize; + } + + public void setAvailablePhysicalMemorySize(double availablePhysicalMemorySize) { + this.availablePhysicalMemorySize = availablePhysicalMemorySize; + } + + public double getMaxCpuloadAvg() { + return maxCpuloadAvg; + } + + public void setMaxCpuloadAvg(double maxCpuloadAvg) { + this.maxCpuloadAvg = maxCpuloadAvg; + } + + public double getReservedMemory() { + return reservedMemory; + } + + public void setReservedMemory(double reservedMemory) { + this.reservedMemory = reservedMemory; + } + + public int getServerStatus() { + return serverStatus; + } + + public void setServerStatus(int serverStatus) { + this.serverStatus = serverStatus; + } + + public int getProcessId() { + return processId; + } + + public void setProcessId(int processId) { + this.processId = processId; + } + + public int getWorkerHostWeight() { + return workerHostWeight; + } + + public void setWorkerHostWeight(int workerHostWeight) { + this.workerHostWeight = workerHostWeight; + } + + public int getWorkerWaitingTaskCount() { + return workerWaitingTaskCount; + } + + public void setWorkerWaitingTaskCount(int workerWaitingTaskCount) { + this.workerWaitingTaskCount = workerWaitingTaskCount; + } + + public int getWorkerExecThreadCount() { + return workerExecThreadCount; + } + + public void setWorkerExecThreadCount(int workerExecThreadCount) { + this.workerExecThreadCount = workerExecThreadCount; + } + + public HeartBeat() { + this.reportTime = System.currentTimeMillis(); + this.serverStatus = Constants.NORMAL_NODE_STATUS; + } + + public HeartBeat(long startupTime, double maxCpuloadAvg, double reservedMemory) { + this.reportTime = System.currentTimeMillis(); + this.serverStatus = Constants.NORMAL_NODE_STATUS; + 
this.startupTime = startupTime; + this.maxCpuloadAvg = maxCpuloadAvg; + this.reservedMemory = reservedMemory; + } + + public HeartBeat(long startupTime, double maxCpuloadAvg, double reservedMemory, int hostWeight, int workerExecThreadCount) { + this.reportTime = System.currentTimeMillis(); + this.serverStatus = Constants.NORMAL_NODE_STATUS; + this.startupTime = startupTime; + this.maxCpuloadAvg = maxCpuloadAvg; + this.reservedMemory = reservedMemory; + this.workerHostWeight = hostWeight; + this.workerExecThreadCount = workerExecThreadCount; + } + + /** + * fill system info + */ + private void fillSystemInfo() { + this.cpuUsage = OSUtils.cpuUsage(); + this.loadAverage = OSUtils.loadAverage(); + this.availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); + this.memoryUsage = OSUtils.memoryUsage(); + this.diskAvailable = OSUtils.diskAvailable(); + this.processId = OSUtils.getProcessID(); + } + + /** + * update server state + */ + public void updateServerState() { + this.reportTime = System.currentTimeMillis(); + if (loadAverage > maxCpuloadAvg || availablePhysicalMemorySize < reservedMemory) { + logger.warn("current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G", + loadAverage, availablePhysicalMemorySize, maxCpuloadAvg, reservedMemory); + this.serverStatus = Constants.ABNORMAL_NODE_STATUS; + } else if (workerWaitingTaskCount > workerExecThreadCount) { + logger.warn("current waiting task count {} is large than worker thread count {}, worker is busy", workerWaitingTaskCount, workerExecThreadCount); + this.serverStatus = Constants.BUSY_NODE_STATUE; + } else { + this.serverStatus = Constants.NORMAL_NODE_STATUS; + } + } + + /** + * encode heartbeat + */ + public String encodeHeartBeat() { + this.fillSystemInfo(); + this.updateServerState(); + + StringBuilder builder = new StringBuilder(100); + builder.append(cpuUsage).append(COMMA); + builder.append(memoryUsage).append(COMMA); + 
builder.append(diskAvailable).append(COMMA); + builder.append(loadAverage).append(COMMA); + builder.append(availablePhysicalMemorySize).append(Constants.COMMA); + builder.append(maxCpuloadAvg).append(Constants.COMMA); + builder.append(reservedMemory).append(Constants.COMMA); + builder.append(startupTime).append(Constants.COMMA); + builder.append(reportTime).append(Constants.COMMA); + builder.append(serverStatus).append(COMMA); + builder.append(processId).append(COMMA); + builder.append(workerHostWeight).append(COMMA); + builder.append(workerExecThreadCount).append(COMMA); + builder.append(workerWaitingTaskCount); + + return builder.toString(); + } + + /** + * decode heartbeat + */ + public static HeartBeat decodeHeartBeat(String heartBeatInfo) { + String[] parts = heartBeatInfo.split(Constants.COMMA); + if (parts.length != Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH) { + return null; + } + HeartBeat heartBeat = new HeartBeat(); + heartBeat.cpuUsage = Double.parseDouble(parts[0]); + heartBeat.memoryUsage = Double.parseDouble(parts[1]); + heartBeat.diskAvailable = Double.parseDouble(parts[2]); + heartBeat.loadAverage = Double.parseDouble(parts[3]); + heartBeat.availablePhysicalMemorySize = Double.parseDouble(parts[4]); + heartBeat.maxCpuloadAvg = Double.parseDouble(parts[5]); + heartBeat.reservedMemory = Double.parseDouble(parts[6]); + heartBeat.startupTime = Long.parseLong(parts[7]); + heartBeat.reportTime = Long.parseLong(parts[8]); + heartBeat.serverStatus = Integer.parseInt(parts[9]); + heartBeat.processId = Integer.parseInt(parts[10]); + heartBeat.workerHostWeight = Integer.parseInt(parts[11]); + heartBeat.workerExecThreadCount = Integer.parseInt(parts[12]); + heartBeat.workerWaitingTaskCount = Integer.parseInt(parts[13]); + return heartBeat; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java index 
545bbb2274906052343743cc93ef7a59c40101f3..0b409809c58bb81b4ce27394e71cd59833075724 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -53,11 +52,6 @@ public class LoggerUtils { */ public static final String TASK_LOGGER_INFO_PREFIX = "TASK"; - /** - * Task Logger Thread's name - */ - public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; - /** * Task Logger Thread's name */ @@ -124,25 +118,4 @@ public class LoggerUtils { } return ""; } - - public static void logError(Optional optionalLogger - , String error) { - optionalLogger.ifPresent((Logger logger) -> logger.error(error)); - } - - public static void logError(Optional optionalLogger - , Throwable e) { - optionalLogger.ifPresent((Logger logger) -> logger.error(e.getMessage(), e)); - } - - public static void logError(Optional optionalLogger - , String error, Throwable e) { - optionalLogger.ifPresent((Logger logger) -> logger.error(error, e)); - } - - public static void logInfo(Optional optionalLogger - , String info) { - optionalLogger.ifPresent((Logger logger) -> logger.info(info)); - } - -} \ No newline at end of file +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java index 07053fa679817efc38a4cae6672a993d7b64a7d6..043f6047e4f1a8c0adbbd3ab34fbb084f32bc811 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -17,35 +17,29 @@ 
package org.apache.dolphinscheduler.common.utils; -import org.apache.dolphinscheduler.common.shell.ShellExecutor; - import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.SystemUtils; +import org.apache.dolphinscheduler.common.shell.ShellExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.SystemInfo; +import oshi.hardware.CentralProcessor; +import oshi.hardware.GlobalMemory; +import oshi.hardware.HardwareAbstractionLayer; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; +import java.io.File; import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.lang.management.RuntimeMXBean; import java.math.RoundingMode; import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.StringTokenizer; +import java.util.*; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import oshi.SystemInfo; -import oshi.hardware.CentralProcessor; -import oshi.hardware.GlobalMemory; -import oshi.hardware.HardwareAbstractionLayer; - /** * os utils */ @@ -53,8 +47,6 @@ public class OSUtils { private static final Logger logger = LoggerFactory.getLogger(OSUtils.class); - public static final ThreadLocal taskLoggerThreadLocal = new ThreadLocal<>(); - private static final SystemInfo SI = new SystemInfo(); public static final String TWO_DECIMAL = "0.00"; @@ -64,7 +56,10 @@ public class OSUtils { */ public static final double NEGATIVE_ONE = -1; - private static HardwareAbstractionLayer hal = SI.getHardware(); + private static final HardwareAbstractionLayer hal = SI.getHardware(); + private static long[] prevTicks = new long[CentralProcessor.TickType.values().length]; + private static long prevTickTime = 0L; + private static double cpuUsage = 0.0D; private 
OSUtils() { throw new UnsupportedOperationException("Construct OSUtils"); @@ -92,35 +87,37 @@ public class OSUtils { } /** - * get available physical memory size - *

+ * get disk usage * Keep 2 decimal * - * @return available Physical Memory Size, unit: G + * @return disk free space (GB) */ - public static double availablePhysicalMemorySize() { - GlobalMemory memory = hal.getMemory(); - double availablePhysicalMemorySize = memory.getAvailable() / 1024.0 / 1024 / 1024; + public static double diskAvailable() { + File file = new File("."); + long totalSpace = file.getTotalSpace(); //total disk space in bytes. + long freeSpace = file.getFreeSpace(); //unallocated / free disk space in bytes. + + double diskAvailable = freeSpace / 1024.0 / 1024 / 1024; DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); - return Double.parseDouble(df.format(availablePhysicalMemorySize)); + return Double.parseDouble(df.format(diskAvailable)); } /** - * get total physical memory size + * get available physical memory size *

* Keep 2 decimal * * @return available Physical Memory Size, unit: G */ - public static double totalPhysicalMemorySize() { + public static double availablePhysicalMemorySize() { GlobalMemory memory = hal.getMemory(); - double totalPhysicalMemorySize = memory.getTotal() / 1024.0 / 1024 / 1024; + double availablePhysicalMemorySize = memory.getAvailable() / 1024.0 / 1024 / 1024; DecimalFormat df = new DecimalFormat(TWO_DECIMAL); df.setRoundingMode(RoundingMode.HALF_UP); - return Double.parseDouble(df.format(totalPhysicalMemorySize)); + return Double.parseDouble(df.format(availablePhysicalMemorySize)); } /** @@ -135,7 +132,7 @@ public class OSUtils { loadAverage = osBean.getSystemLoadAverage(); } catch (Exception e) { logger.error("get operation system load average exception, try another method ", e); - loadAverage = hal.getProcessor().getSystemLoadAverage(); + loadAverage = hal.getProcessor().getSystemLoadAverage(1)[0]; if (Double.isNaN(loadAverage)) { return NEGATIVE_ONE; } @@ -152,7 +149,16 @@ public class OSUtils { */ public static double cpuUsage() { CentralProcessor processor = hal.getProcessor(); - double cpuUsage = processor.getSystemCpuLoad(); + + // Check if > ~ 0.95 seconds since last tick count. + long now = System.currentTimeMillis(); + if (now - prevTickTime > 950) { + // Enough time has elapsed. 
+ cpuUsage = processor.getSystemCpuLoadBetweenTicks(prevTicks); + prevTickTime = System.currentTimeMillis(); + prevTicks = processor.getSystemCpuLoadTicks(); + } + if (Double.isNaN(cpuUsage)) { return NEGATIVE_ONE; } @@ -164,9 +170,9 @@ public class OSUtils { public static List getUserList() { try { - if (isMacOS()) { + if (SystemUtils.IS_OS_MAC) { return getUserListFromMac(); - } else if (isWindows()) { + } else if (SystemUtils.IS_OS_WINDOWS) { return getUserListFromWindows(); } else { return getUserListFromLinux(); @@ -262,14 +268,10 @@ public class OSUtils { */ public static void createUserIfAbsent(String userName) { // if not exists this user, then create - taskLoggerThreadLocal.set(taskLoggerThreadLocal.get()); if (!getUserList().contains(userName)) { boolean isSuccess = createUser(userName); - String infoLog = String.format("create user %s %s", userName, isSuccess ? "success" : "fail"); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog); + logger.info("create user {} {}", userName, isSuccess ? 
"success" : "fail"); } - taskLoggerThreadLocal.remove(); } /** @@ -283,21 +285,19 @@ public class OSUtils { String userGroup = getGroup(); if (StringUtils.isEmpty(userGroup)) { String errorLog = String.format("%s group does not exist for this operating system.", userGroup); - LoggerUtils.logError(Optional.ofNullable(logger), errorLog); - LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), errorLog); + logger.error(errorLog); return false; } - if (isMacOS()) { + if (SystemUtils.IS_OS_MAC) { createMacUser(userName, userGroup); - } else if (isWindows()) { + } else if (SystemUtils.IS_OS_WINDOWS) { createWindowsUser(userName, userGroup); } else { createLinuxUser(userName, userGroup); } return true; } catch (Exception e) { - LoggerUtils.logError(Optional.ofNullable(logger), e); - LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), e); + logger.error(e.getMessage(), e); } return false; @@ -311,14 +311,9 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createLinuxUser(String userName, String userGroup) throws IOException { - String infoLog1 = String.format("create linux os user : %s", userName); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1); - + logger.info("create linux os user: {}", userName); String cmd = String.format("sudo useradd -g %s %s", userGroup, userName); - String infoLog2 = String.format("execute cmd : %s", cmd); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2); + logger.info("execute cmd: {}", cmd); exeCmd(cmd); } @@ -330,23 +325,14 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createMacUser(String userName, String userGroup) throws IOException { - Optional optionalLogger = Optional.ofNullable(logger); - Optional optionalTaskLogger = 
Optional.ofNullable(taskLoggerThreadLocal.get()); - - String infoLog1 = String.format("create mac os user : %s", userName); - LoggerUtils.logInfo(optionalLogger, infoLog1); - LoggerUtils.logInfo(optionalTaskLogger, infoLog1); + logger.info("create mac os user: {}", userName); String createUserCmd = String.format("sudo sysadminctl -addUser %s -password %s", userName, userName); - String infoLog2 = String.format("create user command : %s", createUserCmd); - LoggerUtils.logInfo(optionalLogger, infoLog2); - LoggerUtils.logInfo(optionalTaskLogger, infoLog2); + logger.info("create user command: {}", createUserCmd); exeCmd(createUserCmd); String appendGroupCmd = String.format("sudo dseditgroup -o edit -a %s -t user %s", userName, userGroup); - String infoLog3 = String.format("append user to group : %s", appendGroupCmd); - LoggerUtils.logInfo(optionalLogger, infoLog3); - LoggerUtils.logInfo(optionalTaskLogger, infoLog3); + logger.info("append user to group: {}", appendGroupCmd); exeCmd(appendGroupCmd); } @@ -358,20 +344,14 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createWindowsUser(String userName, String userGroup) throws IOException { - String infoLog1 = String.format("create windows os user : %s", userName); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1); + logger.info("create windows os user: {}", userName); String userCreateCmd = String.format("net user \"%s\" /add", userName); - String infoLog2 = String.format("execute create user command : %s", userCreateCmd); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2); + logger.info("execute create user command: {}", userCreateCmd); exeCmd(userCreateCmd); String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName); - String infoLog3 = 
String.format("execute append user to group : %s", appendGroupCmd); - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog3); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog3); + logger.info("execute append user to group: {}", appendGroupCmd); exeCmd(appendGroupCmd); } @@ -382,7 +362,7 @@ public class OSUtils { * @throws IOException errors */ public static String getGroup() throws IOException { - if (isWindows()) { + if (SystemUtils.IS_OS_WINDOWS) { String currentProcUserName = System.getProperty("user.name"); String result = exeCmd(String.format("net user \"%s\"", currentProcUserName)); String line = result.split("\n")[22]; @@ -454,33 +434,6 @@ public class OSUtils { return Integer.parseInt(runtimeMXBean.getName().split("@")[0]); } - /** - * whether is macOS - * - * @return true if mac - */ - public static boolean isMacOS() { - return getOSName().startsWith("Mac"); - } - - /** - * whether is windows - * - * @return true if windows - */ - public static boolean isWindows() { - return getOSName().startsWith("Windows"); - } - - /** - * get current OS name - * - * @return current OS name - */ - public static String getOSName() { - return System.getProperty("os.name"); - } - /** * check memory and cpu usage * diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java index 8aa6f80379e517f8ae7f8f106581e8ab03bf20d5..f247aa0a1677f38424d38173513430bfe6843149 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; -import 
org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; @@ -27,7 +26,6 @@ import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils import org.apache.commons.lang.StringUtils; -import java.sql.PreparedStatement; import java.util.Date; import java.util.HashMap; import java.util.Iterator; @@ -37,16 +35,10 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * parameter parse utils */ public class ParameterUtils { - - private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class); - private static final String DATE_PARSE_PATTERN = "\\$\\[([^\\$\\]]+)]"; private static final String DATE_START_PATTERN = "^[0-9]"; @@ -85,72 +77,6 @@ public class ParameterUtils { return parameterString; } - /** - * new - * convert parameters place holders - * - * @param parameterString parameter - * @param parameterMap parameter map - * @return convert parameters place holders - */ - public static String convertParameterPlaceholders2(String parameterString, Map parameterMap) { - if (StringUtils.isEmpty(parameterString)) { - return parameterString; - } - //Get current time, schedule execute time - String cronTimeStr = parameterMap.get(Constants.PARAMETER_SHECDULE_TIME); - Date cronTime = null; - - if (!StringUtils.isEmpty(cronTimeStr)) { - cronTime = DateUtils.parse(cronTimeStr, Constants.PARAMETER_FORMAT_TIME); - - } else { - cronTime = new Date(); - } - - // replace variable ${} form,refers to the replacement of system variables and custom variables - if (!parameterMap.isEmpty()) { - parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); - } - - // replace time $[...] form, eg. 
$[yyyyMMdd] - if (cronTime != null) { - return dateTemplateParse(parameterString, cronTime); - } - return parameterString; - } - - /** - * set in parameter - * - * @param index index - * @param stmt preparedstatement - * @param dataType data type - * @param value value - * @throws Exception errors - */ - public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value) throws Exception { - if (dataType.equals(DataType.VARCHAR)) { - stmt.setString(index, value); - } else if (dataType.equals(DataType.INTEGER)) { - stmt.setInt(index, Integer.parseInt(value)); - } else if (dataType.equals(DataType.LONG)) { - stmt.setLong(index, Long.parseLong(value)); - } else if (dataType.equals(DataType.FLOAT)) { - stmt.setFloat(index, Float.parseFloat(value)); - } else if (dataType.equals(DataType.DOUBLE)) { - stmt.setDouble(index, Double.parseDouble(value)); - } else if (dataType.equals(DataType.DATE)) { - stmt.setDate(index, java.sql.Date.valueOf(value)); - } else if (dataType.equals(DataType.TIME)) { - stmt.setString(index, value); - } else if (dataType.equals(DataType.TIMESTAMP)) { - stmt.setTimestamp(index, java.sql.Timestamp.valueOf(value)); - } else if (dataType.equals(DataType.BOOLEAN)) { - stmt.setBoolean(index, Boolean.parseBoolean(value)); - } - } - /** * curing user define parameters * @@ -173,8 +99,8 @@ public class ParameterUtils { } Map allParamMap = new HashMap<>(); //If it is a complement, a complement time needs to be passed in, according to the task type - Map timeParams = BusinessTimeUtils - .getBusinessTime(commandType, scheduleTime); + Map timeParams = BusinessTimeUtils. 
+ getBusinessTime(commandType, scheduleTime); if (timeParams != null) { allParamMap.putAll(timeParams); @@ -217,26 +143,6 @@ public class ParameterUtils { return inputString; } - /** - * $[yyyyMMdd] replace schedule time - */ - public static String replaceScheduleTime(String text, Date scheduleTime) { - Map paramsMap = new HashMap<>(); - //if getScheduleTime null ,is current date - if (null == scheduleTime) { - scheduleTime = new Date(); - } - - String dateTime = org.apache.dolphinscheduler.common.utils.DateUtils.format(scheduleTime, Constants.PARAMETER_FORMAT_TIME); - Property p = new Property(); - p.setValue(dateTime); - p.setProp(Constants.PARAMETER_SHECDULE_TIME); - paramsMap.put(Constants.PARAMETER_SHECDULE_TIME, p); - text = ParameterUtils.convertParameterPlaceholders2(text, convert(paramsMap)); - - return text; - } - /** * format convert * diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java index 53a97d9755262778ad43eb1ae8bfebe8e1b2e0ac..96519d4249b96dca1f828ecd24ad8891d7014fea 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -20,7 +20,7 @@ package org.apache.dolphinscheduler.common.utils; import static org.apache.dolphinscheduler.common.Constants.COMMON_PROPERTIES_PATH; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ResUploadType; +import org.apache.dolphinscheduler.spi.enums.ResUploadType; import org.apache.directory.api.util.Strings; @@ -179,58 +179,6 @@ public class PropertyUtils { return val == null ? 
defaultVal : Long.parseLong(val); } - /** - * @param key key - * @return property value - */ - public static long getLong(String key) { - return getLong(key, -1); - } - - /** - * @param key key - * @param defaultVal default value - * @return property value - */ - public static double getDouble(String key, double defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Double.parseDouble(val); - } - - /** - * get array - * - * @param key property name - * @param splitStr separator - * @return property value through array - */ - public static String[] getArray(String key, String splitStr) { - String value = getString(key); - if (value == null) { - return new String[0]; - } - try { - String[] propertyArray = value.split(splitStr); - return propertyArray; - } catch (NumberFormatException e) { - logger.info(e.getMessage(), e); - } - return new String[0]; - } - - /** - * @param key key - * @param type type - * @param defaultValue default value - * @param T - * @return get enum value - */ - public static > T getEnum(String key, Class type, - T defaultValue) { - String val = getString(key); - return val == null ? defaultValue : Enum.valueOf(type, val); - } - /** * get all properties with specified prefix, like: fs. * @@ -247,9 +195,6 @@ public class PropertyUtils { return matchedProperties; } - /** - * - */ public static void setValue(String key, String value) { properties.setProperty(key, value); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java deleted file mode 100644 index f54bd17830ce0c8a14798262d7a6a519ad4d3341..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.utils; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.model.Server; - -import org.apache.commons.lang.StringUtils; - -/** - * heartbeat for ZK reigster res info - */ -public class ResInfo { - - /** - * cpuUsage - */ - private double cpuUsage; - - /** - * memoryUsage - */ - private double memoryUsage; - - /** - * loadAverage - */ - private double loadAverage; - - public ResInfo(double cpuUsage, double memoryUsage) { - this.cpuUsage = cpuUsage; - this.memoryUsage = memoryUsage; - } - - public ResInfo(double cpuUsage, double memoryUsage, double loadAverage) { - this(cpuUsage,memoryUsage); - this.loadAverage = loadAverage; - } - - public double getCpuUsage() { - return cpuUsage; - } - - public void setCpuUsage(double cpuUsage) { - this.cpuUsage = cpuUsage; - } - - public double getMemoryUsage() { - return memoryUsage; - } - - public void setMemoryUsage(double memoryUsage) { - this.memoryUsage = memoryUsage; - } - - public double getLoadAverage() { - return loadAverage; - } - - public void setLoadAverage(double loadAverage) { - this.loadAverage = loadAverage; - } - - /** - * get CPU and memory usage - * @param cpuUsage cpu usage - * @param memoryUsage memory usage - * @param loadAverage 
load average - * @return cpu and memory usage - */ - public static String getResInfoJson(double cpuUsage, double memoryUsage, double loadAverage) { - ResInfo resInfo = new ResInfo(cpuUsage,memoryUsage,loadAverage); - return JSONUtils.toJsonString(resInfo); - } - - /** - * parse heartbeat info for zk - * @param heartBeatInfo heartbeat info - * @return heartbeat info to Server - */ - public static Server parseHeartbeatForRegistryInfo(String heartBeatInfo) { - if (!isValidHeartbeatForRegistryInfo(heartBeatInfo)) { - return null; - } - String[] parts = heartBeatInfo.split(Constants.COMMA); - Server server = new Server(); - server.setResInfo(getResInfoJson(Double.parseDouble(parts[0]), - Double.parseDouble(parts[1]), - Double.parseDouble(parts[2]))); - server.setCreateTime(DateUtils.stringToDate(parts[6])); - server.setLastHeartbeatTime(DateUtils.stringToDate(parts[7])); - //set process id - server.setId(Integer.parseInt(parts[9])); - return server; - } - - /** - * is valid heartbeat info for zk - * @param heartBeatInfo heartbeat info - * @return heartbeat info is valid - */ - public static boolean isValidHeartbeatForRegistryInfo(String heartBeatInfo) { - if (!StringUtils.isEmpty(heartBeatInfo)) { - String[] parts = heartBeatInfo.split(Constants.COMMA); - return parts.length == Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH - || parts.length == Constants.HEARTBEAT_WITH_WEIGHT_FOR_ZOOKEEPER_INFO_LENGTH; - } - return false; - } - - /** - * is new heartbeat info for zk with weight - * @param parts heartbeat info parts - * @return heartbeat info is new with weight - */ - public static boolean isNewHeartbeatWithWeight(String[] parts) { - return parts.length == Constants.HEARTBEAT_WITH_WEIGHT_FOR_ZOOKEEPER_INFO_LENGTH; - } - -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/RetryerUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/RetryerUtils.java index 
23861c7084cca2e9d8febd497280b9d9ecb213ef..da04bc0160836e934e9e8c7c45b79cc85829c923 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/RetryerUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/RetryerUtils.java @@ -17,26 +17,14 @@ package org.apache.dolphinscheduler.common.utils; +import com.github.rholder.retry.*; import org.apache.dolphinscheduler.common.Constants; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.github.rholder.retry.RetryException; -import com.github.rholder.retry.Retryer; -import com.github.rholder.retry.RetryerBuilder; -import com.github.rholder.retry.StopStrategies; -import com.github.rholder.retry.WaitStrategies; - -/** - * The Retryer util. - */ public class RetryerUtils { - private static final Logger logger = LoggerFactory.getLogger(RetryerUtils.class); private static Retryer defaultRetryerResultCheck; private static Retryer defaultRetryerResultNoCheck; @@ -114,31 +102,4 @@ public class RetryerUtils { public static Boolean retryCall(final Callable callable) throws ExecutionException, RetryException { return retryCall(callable, true); } - - /** - * Retry call silent without exceptions thrown - * - * @param callable the callable - * @param checkResult whether check result - * @return if no exceptions ,it's result returned by callable ,else always false - */ - public static boolean retryCallSilent(final Callable callable, boolean checkResult) { - boolean result = false; - try { - result = getDefaultRetryer(checkResult).call(callable); - } catch (ExecutionException | RetryException e) { - logger.warn("Retry call {} failed {}", callable, e.getMessage(), e); - } - return result; - } - - /** - * Retry call silent without exceptions thrown - * - * @param callable the callable - * @return if no exceptions ,it's result 
returned by callable ,else always false - */ - public static boolean retryCallSilent(final Callable callable) { - return retryCallSilent(callable, true); - } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtils.java deleted file mode 100644 index 1fa14fd6cf96dc55c1db8e843469e9e808a6a82f..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtils.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.utils; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.Objects; - -public class SnowFlakeUtils { - // start timestamp - private static final long START_TIMESTAMP = 1609430400000L; //2021-01-01 00:00:00 - // Number of digits - private static final long SEQUENCE_BIT = 13; - private static final long MACHINE_BIT = 2; - private static final long MAX_SEQUENCE = ~(-1L << SEQUENCE_BIT); - // The displacement to the left - private static final long MACHINE_LEFT = SEQUENCE_BIT; - private static final long TIMESTAMP_LEFT = SEQUENCE_BIT + MACHINE_BIT; - private final int machineId; - private long sequence = 0L; - private long lastTimestamp = -1L; - - private SnowFlakeUtils() throws SnowFlakeException { - try { - this.machineId = Math.abs(Objects.hash(InetAddress.getLocalHost().getHostName())) % 32; - } catch (UnknownHostException e) { - throw new SnowFlakeException(e.getMessage()); - } - } - - private static SnowFlakeUtils instance = null; - - public static synchronized SnowFlakeUtils getInstance() throws SnowFlakeException { - if (instance == null) { - instance = new SnowFlakeUtils(); - } - return instance; - } - - public synchronized long nextId() throws SnowFlakeException { - long currStmp = nowTimestamp(); - if (currStmp < lastTimestamp) { - throw new SnowFlakeException("Clock moved backwards. 
Refusing to generate id"); - } - if (currStmp == lastTimestamp) { - sequence = (sequence + 1) & MAX_SEQUENCE; - if (sequence == 0L) { - currStmp = getNextMill(); - } - } else { - sequence = 0L; - } - lastTimestamp = currStmp; - return (currStmp - START_TIMESTAMP) << TIMESTAMP_LEFT - | machineId << MACHINE_LEFT - | sequence; - } - - private long getNextMill() { - long mill = nowTimestamp(); - while (mill <= lastTimestamp) { - mill = nowTimestamp(); - } - return mill; - } - - private long nowTimestamp() { - return System.currentTimeMillis(); - } - - public static class SnowFlakeException extends Exception { - public SnowFlakeException(String message) { - super(message); - } - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/BusinessTimeUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/BusinessTimeUtils.java index 23db4b626b1020b612ef389bd6ffbdda4c70fe4b..4c6787f9cb06fca97b508870819bfd9c4c8ebf67 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/BusinessTimeUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/BusinessTimeUtils.java @@ -14,10 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.common.utils.placeholder; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.spi.utils.DateUtils; import java.util.Date; import java.util.HashMap; @@ -33,43 +35,46 @@ import static org.apache.commons.lang.time.DateUtils.addDays; * business time utils */ public class BusinessTimeUtils { - private BusinessTimeUtils() { - throw new IllegalStateException("BusinessTimeUtils class"); - } - /** - * get business time in parameters by different command types - * - * @param commandType command type - * @param runTime run time or schedule time - * @return business time - */ - public static Map getBusinessTime(CommandType commandType, Date runTime) { - Date businessDate = runTime; - switch (commandType) { - case COMPLEMENT_DATA: - break; - case START_PROCESS: - case START_CURRENT_TASK_PROCESS: - case RECOVER_TOLERANCE_FAULT_PROCESS: - case RECOVER_SUSPENDED_PROCESS: - case START_FAILURE_TASK_PROCESS: - case REPEAT_RUNNING: - case SCHEDULER: - default: - businessDate = addDays(new Date(), -1); - if (runTime != null){ - /** - * If there is a scheduled time, take the scheduling time. 
Recovery from failed nodes, suspension of recovery, re-run for scheduling - */ - businessDate = addDays(runTime, -1); - } - break; + private BusinessTimeUtils() { + throw new IllegalStateException("BusinessTimeUtils class"); + } + + /** + * get business time in parameters by different command types + * + * @param commandType command type + * @param runTime run time or schedule time + * @return business time + */ + public static Map getBusinessTime(CommandType commandType, Date runTime) { + Date businessDate; + Map result = new HashMap<>(); + switch (commandType) { + case COMPLEMENT_DATA: + if (runTime == null) { + return result; + } + case START_PROCESS: + case START_CURRENT_TASK_PROCESS: + case RECOVER_TOLERANCE_FAULT_PROCESS: + case RECOVER_SUSPENDED_PROCESS: + case START_FAILURE_TASK_PROCESS: + case REPEAT_RUNNING: + case SCHEDULER: + default: + businessDate = addDays(new Date(), -1); + if (runTime != null) { + /** + * If there is a scheduled time, take the scheduling time. Recovery from failed nodes, suspension of recovery, re-run for scheduling + */ + businessDate = addDays(runTime, -1); + } + break; + } + Date businessCurrentDate = addDays(businessDate, 1); + result.put(Constants.PARAMETER_CURRENT_DATE, format(businessCurrentDate, PARAMETER_FORMAT_DATE)); + result.put(Constants.PARAMETER_BUSINESS_DATE, format(businessDate, PARAMETER_FORMAT_DATE)); + result.put(Constants.PARAMETER_DATETIME, format(businessCurrentDate, PARAMETER_FORMAT_TIME)); + return result; } - Date businessCurrentDate = addDays(businessDate, 1); - Map result = new HashMap<>(); - result.put(Constants.PARAMETER_CURRENT_DATE, format(businessCurrentDate, PARAMETER_FORMAT_DATE)); - result.put(Constants.PARAMETER_BUSINESS_DATE, format(businessDate, PARAMETER_FORMAT_DATE)); - result.put(Constants.PARAMETER_DATETIME, format(businessCurrentDate, PARAMETER_FORMAT_TIME)); - return result; - } } diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PropertyPlaceholderHelper.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PropertyPlaceholderHelper.java index 4fb8555233995243e1da398090c34bb0c4eaf15b..099a6f65a8383269d56bf6d83e72518c977fa69e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PropertyPlaceholderHelper.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PropertyPlaceholderHelper.java @@ -16,12 +16,13 @@ */ package org.apache.dolphinscheduler.common.utils.placeholder; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.*; +import static java.util.Objects.requireNonNull; + /** * Utility class for working with Strings that have placeholder values in them. A placeholder takes the form * {@code ${name}}. Using {@code PropertyPlaceholderHelper} these placeholders can be substituted for @@ -36,7 +37,7 @@ public class PropertyPlaceholderHelper { private static final Log logger = LogFactory.getLog(PropertyPlaceholderHelper.class); - private static final Map wellKnownSimplePrefixes = new HashMap(4); + private static final Map wellKnownSimplePrefixes = new HashMap<>(4); static { wellKnownSimplePrefixes.put("}", "{"); @@ -58,16 +59,7 @@ public class PropertyPlaceholderHelper { /** * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. - * Unresolvable placeholders are ignored. 
- * @param placeholderPrefix the prefix that denotes the start of a placeholder - * @param placeholderSuffix the suffix that denotes the end of a placeholder - */ - public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix) { - this(placeholderPrefix, placeholderSuffix, null, true); - } - - /** - * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. + * * @param placeholderPrefix the prefix that denotes the start of a placeholder * @param placeholderSuffix the suffix that denotes the end of a placeholder * @param valueSeparator the separating character between the placeholder variable @@ -78,15 +70,14 @@ public class PropertyPlaceholderHelper { public PropertyPlaceholderHelper(String placeholderPrefix, String placeholderSuffix, String valueSeparator, boolean ignoreUnresolvablePlaceholders) { - notNull(placeholderPrefix, "'placeholderPrefix' must not be null"); - notNull(placeholderSuffix, "'placeholderSuffix' must not be null"); + requireNonNull((Object) placeholderPrefix, "'placeholderPrefix' must not be null"); + requireNonNull((Object) placeholderSuffix, "'placeholderSuffix' must not be null"); this.placeholderPrefix = placeholderPrefix; this.placeholderSuffix = placeholderSuffix; String simplePrefixForSuffix = wellKnownSimplePrefixes.get(this.placeholderSuffix); if (simplePrefixForSuffix != null && this.placeholderPrefix.endsWith(simplePrefixForSuffix)) { this.simplePrefix = simplePrefixForSuffix; - } - else { + } else { this.simplePrefix = this.placeholderPrefix; } this.valueSeparator = valueSeparator; @@ -94,37 +85,21 @@ public class PropertyPlaceholderHelper { } - /** - * Replaces all placeholders of format {@code ${name}} with the corresponding - * property from the supplied {@link Properties}. 
- * @param value the value containing the placeholders to be replaced - * @param properties the {@code Properties} to use for replacement - * @return the supplied value with placeholders replaced inline - */ - public String replacePlaceholders(String value, final Properties properties) { - notNull(properties, "'properties' must not be null"); - return replacePlaceholders(value, new PlaceholderResolver() { - @Override - public String resolvePlaceholder(String placeholderName) { - return properties.getProperty(placeholderName); - } - }); - } - /** * Replaces all placeholders of format {@code ${name}} with the value returned * from the supplied {@link PlaceholderResolver}. + * * @param value the value containing the placeholders to be replaced * @param placeholderResolver the {@code PlaceholderResolver} to use for replacement * @return the supplied value with placeholders replaced inline */ public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { - notNull(value, "'value' must not be null"); + requireNonNull((Object) value, "'value' must not be null"); return parseStringValue(value, placeholderResolver, new HashSet()); } protected String parseStringValue( - String value, PlaceholderResolver placeholderResolver, Set visitedPlaceholders) { + String value, PlaceholderResolver placeholderResolver, Set visitedPlaceholders) { StringBuilder result = new StringBuilder(value); @@ -136,7 +111,7 @@ public class PropertyPlaceholderHelper { String originalPlaceholder = placeholder; if (!visitedPlaceholders.add(originalPlaceholder)) { throw new IllegalArgumentException( - "Circular placeholder reference '" + originalPlaceholder + "' in property definitions"); + "Circular placeholder reference '" + originalPlaceholder + "' in property definitions"); } // Recursive invocation, parsing placeholders contained in the placeholder key. 
placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders); @@ -162,18 +137,15 @@ public class PropertyPlaceholderHelper { logger.trace("Resolved placeholder '" + placeholder + "'"); } startIndex = result.indexOf(this.placeholderPrefix, startIndex + propVal.length()); - } - else if (this.ignoreUnresolvablePlaceholders) { + } else if (this.ignoreUnresolvablePlaceholders) { // Proceed with unprocessed value. startIndex = result.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length()); - } - else { + } else { throw new IllegalArgumentException("Could not resolve placeholder '" + - placeholder + "'" + " in value \"" + value + "\""); + placeholder + "'" + " in value \"" + value + "\""); } visitedPlaceholders.remove(originalPlaceholder); - } - else { + } else { startIndex = -1; } } @@ -189,16 +161,13 @@ public class PropertyPlaceholderHelper { if (withinNestedPlaceholder > 0) { withinNestedPlaceholder--; index = index + this.placeholderSuffix.length(); - } - else { + } else { return index; } - } - else if (substringMatch(buf, index, this.simplePrefix)) { + } else if (substringMatch(buf, index, this.simplePrefix)) { withinNestedPlaceholder++; index = index + this.simplePrefix.length(); - } - else { + } else { index++; } } @@ -213,6 +182,7 @@ public class PropertyPlaceholderHelper { /** * Resolve the supplied placeholder name to the replacement value. + * * @param placeholderName the name of the placeholder to resolve * @return the replacement value, or {@code null} if no replacement is to be made */ @@ -222,6 +192,7 @@ public class PropertyPlaceholderHelper { /** * Test whether the given string matches the given substring * at the given index. 
+ * * @param str the original string (or StringBuilder) * @param index the index in the original string to start matching against * @param substring the substring to match at the given index @@ -236,20 +207,5 @@ public class PropertyPlaceholderHelper { } return true; } - - /** - * Assert that an object is not {@code null}. - *

Assert.notNull(clazz, "The class must not be null");
- * @param object the object to check - * @param message the exception message to use if the assertion fails - * @throws IllegalArgumentException if the object is {@code null} - */ - public static void notNull(Object object, String message) { - if (object == null) { - throw new IllegalArgumentException(message); - } - } - - } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java index bfb5e28544301fb190e8a1752b3799d1e71d57c7..19744602f30f68434d76ad6adb179217d3c64070 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java @@ -24,7 +24,7 @@ import static org.apache.dolphinscheduler.common.Constants.DIVISION_STRING; import static org.apache.dolphinscheduler.common.Constants.LEFT_BRACE_CHAR; import static org.apache.dolphinscheduler.common.Constants.LEFT_BRACE_STRING; import static org.apache.dolphinscheduler.common.Constants.MULTIPLY_CHAR; -import static org.apache.dolphinscheduler.common.Constants.MULTIPLY_STRING; +import static org.apache.dolphinscheduler.common.Constants.STAR; import static org.apache.dolphinscheduler.common.Constants.N; import static org.apache.dolphinscheduler.common.Constants.P; import static org.apache.dolphinscheduler.common.Constants.RIGHT_BRACE_CHAR; @@ -266,9 +266,9 @@ public class TimePlaceholderUtils { * @return true or false */ private static boolean compare(String peek, String cur) { - if (MULTIPLY_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { + if (STAR.equals(peek) && (DIVISION_STRING.equals(cur) || STAR.equals(cur) || ADD_STRING.equals(cur) || 
SUBTRACT_STRING.equals(cur))) { return true; - } else if (DIVISION_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || MULTIPLY_STRING.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { + } else if (DIVISION_STRING.equals(peek) && (DIVISION_STRING.equals(cur) || STAR.equals(cur) || ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { return true; } else if (ADD_STRING.equals(peek) && (ADD_STRING.equals(cur) || SUBTRACT_STRING.equals(cur))) { return true; diff --git a/dolphinscheduler-common/src/main/resources/common.properties b/dolphinscheduler-common/src/main/resources/common.properties index 4005a0afafd82283c2c5c76d5123e614b7a274ce..eb5de5d905113c3f6d11d24a897c7452f78b5e47 100644 --- a/dolphinscheduler-common/src/main/resources/common.properties +++ b/dolphinscheduler-common/src/main/resources/common.properties @@ -75,6 +75,9 @@ datasource.encryption.enable=false # datasource encryption salt datasource.encryption.salt=!@#$%^&* +# Whether hive SQL is executed in the same session +support.hive.oneSession=false + # use sudo or not, if set true, executing user is tenant user and deploy user needs sudo permissions; if set false, executing user is the deploy user and doesn't need sudo permissions sudo.enable=true @@ -89,3 +92,6 @@ sudo.enable=true # development state development.state=false + +#datasource.plugin.dir config +datasource.plugin.dir=lib/plugin/datasource diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/CommonTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/CommonTest.java deleted file mode 100644 index 3752fe670c41304a99f28c7f1c9615828a1b67c5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/CommonTest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common; - -import static org.junit.Assert.assertTrue; - -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; - -import org.junit.Test; - -/** - * CommonTest - */ -public class CommonTest { - - public static void setFinalStatic(Field field, Object newValue) throws NoSuchFieldException, IllegalAccessException { - field.setAccessible(true); - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); - field.set(null, newValue); - } - - @Test - public void testSetFinalStatic() throws Exception { - setFinalStatic(Constants.class.getDeclaredField("KUBERNETES_MODE"), true); - assertTrue(Constants.KUBERNETES_MODE); - } - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java index 3280a9629f74177e76deb751be92cc37ee9967be..3f2c3016ba0cb5bf006c74ca25ff289a5eed7961 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/ConstantsTest.java @@ -16,7 
+16,8 @@ */ package org.apache.dolphinscheduler.common; -import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.commons.lang.SystemUtils; + import org.junit.Assert; import org.junit.Test; @@ -30,7 +31,7 @@ public class ConstantsTest { */ @Test public void testPID() { - if (OSUtils.isWindows()) { + if (SystemUtils.IS_OS_WINDOWS) { Assert.assertEquals(Constants.PID, "handle"); } else { Assert.assertEquals(Constants.PID, "pid"); diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java index d6f3ffd0247e3a785a87306f3380e277f28b6525..480ff2dd8b5a81dd0efb487e92adf2c3f39ccbdf 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java @@ -40,13 +40,10 @@ public class OSUtilsTest { } @Test - public void physicalMemorySize() { - double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); - double totalPhysicalMemorySize = OSUtils.totalPhysicalMemorySize(); - logger.info("availablePhysicalMemorySize : {}", availablePhysicalMemorySize); - logger.info("totalPhysicalMemorySize : {}", totalPhysicalMemorySize); - Assert.assertTrue(availablePhysicalMemorySize >= 0.0); - Assert.assertTrue(totalPhysicalMemorySize >= 0.0); + public void diskAvailable() { + double diskAvailable = OSUtils.diskAvailable(); + logger.info("diskAvailable : {}", diskAvailable); + Assert.assertTrue(diskAvailable >= 0.0); } @Test diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java index cfc069f9bb4c0608db44d1c979ff109b9ee0d1a7..1d17541b81c0a4de5c06884401f02baec1353834 100644 --- 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OshiTest.java @@ -51,7 +51,7 @@ public class OshiTest { logger.info("Checking CPU..."); - printCpu(hal.getProcessor()); + printCpu(si); } @@ -64,18 +64,21 @@ public class OshiTest { } - private static void printCpu(CentralProcessor processor) { - logger.info(String.format("CPU load: %.1f%% (OS MXBean)%n", processor.getSystemCpuLoad() * 100));//CPU load: 24.9% (OS MXBean) - logger.info("CPU load averages : {}", processor.getSystemLoadAverage());//CPU load averages : 1.5234375 + private static void printCpu(SystemInfo si) { + CentralProcessor processor = si.getHardware().getProcessor(); + long[] systemCpuLoadTicks = processor.getSystemCpuLoadTicks(); + Util.sleep(1000); + logger.info(String.format("CPU load: %.1f%% (OS MXBean)%n", processor.getSystemCpuLoadBetweenTicks(systemCpuLoadTicks) * 100));//CPU load: 24.9% (OS MXBean) + logger.info("CPU load averages : {}", processor.getSystemLoadAverage(1)[0]);//CPU load averages : 1.5234375 - logger.info("Uptime: " + FormatUtil.formatElapsedSecs(processor.getSystemUptime())); + logger.info("Uptime: " + FormatUtil.formatElapsedSecs(si.getOperatingSystem().getSystemUptime())); logger.info("Context Switches/Interrupts: " + processor.getContextSwitches() + " / " + processor.getInterrupts()); long[] prevTicks = processor.getSystemCpuLoadTicks(); logger.info("CPU, IOWait, and IRQ ticks @ 0 sec:" + Arrays.toString(prevTicks)); - //Wait a second... + //Wait a second... 
Util.sleep(1000); long[] ticks = processor.getSystemCpuLoadTicks(); logger.info("CPU, IOWait, and IRQ ticks @ 1 sec:" + Arrays.toString(ticks)); @@ -93,7 +96,7 @@ public class OshiTest { "User: %.1f%% Nice: %.1f%% System: %.1f%% Idle: %.1f%% IOwait: %.1f%% IRQ: %.1f%% SoftIRQ: %.1f%% Steal: %.1f%%%n", 100d * user / totalCpu, 100d * nice / totalCpu, 100d * sys / totalCpu, 100d * idle / totalCpu, 100d * iowait / totalCpu, 100d * irq / totalCpu, 100d * softirq / totalCpu, 100d * steal / totalCpu)); - logger.info(String.format("CPU load: %.1f%% (counting ticks)%n", processor.getSystemCpuLoadBetweenTicks() * 100)); + logger.info(String.format("CPU load: %.1f%% (counting ticks)%n", processor.getSystemCpuLoadBetweenTicks(prevTicks) * 100)); @@ -103,7 +106,9 @@ public class OshiTest { + (loadAverage[2] < 0 ? " N/A" : String.format(" %.2f", loadAverage[2]))); // per core CPU StringBuilder procCpu = new StringBuilder("CPU load per processor:"); - double[] load = processor.getProcessorCpuLoadBetweenTicks(); + long[][] processorCpuLoadTicks = processor.getProcessorCpuLoadTicks(); + Util.sleep(1000); + double[] load = processor.getProcessorCpuLoadBetweenTicks(processorCpuLoadTicks); for (double avg : load) { procCpu.append(String.format(" %.1f%%", avg * 100)); } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java index cd7b4f2200cbd4a2cb6d1e0e4cfc8aaceb0fa5f2..f7ad9de70e3be5c7e5fe5bfb52cc0098c34e6b53 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java @@ -18,7 +18,9 @@ package org.apache.dolphinscheduler.common.task; import org.apache.dolphinscheduler.common.process.ResourceInfo; import 
org.apache.dolphinscheduler.common.task.flink.FlinkParameters; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; + +import org.apache.commons.collections.CollectionUtils; + import org.junit.Assert; import org.junit.Test; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SparkParametersTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SparkParametersTest.java index dbd98ed4fe31cace01d5161a1ace662783138cbe..a3d4d9eda9d780d7ef0899b51e8918c0b36e7bed 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SparkParametersTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SparkParametersTest.java @@ -19,7 +19,8 @@ package org.apache.dolphinscheduler.common.task; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; + +import org.apache.commons.collections.CollectionUtils; import java.util.LinkedList; import java.util.List; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqlParametersTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqlParametersTest.java index 17e95cf9d62b166edb21156bd1a27e6202582e0d..3f7a980b42be2f6edc4fff4214a35425258df038 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqlParametersTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqlParametersTest.java @@ -23,7 +23,8 @@ import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; 
+ +import org.apache.commons.collections.CollectionUtils; import java.util.ArrayList; import java.util.List; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java index 987915488959cd624f38e1775d99e170e108f60a..11c114c29e1792acf404739387ae902bb6ad577d 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java @@ -25,13 +25,10 @@ import org.slf4j.LoggerFactory; * Thread Pool Executor Test */ public class ThreadPoolExecutorsTest { - private static final Logger logger = LoggerFactory.getLogger(ThreadPoolExecutors.class); - @Test public void testThreadPoolExecutors() throws InterruptedException { - Thread2[] threadArr = new Thread2[10]; for (int i = 0; i < threadArr.length; i++) { @@ -43,14 +40,10 @@ public class ThreadPoolExecutorsTest { Thread.currentThread().join(40000l); } - - //test thread - class Thread2 extends Thread { + static class Thread2 extends Thread { @Override public void run() { logger.info("ThreadPoolExecutors instance's hashcode is: {} ",ThreadPoolExecutors.getInstance("a",2).hashCode()); } } - - } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java similarity index 67% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtilsTest.java rename to dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java index 03880b69ccc28952c096057af297226acccd7bb9..d949bd82b8d411b69fe1b9ed701307c021fad69a 100644 --- 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CodeGenerateUtilsTest.java @@ -14,24 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.common.utils; +import java.util.HashSet; -import org.apache.dolphinscheduler.common.Constants; import org.junit.Assert; import org.junit.Test; - -public class SensitiveLogUtilsTest { - +public class CodeGenerateUtilsTest { @Test - public void testMaskDataSourcePwd() { - - String password = "123456"; - String emptyPassword = ""; - - Assert.assertEquals(Constants.PASSWORD_DEFAULT, SensitiveLogUtils.maskDataSourcePwd(password)); - Assert.assertEquals("", SensitiveLogUtils.maskDataSourcePwd(emptyPassword)); - + public void testNoGenerateDuplicateCode() throws CodeGenerateUtils.CodeGenerateException { + HashSet existsCode = new HashSet<>(); + for (int i = 0; i < 100; i++) { + Long currentCode = CodeGenerateUtils.getInstance().genCode(); + Assert.assertFalse(existsCode.contains(currentCode)); + existsCode.add(currentCode); + } } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java deleted file mode 100644 index ccf8ace802b9f0e1d22ac3f22d4a47f58415310e..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.common.utils; - -import org.apache.dolphinscheduler.common.Constants; -import org.junit.Assert; -import org.junit.Test; - -import java.util.*; - - -public class CollectionUtilsTest { - - @Test - public void equalLists() { - Assert.assertTrue(CollectionUtils.equalLists(null,null)); - Assert.assertTrue(CollectionUtils.equalLists(new ArrayList(),new ArrayList())); - List a = new ArrayList(); - a.add(1); - a.add(2); - List b = new ArrayList(); - b.add(1); - b.add(2); - Assert.assertTrue(CollectionUtils.equalLists(a, b)); - a.add(1); - Assert.assertFalse(CollectionUtils.equalLists(a, b)); - b.add(2); - Assert.assertFalse(CollectionUtils.equalLists(a, b)); - a.add(2); - b.add(1); - a.add(4); - b.add(2); - Assert.assertFalse(CollectionUtils.equalLists(a, b)); - Assert.assertFalse(CollectionUtils.equalLists(null, new ArrayList())); - Assert.assertFalse(CollectionUtils.equalLists(new ArrayList(), null)); - } - - @Test - public void subtract() { - Set a = new HashSet(); - a.add(1); - a.add(2); - a.add(3); - Set b = new HashSet(); - b.add(0); - b.add(2); - b.add(4); - Assert.assertArrayEquals(new Integer[]{1,3},CollectionUtils.subtract(a,b).toArray()); - } - - @Test - public void stringToMap() { - Map a = CollectionUtils.stringToMap("a=b;c=d;", ";"); - Assert.assertNotNull(a); - Assert.assertTrue(a.size() == 2); - a = CollectionUtils.stringToMap(null, ";"); - 
Assert.assertTrue(a.isEmpty()); - a = CollectionUtils.stringToMap("", ";"); - Assert.assertTrue(a.isEmpty()); - a = CollectionUtils.stringToMap("a=b;c=d", ""); - Assert.assertTrue(a.isEmpty()); - a = CollectionUtils.stringToMap("a=b;c=d", null); - Assert.assertTrue(a.isEmpty()); - a = CollectionUtils.stringToMap("a=b;c=d;e=f", ";"); - Assert.assertEquals(3, a.size()); - a = CollectionUtils.stringToMap("a;b=f", ";"); - Assert.assertTrue(a.isEmpty()); - a = CollectionUtils.stringToMap("a=b;c=d;e=f;", ";", "test"); - Assert.assertEquals(3, a.size()); - Assert.assertNotNull(a.get("testa")); - } - - @Test - public void getListByExclusion() { - Assert.assertNotNull(CollectionUtils.getListByExclusion(null, null)); - List originList = new ArrayList<>(); - originList.add(1); - originList.add(2); - List> ret = CollectionUtils.getListByExclusion(originList, null); - Assert.assertEquals(2, ret.size()); - ret = CollectionUtils.getListByExclusion(originList, new HashSet<>()); - Assert.assertEquals(2, ret.size()); - Assert.assertFalse(ret.get(0).isEmpty()); - Set exclusion = new HashSet<>(); - exclusion.add(Constants.CLASS); - ret = CollectionUtils.getListByExclusion(originList, exclusion); - Assert.assertEquals(2, ret.size()); - Assert.assertTrue(ret.get(0).isEmpty()); - } - - @Test - public void isNotEmpty() { - List list = new ArrayList<>(); - Assert.assertFalse(CollectionUtils.isNotEmpty(list)); - Assert.assertFalse(CollectionUtils.isNotEmpty(null)); - } - @Test - public void isEmpty(){ - List list = new ArrayList<>(); - Assert.assertTrue(CollectionUtils.isEmpty(list)); - Assert.assertTrue(CollectionUtils.isEmpty(null)); - list.add(1); - Assert.assertFalse(CollectionUtils.isEmpty(list)); - } - @Test - public void isEqualCollection() { - List a = new ArrayList<>(); - a.add(1); - List b = new ArrayList<>(); - b.add(1); - Assert.assertTrue(CollectionUtils.isEqualCollection(a,b)); - b.add(2); - Assert.assertFalse(CollectionUtils.isEqualCollection(a,b)); - } - - @Test - public 
void getCardinalityMap(){ - List a = new ArrayList<>(); - a.add(1); - a.add(2); - a.add(2); - a.add(3); - a.add(3); - a.add(3); - Map cardinalityMap = CollectionUtils.getCardinalityMap(a); - Assert.assertEquals(3, cardinalityMap.size()); - Assert.assertEquals(1, cardinalityMap.get(1).intValue()); - Assert.assertEquals(2, cardinalityMap.get(2).intValue()); - Assert.assertEquals(3, cardinalityMap.get(3).intValue()); - } - - @Test - public void transformToList() { - List stringList = new ArrayList<>(); - stringList.add("1"); - List integers = CollectionUtils.transformToList(stringList, String::length); - Assert.assertFalse(integers.isEmpty()); - } - - @Test - public void collectionToMap() { - List stringList = new ArrayList<>(); - stringList.add("1"); - Map lengthStringMap = CollectionUtils.collectionToMap(stringList, String::length); - Assert.assertFalse(lengthStringMap.isEmpty()); - } -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java index 92f0d7bd49c2b47178c12424d3919ad25e87567e..713709030f63e9f35087d4e68967cdcb2393977b 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java @@ -17,9 +17,8 @@ package org.apache.dolphinscheduler.common.utils; -import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import java.net.InetAddress; @@ -28,7 +27,6 @@ import java.net.UnknownHostException; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import 
org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; @@ -53,41 +51,6 @@ public class CommonUtilsTest { Assert.assertTrue(true); } - @Test - public void getKerberosStartupState() { - boolean kerberosStartupState = CommonUtils.getKerberosStartupState(); - logger.info("kerberos startup state: {}",kerberosStartupState); - Assert.assertFalse(kerberosStartupState); - PowerMockito.mockStatic(PropertyUtils.class); - PowerMockito.when(PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE)).thenReturn("HDFS"); - PowerMockito.when(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)).thenReturn(Boolean.TRUE); - kerberosStartupState = CommonUtils.getKerberosStartupState(); - logger.info("kerberos startup state: {}",kerberosStartupState); - Assert.assertTrue(kerberosStartupState); - - } - - @Test - public void loadKerberosConf() { - try { - PowerMockito.mockStatic(PropertyUtils.class); - PowerMockito.when(PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE)).thenReturn("HDFS"); - PowerMockito.when(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)).thenReturn(Boolean.TRUE); - PowerMockito.when(PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)).thenReturn("/opt/krb5.conf"); - PowerMockito.when(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME)).thenReturn("hdfs-mycluster@ESZ.COM"); - PowerMockito.when(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)).thenReturn("/opt/hdfs.headless.keytab"); - - PowerMockito.mockStatic(UserGroupInformation.class); - boolean result = CommonUtils.loadKerberosConf(new Configuration()); - Assert.assertTrue(result); - - CommonUtils.loadKerberosConf(null, null, null); - - } catch (Exception e) { - Assert.fail("load Kerberos Conf failed"); - } - } - @Test public void getHdfsDataBasePath() { logger.info(HadoopUtils.getHdfsDataBasePath()); @@ -124,41 +87,4 @@ public class CommonUtilsTest { 
Assert.assertTrue(true); } - @Test - public void encodePassword() { - - PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"true"); - - Assert.assertEquals("",CommonUtils.encodePassword("")); - Assert.assertEquals("IUAjJCVeJipNVEl6TkRVMg==",CommonUtils.encodePassword("123456")); - Assert.assertEquals("IUAjJCVeJipJVkZCV2xoVFYwQT0=",CommonUtils.encodePassword("!QAZXSW@")); - Assert.assertEquals("IUAjJCVeJipOV1JtWjJWeUtFQT0=",CommonUtils.encodePassword("5dfger(@")); - - PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"false"); - - Assert.assertEquals("",CommonUtils.encodePassword("")); - Assert.assertEquals("123456",CommonUtils.encodePassword("123456")); - Assert.assertEquals("!QAZXSW@",CommonUtils.encodePassword("!QAZXSW@")); - Assert.assertEquals("5dfger(@",CommonUtils.encodePassword("5dfger(@")); - - } - - @Test - public void decodePassword() { - - PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true"); - - Assert.assertEquals("", CommonUtils.decodePassword("")); - Assert.assertEquals("123456", CommonUtils.decodePassword("IUAjJCVeJipNVEl6TkRVMg==")); - Assert.assertEquals("!QAZXSW@", CommonUtils.decodePassword("IUAjJCVeJipJVkZCV2xoVFYwQT0=")); - Assert.assertEquals("5dfger(@", CommonUtils.decodePassword("IUAjJCVeJipOV1JtWjJWeUtFQT0=")); - - PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false"); - - Assert.assertEquals("", CommonUtils.decodePassword("")); - Assert.assertEquals("123456", CommonUtils.decodePassword("123456")); - Assert.assertEquals("!QAZXSW@", CommonUtils.decodePassword("!QAZXSW@")); - Assert.assertEquals("5dfger(@", CommonUtils.decodePassword("5dfger(@")); - } - } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java index 
40e3e5a8b0701be315b86f5382c81bbcc24bf3d4..5ed16f432ed32a8f30deaca84236bcbabf1b8390 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java @@ -360,13 +360,13 @@ public class DependentUtilsTest { @Test public void testGetLast24Hour() { - Date curDay = DateUtils.stringToDate("2020-05-15 12:10:00"); + Date curDay = DateUtils.stringToDate("2020-05-15 02:10:00"); String dateValue = "last24Hours"; List dateIntervals = DependentUtils.getDateIntervalList(curDay, dateValue); List expect = Lists.newArrayList(); - for (int a = 1; a < 24; a++) { + for (int a = 2; a < 24; a++) { String i = a + ""; if (a < 10) { i = "0" + i; @@ -374,8 +374,8 @@ public class DependentUtilsTest { DateInterval dateInterval = new DateInterval(DateUtils.getStartOfHour(DateUtils.stringToDate("2020-05-14 " + i + ":00:00")), DateUtils.getEndOfHour(DateUtils.stringToDate("2020-05-14 " + i + ":59:59"))); expect.add(dateInterval); } - DateInterval dateInterval = new DateInterval(DateUtils.getStartOfHour(DateUtils.stringToDate("2020-05-15 00:00:00")), DateUtils.getEndOfHour(DateUtils.stringToDate("2020-05-15 00:59:59"))); - expect.add(dateInterval); + expect.add(new DateInterval(DateUtils.getStartOfHour(DateUtils.stringToDate("2020-05-15 00:00:00")), DateUtils.getEndOfHour(DateUtils.stringToDate("2020-05-15 00:59:59")))); + expect.add(new DateInterval(DateUtils.getStartOfHour(DateUtils.stringToDate("2020-05-15 01:00:00")), DateUtils.getEndOfHour(DateUtils.stringToDate("2020-05-15 01:59:59")))); Assert.assertEquals(24, dateIntervals.size()); diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java index 4cbd4ae682b9be93a300c237aa2802c101f4295a..fb0075eca90770355e0d297e4e15fa9240a0cc5d 100644 
--- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java @@ -36,14 +36,6 @@ import org.powermock.modules.junit4.PowerMockRunner; @PrepareForTest(DateUtils.class) public class FileUtilsTest { - @Test - public void suffix() { - Assert.assertEquals("java", FileUtils.suffix("ninfor.java")); - Assert.assertEquals("", FileUtils.suffix(null)); - Assert.assertEquals("", FileUtils.suffix("")); - Assert.assertEquals("", FileUtils.suffix("ninfor-java")); - } - @Test public void testGetDownloadFilename() { PowerMockito.mockStatic(DateUtils.class); @@ -97,4 +89,34 @@ public class FileUtilsTest { Assert.assertEquals(content, fileContent); } + @Test + public void testDirectoryTraversal() { + // test case which do not directory traversal + String path; + path = "abc.txt"; + Assert.assertFalse(FileUtils.directoryTraversal(path)); + + path = "abc...txt"; + Assert.assertFalse(FileUtils.directoryTraversal(path)); + + path = "..abc.txt"; + Assert.assertFalse(FileUtils.directoryTraversal(path)); + + // test case which will directory traversal + path = "../abc.txt"; + Assert.assertTrue(FileUtils.directoryTraversal(path)); + + path = "../../abc.txt"; + Assert.assertTrue(FileUtils.directoryTraversal(path)); + + path = "abc../def.txt"; + Assert.assertTrue(FileUtils.directoryTraversal(path)); + + path = "abc./def.txt"; + Assert.assertTrue(FileUtils.directoryTraversal(path)); + + path = "abc/def...txt"; + Assert.assertTrue(FileUtils.directoryTraversal(path)); + } + } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java index de7f9a4489a73338d61987647a26576292a1e233..a349cc6d69d3f0970f534d6a8383abfedd765a2f 100644 --- 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.common.utils; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.hadoop.conf.Configuration; import org.junit.Assert; import org.junit.Test; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtils.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HeartBeatTest.java similarity index 30% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtils.java rename to dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HeartBeatTest.java index b95417e47edfc1df40fb16b0f5d5a0f4b4e91b0b..487fbe97d0b3988f464ec2bb724abbcbf09b35eb 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SensitiveLogUtils.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HeartBeatTest.java @@ -17,29 +17,61 @@ package org.apache.dolphinscheduler.common.utils; +import static org.junit.Assert.assertEquals; + import org.apache.dolphinscheduler.common.Constants; -import org.apache.commons.lang.StringUtils; +import org.junit.Test; /** - * sensitive log Util + * NetUtilsTest */ -public class SensitiveLogUtils { +public class HeartBeatTest { + + @Test + public void testAbnormalState() { + long startupTime = System.currentTimeMillis(); + double loadAverage = 100; + double reservedMemory = 100; + HeartBeat heartBeat = new HeartBeat(startupTime, loadAverage, reservedMemory); + heartBeat.updateServerState(); + assertEquals(Constants.ABNORMAL_NODE_STATUS, heartBeat.getServerStatus()); + } + + @Test + public void 
testBusyState() { + long startupTime = System.currentTimeMillis(); + double loadAverage = 0; + double reservedMemory = 0; + int hostWeight = 1; + int taskCount = 200; + int workerThreadCount = 199; + HeartBeat heartBeat = new HeartBeat(startupTime, loadAverage, reservedMemory, hostWeight, workerThreadCount); - private SensitiveLogUtils() { - throw new UnsupportedOperationException("Construct SensitiveLogUtils"); + heartBeat.setWorkerWaitingTaskCount(taskCount); + heartBeat.updateServerState(); + assertEquals(Constants.BUSY_NODE_STATUE, heartBeat.getServerStatus()); } - /** - * @param dataSourcePwd data source password - * @return String - */ - public static String maskDataSourcePwd(String dataSourcePwd) { + @Test + public void testDecodeHeartBeat() throws Exception { + String heartBeatInfo = "0.35,0.58,5.86,3.09,6.47,5.0,1.0,1634033006749,1634033006857,1,29732,1,199,200"; + HeartBeat heartBeat = HeartBeat.decodeHeartBeat(heartBeatInfo); - if (!StringUtils.isEmpty(dataSourcePwd)) { - dataSourcePwd = Constants.PASSWORD_DEFAULT; - } - return dataSourcePwd; + double delta = 0.001; + assertEquals(0.35, heartBeat.getCpuUsage(), delta); + assertEquals(0.58, heartBeat.getMemoryUsage(), delta); + assertEquals(5.86, heartBeat.getDiskAvailable(), delta); + assertEquals(3.09, heartBeat.getLoadAverage(), delta); + assertEquals(6.47, heartBeat.getAvailablePhysicalMemorySize(), delta); + assertEquals(5.0, heartBeat.getMaxCpuloadAvg(), delta); + assertEquals(1.0, heartBeat.getReservedMemory(), delta); + assertEquals(1634033006749L, heartBeat.getStartupTime()); + assertEquals(1634033006857L, heartBeat.getReportTime()); + assertEquals(1, heartBeat.getServerStatus()); + assertEquals(29732, heartBeat.getProcessId()); + assertEquals(199, heartBeat.getWorkerExecThreadCount()); + assertEquals(200, heartBeat.getWorkerWaitingTaskCount()); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/LoggerUtilsTest.java 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/LoggerUtilsTest.java deleted file mode 100644 index 80f0f5808c1527ec24451b8167aba107e7303fa0..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/LoggerUtilsTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.utils; - -import java.io.BufferedReader; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.List; -import java.util.Optional; - -import org.junit.Assert; -import org.junit.Test; -import org.junit.Test.None; -import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({LoggerUtils.class}) -public class LoggerUtilsTest { - private Logger logger = LoggerFactory.getLogger(LoggerUtilsTest.class); - - @Test - public void buildTaskId() { - - String taskId = LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, 798L,1,4084, 15210); - - Assert.assertEquals(" - [taskAppId=TASK-798_1-4084-15210]", taskId); - } - - @Test - public void getAppIds() { - List appIdList = LoggerUtils.getAppIds("Running job: application_1_1", logger); - Assert.assertEquals("application_1_1", appIdList.get(0)); - - } - - @Test - public void testReadWholeFileContent() throws Exception { - BufferedReader bufferedReader = PowerMockito.mock(BufferedReader.class); - PowerMockito.whenNew(BufferedReader.class).withAnyArguments().thenReturn(bufferedReader); - PowerMockito.when(bufferedReader.readLine()).thenReturn("").thenReturn(null); - FileInputStream fileInputStream = PowerMockito.mock(FileInputStream.class); - PowerMockito.whenNew(FileInputStream.class).withAnyArguments().thenReturn(fileInputStream); - - InputStreamReader inputStreamReader = PowerMockito.mock(InputStreamReader.class); - PowerMockito.whenNew(InputStreamReader.class).withAnyArguments().thenReturn(inputStreamReader); - - String log = LoggerUtils.readWholeFileContent("/tmp/log"); - Assert.assertNotNull(log); - - PowerMockito.when(bufferedReader.readLine()).thenThrow(new 
IOException()); - log = LoggerUtils.readWholeFileContent("/tmp/log"); - Assert.assertNotNull(log); - } - - @Test(expected = None.class) - public void testLogError() { - Optional loggerOptional = Optional.of(this.logger); - - LoggerUtils.logError(loggerOptional, "error message"); - LoggerUtils.logError(loggerOptional, new RuntimeException("error message")); - LoggerUtils.logError(loggerOptional, "error message", new RuntimeException("runtime exception")); - LoggerUtils.logInfo(loggerOptional, "info message"); - } -} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/NetUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/NetUtilsTest.java index 59dfa2f73a44527770c8bf4c3a1ced353a8a4286..cf629ca63190d4c4ccd5d2ffab303e3a564040d0 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/NetUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/NetUtilsTest.java @@ -23,17 +23,19 @@ import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import org.apache.dolphinscheduler.common.CommonTest; import org.apache.dolphinscheduler.common.Constants; import java.net.InetAddress; +import org.junit.After; import org.junit.Test; +import org.powermock.reflect.Whitebox; -/** - * NetUtilsTest - */ public class NetUtilsTest { + @After + public void reset() { + Whitebox.setInternalState(Constants.class, "KUBERNETES_MODE", false); + } @Test public void testGetAddr() { @@ -43,31 +45,31 @@ public class NetUtilsTest { } @Test - public void testGetHost() throws Exception { + public void testGetHost() { InetAddress address = mock(InetAddress.class); when(address.getCanonicalHostName()).thenReturn("dolphinscheduler-worker-0.dolphinscheduler-worker-headless.default.svc.cluster.local"); 
when(address.getHostName()).thenReturn("dolphinscheduler-worker-0"); when(address.getHostAddress()).thenReturn("172.17.0.15"); assertEquals("172.17.0.15", NetUtils.getHost(address)); - CommonTest.setFinalStatic(Constants.class.getDeclaredField("KUBERNETES_MODE"), true); + Whitebox.setInternalState(Constants.class, "KUBERNETES_MODE", true); assertEquals("dolphinscheduler-worker-0.dolphinscheduler-worker-headless", NetUtils.getHost(address)); address = mock(InetAddress.class); when(address.getCanonicalHostName()).thenReturn("busybox-1.default-subdomain.my-namespace.svc.cluster-domain.example"); when(address.getHostName()).thenReturn("busybox-1"); - CommonTest.setFinalStatic(Constants.class.getDeclaredField("KUBERNETES_MODE"), true); + Whitebox.setInternalState(Constants.class, "KUBERNETES_MODE", true); assertEquals("busybox-1.default-subdomain", NetUtils.getHost(address)); address = mock(InetAddress.class); when(address.getCanonicalHostName()).thenReturn("dolphinscheduler.cluster-domain.example"); when(address.getHostName()).thenReturn("dolphinscheduler"); - CommonTest.setFinalStatic(Constants.class.getDeclaredField("KUBERNETES_MODE"), true); + Whitebox.setInternalState(Constants.class, "KUBERNETES_MODE", true); assertEquals("dolphinscheduler.cluster-domain.example", NetUtils.getHost(address)); address = mock(InetAddress.class); when(address.getCanonicalHostName()).thenReturn("dolphinscheduler-worker-0"); when(address.getHostName()).thenReturn("dolphinscheduler-worker-0"); - CommonTest.setFinalStatic(Constants.class.getDeclaredField("KUBERNETES_MODE"), true); + Whitebox.setInternalState(Constants.class, "KUBERNETES_MODE", true); assertEquals("dolphinscheduler-worker-0", NetUtils.getHost(address)); } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java deleted file mode 100644 index 
8467cec9b4b7302db3d347fb12e0124184680c99..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.common.utils; - -import java.io.IOException; -import java.util.List; - -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class OSUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(OSUtilsTest.class); - - @Test - public void getUserList() { - List userList = OSUtils.getUserList(); - Assert.assertNotEquals("System user list should not be empty", userList.size(), 0); - logger.info("OS user list : {}", userList.toString()); - } - - @Test - public void testOSMetric() { - if (!OSUtils.isWindows()) { - double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); - Assert.assertTrue(availablePhysicalMemorySize >= 0.0d); - double totalPhysicalMemorySize = OSUtils.totalPhysicalMemorySize(); - Assert.assertTrue(totalPhysicalMemorySize >= 0.0d); - double loadAverage = OSUtils.loadAverage(); - logger.info("loadAverage {}", loadAverage); - double memoryUsage = OSUtils.memoryUsage(); - Assert.assertTrue(memoryUsage >= 0.0d); - double cpuUsage = OSUtils.cpuUsage(); - Assert.assertTrue(cpuUsage >= 0.0d || cpuUsage == -1.0d); - } else { - // TODO window ut - } - } - - @Test - public void getGroup() { - try { - String group = OSUtils.getGroup(); - Assert.assertNotNull(group); - } catch (IOException e) { - Assert.fail("get group failed " + e.getMessage()); - } - } - - @Test - public void createUser() { - boolean result = OSUtils.createUser("test123"); - if (result) { - Assert.assertTrue("create user test123 success", true); - } else { - Assert.assertTrue("create user test123 fail", true); - } - } - - @Test - public void createUserIfAbsent() { - OSUtils.createUserIfAbsent("test123"); - Assert.assertTrue("create user test123 success", true); - } - - @Test - public void testGetSudoCmd() { - String cmd = "kill -9 1234"; - String sudoCmd = OSUtils.getSudoCmd("test123", cmd); - Assert.assertEquals("sudo -u test123 " + cmd, sudoCmd); - } - - 
@Test - public void exeCmd() { - if (OSUtils.isMacOS() || !OSUtils.isWindows()) { - try { - String result = OSUtils.exeCmd("echo helloWorld"); - Assert.assertEquals("helloWorld\n",result); - } catch (IOException e) { - Assert.fail("exeCmd " + e.getMessage()); - } - } - } - @Test - public void getProcessID() { - int processId = OSUtils.getProcessID(); - Assert.assertNotEquals(0, processId); - } - @Test - public void checkResource() { - boolean resource = OSUtils.checkResource(100,0); - Assert.assertTrue(resource); - resource = OSUtils.checkResource(0,Double.MAX_VALUE); - Assert.assertFalse(resource); - } - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/RetryerUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/RetryerUtilsTest.java index 19b7853de375a812e9708e4630db7fd358a38f6f..7841e4658517f9fe5e77492b7679308d4df26a78 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/RetryerUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/RetryerUtilsTest.java @@ -213,113 +213,4 @@ public class RetryerUtilsTest { testRetryExceptionWithPara(true); testRetryExceptionWithPara(false); } - - @Test - public void testRetrySilent() { - try { - for (int execTarget = 1; execTarget <= 3; execTarget++) { - int finalExecTarget = execTarget; - int[] execTime = {0}; - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - return execTime[0] == finalExecTarget; - }); - Assert.assertEquals(finalExecTarget, execTime[0]); - Assert.assertTrue(result); - } - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - int[] execTime = {0}; - try { - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - return execTime[0] == 4; - }); - Assert.assertFalse(result); - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - } - - 
@Test - public void testRetrySilentWithPara() { - try { - for (int execTarget = 1; execTarget <= 3; execTarget++) { - int finalExecTarget = execTarget; - int[] execTime = {0}; - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - return execTime[0] == finalExecTarget; - }, true); - Assert.assertEquals(finalExecTarget, execTime[0]); - Assert.assertTrue(result); - } - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - int[] execTime = {0}; - try { - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - return execTime[0] == 4; - }, true); - Assert.assertFalse(result); - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - } - @Test - public void testRetrySilentNoCheckResult(){ - try { - for (int execTarget = 1; execTarget <= 5; execTarget++) { - int[] execTime = {0}; - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - return execTime[0] > 1; - }, false); - Assert.assertEquals(1, execTime[0]); - Assert.assertFalse(result); - } - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - } - private void testRetrySilentExceptionWithPara(boolean checkResult) { - try { - for (int execTarget = 1; execTarget <= 3; execTarget++) { - int finalExecTarget = execTarget; - int[] execTime = {0}; - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - if (execTime[0] != finalExecTarget) { - throw new IllegalArgumentException(String.valueOf(execTime[0])); - } - return true; - }, checkResult); - Assert.assertEquals(finalExecTarget, execTime[0]); - Assert.assertTrue(result); - } - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - int[] execTime = {0}; - try { - boolean result = RetryerUtils.retryCallSilent(() -> { - execTime[0]++; - if (execTime[0] != 4) { - throw new IllegalArgumentException(String.valueOf(execTime[0])); - } - return true; - }, checkResult); - 
Assert.assertFalse(result); - } catch (Exception e) { - Assert.fail("Unexpected exception " + e.getMessage()); - } - } - @Test - public void testRetrySilentException() { - testRetrySilentExceptionWithPara(true); - testRetrySilentExceptionWithPara(false); - } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtilsTest.java deleted file mode 100644 index 4f4c6673eac4c4f8bbb9a652f550c5226c7445cf..0000000000000000000000000000000000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SnowFlakeUtilsTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.common.utils; - -import org.junit.Test; - -public class SnowFlakeUtilsTest { - @Test - public void testNextId() { - try { - for (int i = 0; i < 5; i++) { - System.out.println(SnowFlakeUtils.getInstance().nextId()); - } - } catch (Exception e) { - e.printStackTrace(); - } - } -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java index 68f206d28f61d28ee3cdb6ae5c33f22e39bb0172..59b61fd98066dae7afa3aa0c3718e7ac8c0dc08b 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java @@ -20,15 +20,22 @@ package org.apache.dolphinscheduler.common.utils.placeholder; import org.apache.dolphinscheduler.common.utils.DateUtils; import java.util.Date; +import java.util.TimeZone; import org.junit.Assert; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; public class TimePlaceholderUtilsTest { private Date date; + @BeforeClass + public static void setup() { + TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai")); + } + @Before public void init() { date = DateUtils.parse("20170101010101", "yyyyMMddHHmmss"); @@ -72,4 +79,4 @@ public class TimePlaceholderUtilsTest { Assert.assertEquals("20170101", TimePlaceholderUtils.getPlaceHolderTime("yyyyMMdd", date)); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-dao/pom.xml b/dolphinscheduler-dao/pom.xml index 578bc1c0ce25fcbd0d4eac463e6a75ccf61c364f..958569d275cd867e6ed01b75041770f9fffcecfa 100644 --- a/dolphinscheduler-dao/pom.xml +++ b/dolphinscheduler-dao/pom.xml @@ -16,43 +16,26 @@ ~ limitations under the License. 
--> - + 4.0.0 org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT dolphinscheduler-dao ${project.artifactId} - - UTF-8 - org.apache.dolphinscheduler dolphinscheduler-common - - - protobuf-java - com.google.protobuf - - - junit - junit - test - - - org.jacoco - org.jacoco.agent - runtime - test + com.zaxxer + HikariCP com.baomidou @@ -75,38 +58,6 @@ postgresql - - org.springframework.boot - spring-boot-starter-test - test - - - org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - log4j-api - org.apache.logging.log4j - - - org.springframework.boot - spring-boot-starter-tomcat - - - org.apache.logging.log4j - log4j-to-slf4j - - - - mysql mysql-connector-java @@ -115,10 +66,6 @@ com.h2database h2 - - com.alibaba - druid - ch.qos.logback @@ -151,14 +98,35 @@ commons-configuration + + org.yaml + snakeyaml + org.springframework spring-test test - org.yaml - snakeyaml + org.springframework.boot + spring-boot-starter-test + test + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + sql/ + *.yaml + + + + + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 18e8e0de4d960b1316e079097a76e8e4de1ec600..827d6831de267281f4f72a00d5f65378c824992d 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -17,22 +17,24 @@ package org.apache.dolphinscheduler.dao; +import com.google.common.collect.Lists; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.enums.AlertEvent; import org.apache.dolphinscheduler.common.enums.AlertStatus; import org.apache.dolphinscheduler.common.enums.AlertWarnLevel; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import 
org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.AlertPluginInstance; import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.ServerAlertContent; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.AlertMapper; import org.apache.dolphinscheduler.dao.mapper.AlertPluginInstanceMapper; - -import org.apache.commons.lang.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; import java.util.ArrayList; import java.util.Arrays; @@ -40,18 +42,8 @@ import java.util.Date; import java.util.List; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -import com.google.common.collect.Lists; - @Component -public class AlertDao extends AbstractBaseDao { - - private final Logger logger = LoggerFactory.getLogger(getClass()); - +public class AlertDao { @Autowired private AlertMapper alertMapper; @@ -61,13 +53,6 @@ public class AlertDao extends AbstractBaseDao { @Autowired private AlertGroupMapper alertGroupMapper; - @Override - protected void init() { - alertMapper = ConnectionFactory.getInstance().getMapper(AlertMapper.class); - alertPluginInstanceMapper = ConnectionFactory.getInstance().getMapper(AlertPluginInstanceMapper.class); - alertGroupMapper = ConnectionFactory.getInstance().getMapper(AlertGroupMapper.class); - } - /** * insert alert * @@ -125,15 +110,24 @@ public class AlertDao 
extends AbstractBaseDao { * process time out alert * * @param processInstance processInstance - * @param processDefinition processDefinition + * @param projectUser projectUser */ - public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) { + public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProjectUser projectUser) { int alertGroupId = processInstance.getWarningGroupId(); Alert alert = new Alert(); List processAlertContentList = new ArrayList<>(1); ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .projectCode(projectUser.getProjectCode()) + .projectName(projectUser.getProjectName()) + .owner(projectUser.getUserName()) .processId(processInstance.getId()) + .processDefinitionCode(processInstance.getProcessDefinitionCode()) .processName(processInstance.getName()) + .processType(processInstance.getCommandType()) + .processState(processInstance.getState()) + .runTimes(processInstance.getRunTimes()) + .processStartTime(processInstance.getStartTime()) + .processHost(processInstance.getHost()) .event(AlertEvent.TIME_OUT) .warningLevel(AlertWarnLevel.MIDDLE) .build(); @@ -154,36 +148,38 @@ public class AlertDao extends AbstractBaseDao { /** * task timeout warn * - * @param alertGroupId alertGroupId - * @param processInstanceId processInstanceId - * @param processInstanceName processInstanceName - * @param taskId taskId - * @param taskName taskName + * @param processInstance processInstanceId + * @param taskInstance taskInstance + * @param projectUser projectUser */ - public void sendTaskTimeoutAlert(int alertGroupId, int processInstanceId, - String processInstanceName, int taskId, String taskName) { + public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance taskInstance, ProjectUser projectUser) { Alert alert = new Alert(); List processAlertContentList = new ArrayList<>(1); ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() - 
.processId(processInstanceId) - .processName(processInstanceName) - .taskId(taskId) - .taskName(taskName) + .projectCode(projectUser.getProjectCode()) + .projectName(projectUser.getProjectName()) + .owner(projectUser.getUserName()) + .processId(processInstance.getId()) + .processDefinitionCode(processInstance.getProcessDefinitionCode()) + .processName(processInstance.getName()) + .taskCode(taskInstance.getTaskCode()) + .taskName(taskInstance.getName()) + .taskType(taskInstance.getTaskType()) + .taskStartTime(taskInstance.getStartTime()) + .taskHost(taskInstance.getHost()) .event(AlertEvent.TIME_OUT) .warningLevel(AlertWarnLevel.MIDDLE) .build(); processAlertContentList.add(processAlertContent); String content = JSONUtils.toJsonString(processAlertContentList); alert.setTitle("Task Timeout Warn"); - saveTaskTimeoutAlert(alert, content, alertGroupId); + saveTaskTimeoutAlert(alert, content, processInstance.getWarningGroupId()); } /** - * list the alert information of waiting to be executed - * - * @return alert list + * List alerts that are pending for execution */ - public List listWaitExecutionAlert() { + public List listPendingAlerts() { return alertMapper.listAlertByStatus(AlertStatus.WAIT_EXECUTION); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java new file mode 100644 index 0000000000000000000000000000000000000000..f0c78fcf44fe5fb1276bdfb10d0b215f5954d74b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoConfiguration.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + +package org.apache.dolphinscheduler.dao; + +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.context.annotation.Configuration; + +@Configuration +@EnableAutoConfiguration +@MapperScan("org.apache.dolphinscheduler.dao") +public class DaoConfiguration { +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java index 2db41a332f9a994c1010bbdce15e4b8083cafa41..4a88a34b5421980f7fe83278f096e3cb6cbb90d7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/DaoFactory.java @@ -23,7 +23,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** - * dao factory + * dao factory TODO remove */ public class DaoFactory { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java index 53366777f76fdc9c1e780f57d55575f0b229be37..69491ab605d9d697f711420088aa425f424c1fa3 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java @@ -16,20 +16,23 @@ */ package org.apache.dolphinscheduler.dao; -import com.alibaba.druid.pool.DruidDataSource; -import java.sql.Connection; -import java.util.ArrayList; -import java.util.List; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.ConnectionUtils; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.apache.dolphinscheduler.dao.utils.MysqlPerformance; import org.apache.dolphinscheduler.dao.utils.PostgrePerformance; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.List; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; +import com.zaxxer.hikari.HikariDataSource; + /** * database state dao @@ -42,7 +45,7 @@ public class MonitorDBDao { public static final String VARIABLE_NAME = "variable_name"; @Autowired - private DruidDataSource dataSource; + private HikariDataSource dataSource; /** diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java index b6aa9e55911a60b5373b7fee827feceb1b9518e1..cf69f5e50c60d42869acc1c903ad4e60e98a03b2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/PluginDao.java @@ -17,33 +17,18 @@ package org.apache.dolphinscheduler.dao; -import static java.util.Objects.requireNonNull; - -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; import org.apache.dolphinscheduler.dao.entity.PluginDefine; import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper; - 
-import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -@Component -public class PluginDao extends AbstractBaseDao { - - private final Logger logger = LoggerFactory.getLogger(getClass()); +import static java.util.Objects.requireNonNull; +@Component +public class PluginDao { @Autowired private PluginDefineMapper pluginDefineMapper; - @Override - protected void init() { - pluginDefineMapper = ConnectionFactory.getInstance().getMapper(PluginDefineMapper.class); - } - /** * check plugin define table exist * @@ -73,11 +58,13 @@ public class PluginDao extends AbstractBaseDao { requireNonNull(pluginDefine.getPluginName(), "pluginName is null"); requireNonNull(pluginDefine.getPluginType(), "pluginType is null"); - List pluginDefineList = pluginDefineMapper.queryByNameAndType(pluginDefine.getPluginName(), pluginDefine.getPluginType()); - if (CollectionUtils.isEmpty(pluginDefineList)) { - return pluginDefineMapper.insert(pluginDefine); + PluginDefine currPluginDefine = pluginDefineMapper.queryByNameAndType(pluginDefine.getPluginName(), pluginDefine.getPluginType()); + if (currPluginDefine == null) { + if (pluginDefineMapper.insert(pluginDefine) == 1 && pluginDefine.getId() > 0) { + return pluginDefine.getId(); + } + throw new IllegalStateException("Failed to insert plugin definition"); } - PluginDefine currPluginDefine = pluginDefineList.get(0); if (!currPluginDefine.getPluginParams().equals(pluginDefine.getPluginParams())) { currPluginDefine.setUpdateTime(pluginDefine.getUpdateTime()); currPluginDefine.setPluginParams(pluginDefine.getPluginParams()); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java deleted file mode 100644 index 
25fe5927d3e2cd99a0ff17c689c71c4b796aaa0b..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.dao.datasource; - -import com.alibaba.druid.pool.DruidDataSource; -import com.baomidou.mybatisplus.core.MybatisConfiguration; -import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; -import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.ibatis.mapping.Environment; -import org.apache.ibatis.session.SqlSession; -import org.apache.ibatis.session.SqlSessionFactory; -import org.apache.ibatis.transaction.TransactionFactory; -import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; - -import java.sql.SQLException; - -import javax.sql.DataSource; - -import org.mybatis.spring.SqlSessionTemplate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * not spring manager connection, only use for init db, and alert module for non-spring application - * data source connection factory - */ -public class ConnectionFactory extends SpringConnectionFactory { - - private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class); - - private static class ConnectionFactoryHolder { - private static final ConnectionFactory connectionFactory = new ConnectionFactory(); - } - - public static ConnectionFactory getInstance() { - return ConnectionFactoryHolder.connectionFactory; - } - - private ConnectionFactory() { - try { - dataSource = buildDataSource(); - sqlSessionFactory = getSqlSessionFactory(); - sqlSessionTemplate = getSqlSessionTemplate(); - } catch (Exception e) { - logger.error("Initializing ConnectionFactory error", e); - throw new RuntimeException(e); - } - } - - /** - * sql session factory - */ - private SqlSessionFactory sqlSessionFactory; - - /** - * sql session template - */ - private SqlSessionTemplate sqlSessionTemplate; - - private DataSource dataSource; - - public DataSource getDataSource() { - return dataSource; - } - - /** - * get the data source - * - * @return druid 
dataSource - */ - private DataSource buildDataSource() throws SQLException { - - DruidDataSource druidDataSource = dataSource(); - return druidDataSource; - } - - /** - * * get sql session factory - * - * @return sqlSessionFactory - * @throws Exception sqlSessionFactory exception - */ - private SqlSessionFactory getSqlSessionFactory() throws Exception { - TransactionFactory transactionFactory = new JdbcTransactionFactory(); - - Environment environment = new Environment("development", transactionFactory, getDataSource()); - - MybatisConfiguration configuration = new MybatisConfiguration(); - configuration.setEnvironment(environment); - configuration.setLazyLoadingEnabled(true); - configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); - configuration.addInterceptor(new PaginationInterceptor()); - - MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); - sqlSessionFactoryBean.setConfiguration(configuration); - sqlSessionFactoryBean.setDataSource(getDataSource()); - - sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); - sqlSessionFactory = sqlSessionFactoryBean.getObject(); - - return sqlSessionFactory; -} - - private SqlSessionTemplate getSqlSessionTemplate() { - sqlSessionTemplate = new SqlSessionTemplate(sqlSessionFactory); - return sqlSessionTemplate; - } - - /** - * get sql session - * - * @return sqlSession - */ - public SqlSession getSqlSession() { - return sqlSessionTemplate; - } - - /** - * get mapper - * - * @param type target class - * @param generic - * @return target object - */ - public T getMapper(Class type) { - try { - return getSqlSession().getMapper(type); - } catch (Exception e) { - logger.error(e.getMessage(), e); - throw new RuntimeException("get mapper failed"); - } - } - -} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java index ca4a7e20bdfea7d57ac136e04ba21ba7df283af7..ca7fc8080fa50fc295a3c7d40974349a5e1a1301 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java @@ -17,125 +17,52 @@ package org.apache.dolphinscheduler.dao.datasource; -import static org.apache.dolphinscheduler.common.Constants.DATASOURCE_PROPERTIES; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; - +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.core.MybatisConfiguration; +import com.baomidou.mybatisplus.core.config.GlobalConfig; +import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; +import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; import org.apache.ibatis.mapping.DatabaseIdProvider; import org.apache.ibatis.mapping.VendorDatabaseIdProvider; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.type.JdbcType; - -import java.sql.SQLException; -import java.util.Properties; - import org.mybatis.spring.SqlSessionTemplate; -import org.mybatis.spring.annotation.MapperScan; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.jdbc.datasource.DataSourceTransactionManager; -import com.alibaba.druid.pool.DruidDataSource; -import com.baomidou.mybatisplus.annotation.IdType; -import com.baomidou.mybatisplus.core.MybatisConfiguration; 
-import com.baomidou.mybatisplus.core.config.GlobalConfig; -import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; -import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; - +import javax.sql.DataSource; +import java.util.Properties; -/** - * data source connection factory - */ @Configuration -@MapperScan("org.apache.dolphinscheduler.*.mapper") public class SpringConnectionFactory { - private static final Logger logger = LoggerFactory.getLogger(SpringConnectionFactory.class); - - static { - PropertyUtils.loadPropertyFile(DATASOURCE_PROPERTIES); - } - - /** - * pagination interceptor - * - * @return pagination interceptor - */ @Bean public PaginationInterceptor paginationInterceptor() { return new PaginationInterceptor(); } - /** - * get the data source - * - * @return druid dataSource - */ - @Bean(destroyMethod = "") - public DruidDataSource dataSource() throws SQLException { - - DruidDataSource druidDataSource = new DruidDataSource(); - - druidDataSource.setDriverClassName(PropertyUtils.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); - druidDataSource.setUrl(PropertyUtils.getString(Constants.SPRING_DATASOURCE_URL)); - druidDataSource.setUsername(PropertyUtils.getString(Constants.SPRING_DATASOURCE_USERNAME)); - druidDataSource.setPassword(PropertyUtils.getString(Constants.SPRING_DATASOURCE_PASSWORD)); - druidDataSource.setValidationQuery(PropertyUtils.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY, "SELECT 1")); - - druidDataSource.setPoolPreparedStatements(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS, true)); - druidDataSource.setTestWhileIdle(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE, true)); - druidDataSource.setTestOnBorrow(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW, true)); - druidDataSource.setTestOnReturn(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN, true)); - 
druidDataSource.setKeepAlive(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE, true)); - - druidDataSource.setMinIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5)); - druidDataSource.setMaxActive(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); - druidDataSource.setMaxWait(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT, 60000)); - druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE, 20)); - druidDataSource.setInitialSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE, 5)); - druidDataSource.setTimeBetweenEvictionRunsMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS, 60000)); - druidDataSource.setTimeBetweenConnectErrorMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS, 60000)); - druidDataSource.setMinEvictableIdleTimeMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS, 300000)); - druidDataSource.setValidationQueryTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT, 3)); - //auto commit - druidDataSource.setDefaultAutoCommit(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT, true)); - druidDataSource.init(); - return druidDataSource; - } - - /** - * * get transaction manager - * - * @return DataSourceTransactionManager - */ @Bean - public DataSourceTransactionManager transactionManager() throws SQLException { - return new DataSourceTransactionManager(dataSource()); + public DataSourceTransactionManager transactionManager(DataSource dataSource) { + return new DataSourceTransactionManager(dataSource); } - /** - * * get sql session factory - * - * @return sqlSessionFactory - * @throws Exception sqlSessionFactory exception - */ @Bean - public SqlSessionFactory sqlSessionFactory() throws Exception { + public 
SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception { MybatisConfiguration configuration = new MybatisConfiguration(); configuration.setMapUnderscoreToCamelCase(true); configuration.setCacheEnabled(false); configuration.setCallSettersOnNulls(true); configuration.setJdbcTypeForNull(JdbcType.NULL); configuration.addInterceptor(paginationInterceptor()); + + configuration.setGlobalConfig(new GlobalConfig().setBanner(false)); MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); sqlSessionFactoryBean.setConfiguration(configuration); - sqlSessionFactoryBean.setDataSource(dataSource()); + sqlSessionFactoryBean.setDataSource(dataSource); GlobalConfig.DbConfig dbConfig = new GlobalConfig.DbConfig(); dbConfig.setIdType(IdType.AUTO); @@ -150,14 +77,9 @@ public class SpringConnectionFactory { return sqlSessionFactoryBean.getObject(); } - /** - * get sql session - * - * @return SqlSession - */ @Bean - public SqlSession sqlSession() throws Exception { - return new SqlSessionTemplate(sqlSessionFactory()); + public SqlSession sqlSession(SqlSessionFactory sqlSessionFactory) { + return new SqlSessionTemplate(sqlSessionFactory); } @Bean diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java index b1ed217537c47062a41ffee594c28e8a96f9d14b..ae2ff6258a18f3de588de3cb5a629dec6c48d8db 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java @@ -132,6 +132,12 @@ public class Command { @TableField("dry_run") private int dryRun; + @TableField("process_instance_id") + private int processInstanceId; + + @TableField("process_definition_version") + private int processDefinitionVersion; + public Command() { this.taskDependType = TaskDependType.TASK_POST; this.failureStrategy 
= FailureStrategy.CONTINUE; @@ -152,7 +158,10 @@ public class Command { String workerGroup, Long environmentCode, Priority processInstancePriority, - int dryRun) { + int dryRun, + int processInstanceId, + int processDefinitionVersion + ) { this.commandType = commandType; this.executorId = executorId; this.processDefinitionCode = processDefinitionCode; @@ -168,6 +177,8 @@ public class Command { this.environmentCode = environmentCode; this.processInstancePriority = processInstancePriority; this.dryRun = dryRun; + this.processInstanceId = processInstanceId; + this.processDefinitionVersion = processDefinitionVersion; } public TaskDependType getTaskDependType() { @@ -298,6 +309,22 @@ public class Command { this.dryRun = dryRun; } + public int getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public int getProcessDefinitionVersion() { + return processDefinitionVersion; + } + + public void setProcessDefinitionVersion(int processDefinitionVersion) { + this.processDefinitionVersion = processDefinitionVersion; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -353,8 +380,13 @@ public class Command { if (processInstancePriority != command.processInstancePriority) { return false; } + if (processInstanceId != command.processInstanceId) { + return false; + } + if (processDefinitionVersion != command.getProcessDefinitionVersion()) { + return false; + } return !(updateTime != null ? !updateTime.equals(command.updateTime) : command.updateTime != null); - } @Override @@ -375,6 +407,8 @@ public class Command { result = 31 * result + (workerGroup != null ? workerGroup.hashCode() : 0); result = 31 * result + (environmentCode != null ? 
environmentCode.hashCode() : 0); result = 31 * result + dryRun; + result = 31 * result + processInstanceId; + result = 31 * result + processDefinitionVersion; return result; } @@ -397,7 +431,10 @@ public class Command { + ", workerGroup='" + workerGroup + '\'' + ", environmentCode='" + environmentCode + '\'' + ", dryRun='" + dryRun + '\'' + + ", processInstanceId='" + processInstanceId + '\'' + + ", processDefinitionVersion='" + processDefinitionVersion + '\'' + '}'; } + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java index ecd5be045563bba03d1a5702c1e6014e89d8e8ca..b15d0ef7e11794d925bd8d89d24bfb0fcd0b8c31 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DataSource.java @@ -16,14 +16,15 @@ */ package org.apache.dolphinscheduler.dao.entity; -import com.fasterxml.jackson.annotation.JsonFormat; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import java.util.Date; + import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; - -import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; @TableName("t_ds_datasource") public class DataSource { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java index 318f24535fa68e98935ba313600edc43f5f3cba5..1d6ebe0a0898d60f5a1d118b362819c68d1098ee 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/MonitorRecord.java @@ -16,12 +16,13 @@ */ package org.apache.dolphinscheduler.dao.entity; -import com.fasterxml.jackson.annotation.JsonFormat; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; + /** * monitor record for database */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java index 4f46140c214f3733bdd3c441bf9c64cafdd6c00e..e39d3f76a67ea58e3fb5325bcd743d0a8a5191eb 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessAlertContent.java @@ -33,14 +33,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; @JsonInclude(Include.NON_NULL) public class ProcessAlertContent implements Serializable { + @JsonProperty("projectId") - private int projectId; + private Integer projectId; + @JsonProperty("projectCode") + private Long projectCode; @JsonProperty("projectName") private String projectName; @JsonProperty("owner") private String owner; @JsonProperty("processId") - private int processId; + private Integer processId; + @JsonProperty("processDefinitionCode") + private Long processDefinitionCode; @JsonProperty("processName") private String processName; @JsonProperty("processType") @@ -50,7 +55,7 @@ public class ProcessAlertContent implements Serializable { @JsonProperty("recovery") private Flag recovery; @JsonProperty("runTimes") - private int runTimes; + private Integer runTimes; @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") @JsonProperty("processStartTime") private Date 
processStartTime; @@ -59,8 +64,8 @@ public class ProcessAlertContent implements Serializable { private Date processEndTime; @JsonProperty("processHost") private String processHost; - @JsonProperty("taskId") - private int taskId; + @JsonProperty("taskCode") + private Long taskCode; @JsonProperty("taskName") private String taskName; @JsonProperty("event") @@ -70,7 +75,7 @@ public class ProcessAlertContent implements Serializable { @JsonProperty("taskType") private String taskType; @JsonProperty("retryTimes") - private int retryTimes; + private Integer retryTimes; @JsonProperty("taskState") private ExecutionStatus taskState; @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") @@ -86,9 +91,11 @@ public class ProcessAlertContent implements Serializable { private ProcessAlertContent(Builder builder) { this.projectId = builder.projectId; + this.projectCode = builder.projectCode; this.projectName = builder.projectName; this.owner = builder.owner; this.processId = builder.processId; + this.processDefinitionCode = builder.processDefinitionCode; this.processName = builder.processName; this.processType = builder.processType; this.recovery = builder.recovery; @@ -97,7 +104,7 @@ public class ProcessAlertContent implements Serializable { this.processStartTime = builder.processStartTime; this.processEndTime = builder.processEndTime; this.processHost = builder.processHost; - this.taskId = builder.taskId; + this.taskCode = builder.taskCode; this.taskName = builder.taskName; this.event = builder.event; this.warnLevel = builder.warnLevel; @@ -116,34 +123,40 @@ public class ProcessAlertContent implements Serializable { } public static class Builder { - private int projectId; + private Integer projectId; + private Long projectCode; private String projectName; private String owner; - private int processId; + private Integer processId; + private Long processDefinitionCode; private String processName; private CommandType processType; private Flag recovery; private 
ExecutionStatus processState; - private int runTimes; + private Integer runTimes; private Date processStartTime; private Date processEndTime; private String processHost; - private int taskId; + private Long taskCode; private String taskName; private AlertEvent event; private AlertWarnLevel warnLevel; private String taskType; - private int retryTimes; + private Integer retryTimes; private ExecutionStatus taskState; private Date taskStartTime; private Date taskEndTime; private String taskHost; private String logPath; - public Builder projectId(int projectId) { + public Builder projectId(Integer projectId) { this.projectId = projectId; return this; } + public Builder projectCode(Long projectCode) { + this.projectCode = projectCode; + return this; + } public Builder projectName(String projectName) { this.projectName = projectName; @@ -155,10 +168,14 @@ public class ProcessAlertContent implements Serializable { return this; } - public Builder processId(int processId) { + public Builder processId(Integer processId) { this.processId = processId; return this; } + public Builder processDefinitionCode(Long processDefinitionCode) { + this.processDefinitionCode = processDefinitionCode; + return this; + } public Builder processName(String processName) { this.processName = processName; @@ -180,7 +197,7 @@ public class ProcessAlertContent implements Serializable { return this; } - public Builder runTimes(int runTimes) { + public Builder runTimes(Integer runTimes) { this.runTimes = runTimes; return this; } @@ -200,8 +217,8 @@ public class ProcessAlertContent implements Serializable { return this; } - public Builder taskId(int taskId) { - this.taskId = taskId; + public Builder taskCode(Long taskCode) { + this.taskCode = taskCode; return this; } @@ -225,7 +242,7 @@ public class ProcessAlertContent implements Serializable { return this; } - public Builder retryTimes(int retryTimes) { + public Builder retryTimes(Integer retryTimes) { this.retryTimes = retryTimes; return this; } diff 
--git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java index 8e50ce8a68ca22ad43c552ec18662079edd2fe8e..4c4dfcf92ed152da9fc49a992cab7926c8b81ebd 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java @@ -162,12 +162,6 @@ public class ProcessDefinition { @TableField(exist = false) private String modifyBy; - /** - * resource ids - */ - @TableField(exist = false) - private String resourceIds; - /** * warningGroupId */ @@ -340,14 +334,6 @@ public class ProcessDefinition { this.scheduleReleaseState = scheduleReleaseState; } - public String getResourceIds() { - return resourceIds; - } - - public void setResourceIds(String resourceIds) { - this.resourceIds = resourceIds; - } - public int getTimeout() { return timeout; } @@ -458,7 +444,6 @@ public class ProcessDefinition { + ", tenantId=" + tenantId + ", tenantCode='" + tenantCode + '\'' + ", modifyBy='" + modifyBy + '\'' - + ", resourceIds='" + resourceIds + '\'' + ", warningGroupId=" + warningGroupId + '}'; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionLog.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionLog.java index 30840e86021d68af642dd0b7202f578b721b210b..eaaceac37095711087d489bc2de51601edcaf8f2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionLog.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinitionLog.java @@ -65,7 +65,6 @@ public class ProcessDefinitionLog extends ProcessDefinition { this.setTimeout(processDefinition.getTimeout()); this.setTenantId(processDefinition.getTenantId()); 
this.setModifyBy(processDefinition.getModifyBy()); - this.setResourceIds(processDefinition.getResourceIds()); this.setWarningGroupId(processDefinition.getWarningGroupId()); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java index 18c386b854d0ac08972befa05e6a6cc4a9b9cb77..f20b13a08bfabe80cefad3befc063ed57101acc1 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java @@ -244,6 +244,12 @@ public class ProcessInstance { */ private int dryRun; + /** + * re-start time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date restartTime; + public ProcessInstance() { } @@ -516,6 +522,14 @@ public class ProcessInstance { this.dryRun = dryRun; } + public Date getRestartTime() { + return restartTime; + } + + public void setRestartTime(Date restartTime) { + this.restartTime = restartTime; + } + /** * add command to history * @@ -684,6 +698,10 @@ public class ProcessInstance { + ", dryRun='" + dryRun + '\'' + + '}' + + ", restartTime='" + + restartTime + + '\'' + '}'; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectUser.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectUser.java index 37722f3c792970bc15da25570956d8ef5f85de44..353e0a21ccb2327f46097e4da8226cb0f91164bf 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectUser.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProjectUser.java @@ -37,6 +37,12 @@ public class ProjectUser { @TableField("project_id") private int projectId; + /** + * project code + */ + @TableField(exist = false) + private long projectCode; + /** * project name 
*/ @@ -124,12 +130,21 @@ public class ProjectUser { this.perm = perm; } + public long getProjectCode() { + return projectCode; + } + + public void setProjectCode(long projectCode) { + this.projectCode = projectCode; + } + @Override public String toString() { return "ProjectUser{" + "id=" + id + ", userId=" + userId + ", projectId=" + projectId + + ", projectCode=" + projectCode + ", projectName='" + projectName + '\'' + ", userName='" + userName + '\'' + ", perm=" + perm diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java index 214fbe83e48862c86304556ae1f4dfd950683aec..7475bf738e04c75a6e2869d045be47bc5b2828bc 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java @@ -14,232 +14,222 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.dao.entity; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import java.util.Date; -import com.fasterxml.jackson.annotation.JsonFormat; -import org.apache.dolphinscheduler.common.enums.ResourceType; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; - -import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; @TableName("t_ds_resources") public class Resource { - /** - * id - */ - @TableId(value="id", type=IdType.AUTO) - private int id; - - /** - * parent id - */ - private int pid; - - /** - * resource alias - */ - private String alias; - - /** - * full name - */ - private String fullName; - - /** - * is directory - */ - private boolean isDirectory=false; - - /** - * description - */ - private String description; - - /** - * file alias - */ - private String fileName; - - /** - * user id - */ - private int userId; - - /** - * resource type - */ - private ResourceType type; - - /** - * resource size - */ - private long size; - - /** - * create time - */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") - private Date createTime; - - /** - * update time - */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") - private Date updateTime; - - public Resource() { - } - - public Resource(int id, String alias, String fileName, String description, int userId, - ResourceType type, long size, - Date createTime, Date updateTime) { - this.id = id; - this.alias = alias; - this.fileName = fileName; - this.description = description; - this.userId = userId; - this.type = type; - this.size = size; - this.createTime = createTime; - this.updateTime = updateTime; - } - - public Resource(int id, int pid, String alias, String fullName, boolean isDirectory) { - this.id = id; - this.pid = pid; - this.alias = alias; - this.fullName = fullName; - this.isDirectory = isDirectory; - } - - 
/*public Resource(String alias, String fileName, String description, int userId, ResourceType type, long size, Date createTime, Date updateTime) { - this.alias = alias; - this.fileName = fileName; - this.description = description; - this.userId = userId; - this.type = type; - this.size = size; - this.createTime = createTime; - this.updateTime = updateTime; - }*/ - - public Resource(int pid, String alias, String fullName, boolean isDirectory, String description, String fileName, int userId, ResourceType type, long size, Date createTime, Date updateTime) { - this.pid = pid; - this.alias = alias; - this.fullName = fullName; - this.isDirectory = isDirectory; - this.description = description; - this.fileName = fileName; - this.userId = userId; - this.type = type; - this.size = size; - this.createTime = createTime; - this.updateTime = updateTime; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public int getPid() { - return pid; - } - - public void setPid(int pid) { - this.pid = pid; - } - - public String getFullName() { - return fullName; - } - - public void setFullName(String fullName) { - this.fullName = fullName; - } - - public boolean isDirectory() { - return isDirectory; - } - - public void setDirectory(boolean directory) { - isDirectory = directory; - } - - public String getFileName() { - return fileName; - } - - public void setFileName(String fileName) { - this.fileName = fileName; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - - public ResourceType getType() { - return type; - } - - public void setType(ResourceType type) { - this.type = type; - } - - public long 
getSize() { - return size; - } - - public void setSize(long size) { - this.size = size; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getUpdateTime() { - return updateTime; - } - - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; - } + /** + * id + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + + /** + * parent id + */ + private int pid; + + /** + * resource alias + */ + private String alias; + + /** + * full name + */ + private String fullName; + + /** + * is directory + */ + private boolean isDirectory = false; + + /** + * description + */ + private String description; + + /** + * file alias + */ + private String fileName; + + /** + * user id + */ + private int userId; + + /** + * resource type + */ + private ResourceType type; + + /** + * resource size + */ + private long size; + + /** + * create time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + + /** + * update time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public Resource() { + } + + public Resource(int id, String alias, String fileName, String description, int userId, + ResourceType type, long size, + Date createTime, Date updateTime) { + this.id = id; + this.alias = alias; + this.fileName = fileName; + this.description = description; + this.userId = userId; + this.type = type; + this.size = size; + this.createTime = createTime; + this.updateTime = updateTime; + } + + public Resource(int id, int pid, String alias, String fullName, boolean isDirectory) { + this.id = id; + this.pid = pid; + this.alias = alias; + this.fullName = fullName; + this.isDirectory = isDirectory; + } + + public Resource(int pid, String alias, String fullName, boolean isDirectory, String description, String fileName, int userId, ResourceType type, long size, 
Date createTime, Date updateTime) { + this.pid = pid; + this.alias = alias; + this.fullName = fullName; + this.isDirectory = isDirectory; + this.description = description; + this.fileName = fileName; + this.userId = userId; + this.type = type; + this.size = size; + this.createTime = createTime; + this.updateTime = updateTime; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } - @Override - public String toString() { - return "Resource{" + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public int getPid() { + return pid; + } + + public void setPid(int pid) { + this.pid = pid; + } + + public String getFullName() { + return fullName; + } + + public void setFullName(String fullName) { + this.fullName = fullName; + } + + public boolean isDirectory() { + return isDirectory; + } + + public void setDirectory(boolean directory) { + isDirectory = directory; + } + + public String getFileName() { + return fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + + public ResourceType getType() { + return type; + } + + public void setType(ResourceType type) { + this.type = type; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "Resource{" + "id=" + id + ", 
pid=" + pid + ", alias='" + alias + '\'' + @@ -253,30 +243,30 @@ public class Resource { ", createTime=" + createTime + ", updateTime=" + updateTime + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; } - Resource resource = (Resource) o; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } - if (id != resource.id) { - return false; - } - return alias.equals(resource.alias); + Resource resource = (Resource) o; + + if (id != resource.id) { + return false; + } + return alias.equals(resource.alias); - } + } - @Override - public int hashCode() { - int result = id; - result = 31 * result + alias.hashCode(); - return result; - } + @Override + public int hashCode() { + int result = id; + result = 31 * result + alias.hashCode(); + return result; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java index 7da11a6f18b6b70ba1f12db929ca102fa5539807..f8af169d2450f54e239c7267feb3b49a4552c163 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskDefinition.java @@ -31,6 +31,7 @@ import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; +import com.baomidou.mybatisplus.annotation.FieldStrategy; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; @@ -157,6 +158,7 @@ public class TaskDefinition { /** * timeout notify strategy */ + @TableField(updateStrategy = FieldStrategy.IGNORED) private TaskTimeoutStrategy timeoutNotifyStrategy; /** @@ -292,7 +294,7 @@ 
public class TaskDefinition { public Map getTaskParamMap() { if (taskParamMap == null && StringUtils.isNotEmpty(taskParams)) { JsonNode localParams = JSONUtils.parseObject(taskParams).findValue("localParams"); - if (localParams != null) { + if (localParams != null && localParams.size() > 0) { List propList = JSONUtils.toList(localParams.toString(), Property.class); taskParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java index ac18975d9676330356f8a1e0719047d18cc8bdc9..e2b80341137a84dace8e5b94a56f1be2fd2c5722 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.dao.entity; +import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT; + import com.fasterxml.jackson.core.type.TypeReference; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; @@ -25,6 +27,7 @@ import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import org.apache.dolphinscheduler.common.task.switchtask.SwitchParameters; +import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; @@ -543,9 +546,7 @@ public class TaskInstance implements Serializable { } public boolean isTaskComplete() { - - return this.getState().typeIsPause() - || this.getState().typeIsSuccess() + return this.getState().typeIsSuccess() || this.getState().typeIsCancel() || 
(this.getState().typeIsFailure() && !taskCanRetry()); } @@ -587,8 +588,28 @@ public class TaskInstance implements Serializable { return true; } else { return (this.getState().typeIsFailure() - && this.getRetryTimes() < this.getMaxRetryTimes()); + && this.getRetryTimes() <= this.getMaxRetryTimes()); + } + } + + /** + * whether the retry interval is timed out + * + * @return Boolean + */ + public boolean retryTaskIntervalOverTime() { + if (getState() != ExecutionStatus.FAILURE) { + return false; + } + if (getId() == 0 + || getMaxRetryTimes() == 0 + || getRetryInterval() == 0) { + return false; } + Date now = new Date(); + long failedTimeInterval = DateUtils.differSec(now, getEndTime()); + // task retry does not over time, return false + return getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval; } public Priority getTaskInstancePriority() { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.java index 472ba35e3b2419d2eb62aa1a1721eaa68f29d646..3c73a56bbfdf909a70cca5285e331d42ef39863b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.java @@ -25,6 +25,8 @@ import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import java.util.List; + /** * accesstoken mapper interface */ @@ -44,6 +46,14 @@ public interface AccessTokenMapper extends BaseMapper { @Param("userId") int userId ); + /** + * Query access token for specified user + * + * @param userId userId + * @return access token for specified user + */ + List queryAccessTokenByUser(@Param("userId") int userId); + /** * delete by userId * diff --git 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java index 72eac71441c5e9ba7eb3f32601de7601a6f60a5c..0e090f43e050865ae5302894c0fa1c01fc8a6f75 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.AlertGroup; -import org.apache.dolphinscheduler.dao.vo.AlertGroupVo; import org.apache.ibatis.annotations.Param; @@ -84,12 +83,4 @@ public interface AlertGroupMapper extends BaseMapper { */ String queryAlertGroupInstanceIdsById(@Param("alertGroupId") int alertGroupId); - /** - * query alertGroupVo page list - * @param page page - * @param groupName groupName - * @return IPage: include alert group id and group_name - */ - IPage queryAlertGroupVo(Page page, - @Param("groupName") String groupName); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java index 12da99c3a0416fc1845c29ae6c5d02058f4d1b1d..1fe32d320d6975494e6eea6e8d33409ebfb6b3f0 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.java @@ -24,6 +24,8 @@ import org.apache.ibatis.annotations.Param; import java.util.List; import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; public interface AlertPluginInstanceMapper extends BaseMapper { @@ -42,7 +44,13 @@ 
public interface AlertPluginInstanceMapper extends BaseMapper queryByIds(@Param("ids") List ids); - List queryByInstanceName(@Param("instanceName")String instanceName); + /** + * Query alert plugin instance by given name + * @param page page + * @param instanceName Alert plugin name + * @return alertPluginInstance IPage + */ + IPage queryByInstanceNamePage(Page page, @Param("instanceName") String instanceName); /** * diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java index 2bbfb4b7b1ce5e2475dd32210b2325fc523ba045..22913845c3a82ca313259054a6983a97dac7d597 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java @@ -31,13 +31,6 @@ import java.util.List; */ public interface CommandMapper extends BaseMapper { - - /** - * get one command - * @return command - */ - Command getOneToRun(); - /** * count command state * @param userId userId diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java index 1ce76707aaa8342e0d0bf7d9349607ce74ecc05a..1bca24aab5f00d74af6d248684f065805a87d6aa 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java @@ -17,14 +17,12 @@ package org.apache.dolphinscheduler.dao.mapper; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; import org.apache.dolphinscheduler.dao.entity.PluginDefine; - import org.apache.ibatis.annotations.Param; import java.util.List; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; - public interface 
PluginDefineMapper extends BaseMapper { /** @@ -64,5 +62,5 @@ public interface PluginDefineMapper extends BaseMapper { * @param pluginType * @return */ - List queryByNameAndType(@Param("pluginName") String pluginName, @Param("pluginType") String pluginType); + PluginDefine queryByNameAndType(@Param("pluginName") String pluginName, @Param("pluginType") String pluginType); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapper.java index 038ed5d2f61bca9b4331ace1ca8b29569e634cf2..bd9297ec74e42b8da9d214af5c41f91bf17ae938 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapper.java @@ -23,6 +23,9 @@ import org.apache.ibatis.annotations.Param; import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.Cacheable; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @@ -30,8 +33,19 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * process definition log mapper interface */ +@CacheConfig(cacheNames = "processDefinition", keyGenerator = "cacheKeyGenerator") public interface ProcessDefinitionLogMapper extends BaseMapper { + /** + * query the certain process definition version info by process definition code and version number + * + * @param code process definition code + * @param version version number + * @return the process definition version info + */ + @Cacheable(sync = true) + ProcessDefinitionLog queryByDefinitionCodeAndVersion(@Param("code") long code, @Param("version") int version); + /** * query process definition log by 
name * @@ -59,23 +73,15 @@ public interface ProcessDefinitionLogMapper extends BaseMapper queryProcessDefinitionVersionsPaging(Page page, @Param("code") long code); + IPage queryProcessDefinitionVersionsPaging(Page page, @Param("code") long code, @Param("projectCode") long projectCode); /** * delete the certain process definition version by process definition id and version number diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java index 3a731ea1fb20c75286af28209bccbf6230898e99..912b8f39b1fff9d5dded45bfe42878ce1493c2e4 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; import org.apache.ibatis.annotations.MapKey; import org.apache.ibatis.annotations.Param; @@ -28,13 +27,17 @@ import java.util.Collection; import java.util.List; import java.util.Map; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * process definition mapper interface */ +@CacheConfig(cacheNames = "processDefinition", keyGenerator = "cacheKeyGenerator") public interface ProcessDefinitionMapper extends BaseMapper { /** @@ -43,15 +46,14 @@ public interface ProcessDefinitionMapper extends BaseMapper { * 
@param code code * @return process definition */ + @Cacheable(sync = true) ProcessDefinition queryByCode(@Param("code") long code); /** - * query process definition by code list - * - * @param codes codes - * @return process definition list + * update */ - List queryByCodes(@Param("codes") Collection codes); + @CacheEvict(key = "#p0.code") + int updateById(@Param("et") ProcessDefinition processDefinition); /** * delete process definition by code @@ -59,8 +61,17 @@ public interface ProcessDefinitionMapper extends BaseMapper { * @param code code * @return delete result */ + @CacheEvict int deleteByCode(@Param("code") long code); + /** + * query process definition by code list + * + * @param codes codes + * @return process definition list + */ + List queryByCodes(@Param("codes") Collection codes); + /** * verify process definition by name * diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java index 0e5a38140f8f8f52bf9ad24c0a190a20e395cfc2..8ad7e894131a4e4da68207df5df86da1e75dba24 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.java @@ -17,11 +17,13 @@ package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; + import org.apache.ibatis.annotations.Param; import java.util.List; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + /** * process instance map mapper interface */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java index 
7be58a742203b213af95b9aaae9688542f80c1f6..4a156ce48718610936a483cb8b5f6aa173cffc30 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -43,6 +43,14 @@ public interface ProcessInstanceMapper extends BaseMapper { */ ProcessInstance queryDetailById(@Param("processId") int processId); + /** + * query process instance host by stateArray + * + * @param stateArray process instance states that need failover + * @return list of hosts whose process instances need failover + */ + List queryNeedFailoverProcessInstanceHost(@Param("states") int[] stateArray); + /** + * query process instance by host and stateArray + * @@ -213,14 +221,19 @@ public interface ProcessInstanceMapper extends BaseMapper { /** * query top n process instance order by running duration * + * @param size size + * @param startTime start time + * @param endTime end time * @param status process instance status + * @param projectCode project code * @return ProcessInstance list */ List queryTopNProcessInstance(@Param("size") int size, @Param("startTime") Date startTime, @Param("endTime") Date endTime, - @Param("status") ExecutionStatus status); + @Param("status") ExecutionStatus status, + @Param("projectCode") long projectCode); /** * query process instance by processDefinitionCode and stateArray diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java index 472018baf6c04751e43251cb15ed8b005968c4b7..e4ffa49955ad6e37442d0c7811e634eb250df110 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.dao.mapper; +import 
org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; import org.apache.ibatis.annotations.Param; @@ -57,4 +58,20 @@ public interface ProcessTaskRelationLogMapper extends BaseMapper { /** @@ -38,9 +44,26 @@ public interface ProcessTaskRelationMapper extends BaseMapper queryByProcessCode(@Param("projectCode") long projectCode, @Param("processCode") long processCode); + /** + * update + */ + @CacheEvict(key = "#p0.projectCode + '_' + #p0.processDefinitionCode") + int updateById(@Param("et") ProcessTaskRelation processTaskRelation); + + /** + * delete process task relation by processCode + * + * @param projectCode projectCode + * @param processCode processCode + * @return int + */ + @CacheEvict + int deleteByCode(@Param("projectCode") long projectCode, @Param("processCode") long processCode); + /** * process task relation by taskCode * @@ -58,20 +81,95 @@ public interface ProcessTaskRelationMapper extends BaseMapper queryByTaskCode(@Param("taskCode") long taskCode); /** - * delete process task relation by processCode + * batch insert process task relation * - * @param projectCode projectCode - * @param processCode processCode + * @param taskRelationList taskRelationList * @return int */ - int deleteByCode(@Param("projectCode") long projectCode, - @Param("processCode") long processCode); + int batchInsert(@Param("taskRelationList") List taskRelationList); /** - * batch insert process task relation + * query downstream process task relation by taskCode * - * @param taskRelationList taskRelationList + * @param taskCode taskCode + * @return ProcessTaskRelation + */ + List queryDownstreamByTaskCode(@Param("taskCode") long taskCode); + + /** + * query upstream process task relation by taskCode + * + * @param projectCode projectCode + * @param taskCode taskCode + * @return ProcessTaskRelation + */ + List queryUpstreamByCode(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode); + 
+ /** + * query downstream process task relation by taskCode + * + * @param projectCode projectCode + * @param taskCode taskCode + * @return ProcessTaskRelation + */ + List queryDownstreamByCode(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode); + + /** + * query task relation by codes + * + * @param projectCode projectCode + * @param taskCode taskCode + * @param preTaskCodes preTaskCode list + * @return ProcessTaskRelation + */ + List queryUpstreamByCodes(@Param("projectCode") long projectCode, @Param("taskCode") long taskCode, @Param("preTaskCodes") Long[] preTaskCodes); + + /** + * count upstream by codes + * + * @param projectCode projectCode + * @param taskCode taskCode + * @param processDefinitionCodes processDefinitionCodes + * @return upstream count list group by process definition code + */ + List> countUpstreamByCodeGroupByProcessDefinitionCode(@Param("projectCode") long projectCode, + @Param("processDefinitionCodes") Long[] processDefinitionCodes, + @Param("taskCode") long taskCode); + + /** + * query by code + * + * @param projectCode projectCode + * @param processDefinitionCode processDefinitionCode + * @param preTaskCode preTaskCode + * @param postTaskCode postTaskCode + * @return ProcessTaskRelation + */ + List queryByCode(@Param("projectCode") long projectCode, + @Param("processDefinitionCode") long processDefinitionCode, + @Param("preTaskCode") long preTaskCode, + @Param("postTaskCode") long postTaskCode); + + /** + * delete process task relation + * + * @param processTaskRelationLog processTaskRelationLog * @return int */ - int batchInsert(@Param("taskRelationList") List taskRelationList); + int deleteRelation(@Param("processTaskRelationLog") ProcessTaskRelationLog processTaskRelationLog); + + /** + * count by code + * + * @param projectCode projectCode + * @param processDefinitionCode processDefinitionCode + * @param preTaskCode preTaskCode + * @param postTaskCode postTaskCode + * @return ProcessTaskRelation + */ + int 
countByCode(@Param("projectCode") long projectCode, + @Param("processDefinitionCode") long processDefinitionCode, + @Param("preTaskCode") long preTaskCode, + @Param("postTaskCode") long postTaskCode); + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java index 027bfd2b527753cae7c44b08dc9c77570ca29b56..e48607007d57e762651f93cc0c858deb79c04c53 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/QueueMapper.java @@ -53,4 +53,11 @@ public interface QueueMapper extends BaseMapper { * @return true if exist else return null */ Boolean existQueue(@Param("queue") String queue, @Param("queueName") String queueName); + + /** + * query queue by queue name + * @param queueName queueName + * @return queue list + */ + List queryQueueName(@Param("queueName") String queueName); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java index 37fae5d3933b0e2257bc81dfa31e3f7fafca4034..d0b2d326be94fd86e916962cc10617059e907ef2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.Schedule; @@ -22,16 +23,37 @@ import org.apache.ibatis.annotations.Param; import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; /** * scheduler mapper interface */ +@CacheConfig(cacheNames = "schedule", keyGenerator = "cacheKeyGenerator") public interface ScheduleMapper extends BaseMapper { + @CacheEvict(key = "#p0.processDefinitionCode") + int insert(Schedule entity); + + @CacheEvict(key = "#p0.processDefinitionCode") + int updateById(@Param("et") Schedule entity); + + /** + * query schedule list by process definition code + * + * @param processDefinitionCode processDefinitionCode + * @return schedule list + */ + @Cacheable(sync = true) + List queryReleaseSchedulerListByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); + /** * scheduler page + * * @param page page * @param processDefinitionCode processDefinitionCode * @param searchVal searchVal @@ -43,6 +65,7 @@ public interface ScheduleMapper extends BaseMapper { /** * query schedule list by project name + * * @param projectName projectName * @return schedule list */ @@ -50,6 +73,7 @@ public interface ScheduleMapper extends BaseMapper { /** * query schedule list by process definition codes + * * @param processDefineCodes processDefineCodes * @return schedule list */ @@ -57,16 +81,9 @@ public interface ScheduleMapper extends BaseMapper { /** * query schedule list by process definition code + * * @param processDefinitionCode processDefinitionCode - * @return schedule list - */ - List queryByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); - - /** - * query schedule list by process definition code - * 
@param processDefinitionCode processDefinitionCode - * @return schedule list + * @return schedule */ - List queryReleaseSchedulerListByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); - + Schedule queryByProcessDefinitionCode(@Param("processDefinitionCode") long processDefinitionCode); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.java index 70ca9f70c3f7ec7743d928cfbac1f22f36cc1c6c..851d8dac518d0156080e8bfa0dac71cef01c7992 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.java @@ -25,6 +25,10 @@ import org.apache.ibatis.annotations.Param; import java.util.Collection; import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @@ -32,24 +36,41 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * task definition log mapper interface */ +@CacheConfig(cacheNames = "taskDefinition", keyGenerator = "cacheKeyGenerator") public interface TaskDefinitionLogMapper extends BaseMapper { /** - * query max version for definition + * query task definition log * * @param code taskDefinitionCode + * @param version version + * @return task definition log */ - Integer queryMaxVersionForDefinition(@Param("code") long code); + @Cacheable(sync = true) + TaskDefinitionLog queryByDefinitionCodeAndVersion(@Param("code") long code, @Param("version") int version); /** - * query task definition 
log + * update + */ + @CacheEvict(key = "#p0.code + '_' + #p0.version") + int updateById(@Param("et") TaskDefinitionLog taskDefinitionLog); + + /** + * delete the certain task definition version by task definition code and version + * + * @param code task definition code + * @param version task definition version + * @return delete result + */ + @CacheEvict + int deleteByCodeAndVersion(@Param("code") long code, @Param("version") int version); + + /** + * query max version for definition * * @param code taskDefinitionCode - * @param version version - * @return task definition log */ - TaskDefinitionLog queryByDefinitionCodeAndVersion(@Param("code") long code, - @Param("version") int version); + Integer queryMaxVersionForDefinition(@Param("code") long code); /** * @param taskDefinitions taskDefinition list @@ -65,21 +86,13 @@ public interface TaskDefinitionLogMapper extends BaseMapper { */ int batchInsert(@Param("taskDefinitionLogs") List taskDefinitionLogs); - /** - * delete the certain task definition version by task definition code and version - * - * @param code task definition code - * @param version task definition version - * @return delete result - */ - int deleteByCodeAndVersion(@Param("code") long code, @Param("version") int version); - /** * query the paging task definition version list by pagination info * * @param page pagination info + * @param projectCode project code * @param code process definition code * @return the paging task definition version list */ - IPage queryTaskDefinitionVersionsPaging(Page page, @Param("code") long code); + IPage queryTaskDefinitionVersionsPaging(Page page, @Param("code") long code, @Param("projectCode") long projectCode); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java index 898708e888396165c24d9e1c67d8e2642ac9e96f..b2a766ce49e6827c8f8837e63e381d0b5b2d45bb 
100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java @@ -17,18 +17,17 @@ package org.apache.dolphinscheduler.dao.mapper; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.TaskInstance; - import org.apache.ibatis.annotations.Param; import java.util.Date; import java.util.List; - -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; +import java.util.Set; /** * task instance mapper interface @@ -51,6 +50,8 @@ public interface TaskInstanceMapper extends BaseMapper { TaskInstance queryByInstanceIdAndName(@Param("processInstanceId") int processInstanceId, @Param("name") String name); + TaskInstance queryByInstanceIdAndCode(@Param("processInstanceId") int processInstanceId, @Param("taskCode") Long taskCode); + Integer countTask(@Param("projectCodes") Long[] projectCodes, @Param("taskIds") int[] taskIds); @@ -68,6 +69,21 @@ public interface TaskInstanceMapper extends BaseMapper { @Param("states") int[] statusArray, @Param("host") String host, @Param("startTime") Date startTime, - @Param("endTime") Date endTime - ); + @Param("endTime") Date endTime); + + int updateHostAndSubmitTimeById(@Param("id") int id, @Param("host") String host, @Param("submitTime") Date submitTime); + + /** + * query last task instance + * + * @param taskCode taskCode + * @param startTime startTime + * @param endTime endTime + * @return task instance + */ + TaskInstance queryLastTaskInstance(@Param("taskCode") long taskCode, @Param("startTime") Date startTime, @Param("endTime") Date endTime); + + 
List queryLastTaskInstanceList(@Param("taskCodes") Set taskCodes, @Param("startTime") Date startTime, @Param("endTime") Date endTime); + + List queryTaskInstanceListByIds(@Param("ids") Set ids); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TenantMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TenantMapper.java index 89b62378c085daf893e1684ecc502f52cb8700ee..88f56002d9a1ff623db1259e9322cb1ba1499582 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TenantMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TenantMapper.java @@ -14,29 +14,50 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.Tenant; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; + import org.apache.ibatis.annotations.Param; -import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; /** * tenant mapper interface */ +@CacheConfig(cacheNames = "tenant", keyGenerator = "cacheKeyGenerator") public interface TenantMapper extends BaseMapper { /** * query tenant by id + * * @param tenantId tenantId * @return tenant */ + @Cacheable(sync = true) Tenant queryById(@Param("tenantId") int tenantId); + /** + * delete by id + */ + @CacheEvict + int deleteById(int id); + + /** + * update + */ + @CacheEvict(key = "#p0.id") + int updateById(@Param("et") Tenant tenant); + /** * query tenant by code + * * @param tenantCode tenantCode * @return tenant */ @@ -44,6 +65,7 @@ public interface TenantMapper 
extends BaseMapper { /** * tenant page + * * @param page page * @param searchVal searchVal * @return tenant IPage @@ -53,6 +75,7 @@ public interface TenantMapper extends BaseMapper { /** * check tenant exist + * * @param tenantCode tenantCode * @return true if exist else return null */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java index 4418363dc265ae283ccccf79ce4608a17aabc853..bd611417c110e80314a54e6b1bbfb0f7034ae5b9 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UserMapper.java @@ -14,29 +14,57 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.mapper; import org.apache.dolphinscheduler.dao.entity.User; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + import org.apache.ibatis.annotations.Param; import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + /** * user mapper interface */ +@CacheConfig(cacheNames = "user", keyGenerator = "cacheKeyGenerator") public interface UserMapper extends BaseMapper { + /** + * select by user id + */ + @Cacheable(sync = true) + User selectById(int id); + + /** + * delete by id + */ + @CacheEvict + int deleteById(int id); + + /** + * update + */ + @CacheEvict(key = "#p0.id") + int updateById(@Param("et") User 
user); + /** * query all general user + * * @return user list */ List queryAllGeneralUser(); /** * query user by name + * * @param userName userName * @return user */ @@ -44,6 +72,7 @@ public interface UserMapper extends BaseMapper { /** * query user by userName and password + * * @param userName userName * @param password password * @return user @@ -53,6 +82,7 @@ public interface UserMapper extends BaseMapper { /** * user page + * * @param page page * @param userName userName * @return user IPage @@ -62,6 +92,7 @@ public interface UserMapper extends BaseMapper { /** * query user detail by id + * * @param userId userId * @return user */ @@ -69,6 +100,7 @@ public interface UserMapper extends BaseMapper { /** * query user list by alertgroupId + * * @param alertgroupId alertgroupId * @return user list */ @@ -76,6 +108,7 @@ public interface UserMapper extends BaseMapper { /** * query user list by tenantId + * * @param tenantId tenantId * @return user list */ @@ -83,6 +116,7 @@ public interface UserMapper extends BaseMapper { /** * query user by userId + * * @param userId userId * @return user */ @@ -90,6 +124,7 @@ public interface UserMapper extends BaseMapper { /** * query user by token + * * @param token token * @return user */ @@ -97,6 +132,7 @@ public interface UserMapper extends BaseMapper { /** * query user by queue name + * * @param queueName queue name * @return user list */ @@ -104,13 +140,15 @@ public interface UserMapper extends BaseMapper { /** * check the user exist - * @param queueName queue name + * + * @param queue queue name * @return true if exist else return null */ Boolean existUser(@Param("queue") String queue); /** * update user with old queue + * * @param oldQueue old queue name * @param newQueue new queue name * @return update rows @@ -124,4 +162,12 @@ public interface UserMapper extends BaseMapper { * @return user list */ List selectByIds(@Param("ids") List ids); + + /** + * query authed user list by projectId + * + * @param projectId projectId 
+ * @return user list + */ + List queryAuthedUserListByProjectId(@Param("projectId") int projectId); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.java index 21af4b38c3519d31050ddb6db829952d7d24ae8f..fcff987d025f6b553bc0cf5ef616f007fcbd77fe 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.java @@ -17,27 +17,46 @@ package org.apache.dolphinscheduler.dao.mapper; +import static org.apache.dolphinscheduler.common.Constants.CACHE_KEY_VALUE_ALL; + import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.ibatis.annotations.Param; import java.util.List; +import org.springframework.cache.annotation.CacheConfig; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; + import com.baomidou.mybatisplus.core.mapper.BaseMapper; /** * worker group mapper interface */ +@CacheConfig(cacheNames = "workerGroup", keyGenerator = "cacheKeyGenerator") public interface WorkerGroupMapper extends BaseMapper { /** * query all worker group + * * @return worker group list */ + @Cacheable(sync = true, key = CACHE_KEY_VALUE_ALL) List queryAllWorkerGroup(); + @CacheEvict(key = CACHE_KEY_VALUE_ALL) + int deleteById(Integer id); + + @CacheEvict(key = CACHE_KEY_VALUE_ALL) + int insert(WorkerGroup entity); + + @CacheEvict(key = CACHE_KEY_VALUE_ALL) + int updateById(@Param("et") WorkerGroup entity); + /** * query worer grouop by name + * * @param name name * @return worker group list */ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java index 
b2daae28cb45ba424d9d23732bc1c5c5f4bba7a6..6743acb2e5880ae8484b940451b0f9f89f2fba1f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java @@ -14,115 +14,110 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.upgrade; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.SchemaUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Service; +import javax.sql.DataSource; +import java.io.IOException; +import java.sql.Connection; import java.util.List; -/** - * upgrade manager - */ +@Service +@Profile("shell-cli") public class DolphinSchedulerManager { private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class); - UpgradeDao upgradeDao; - /** - * init upgrade dao - */ - private void initUpgradeDao() { - DbType dbType = UpgradeDao.getDbType(); - if (dbType != null) { - switch (dbType) { - case MYSQL: - upgradeDao = MysqlUpgradeDao.getInstance(); - break; - case POSTGRESQL: - upgradeDao = PostgresqlUpgradeDao.getInstance(); - break; - default: - logger.error("not support sql type: {},can't upgrade", dbType); - throw new IllegalArgumentException("not support sql type,can't upgrade"); - } + private final UpgradeDao upgradeDao; + + public DolphinSchedulerManager(DataSource dataSource, List daos) throws Exception { + final DbType type = getCurrentDbType(dataSource); + upgradeDao = daos.stream() + .filter(it -> it.getDbType() == type) + .findFirst() + .orElseThrow(() -> new RuntimeException( + "Cannot find UpgradeDao implementation for db type: " + type + )); + } + + 
private DbType getCurrentDbType(DataSource dataSource) throws Exception { + try (Connection conn = dataSource.getConnection()) { + String name = conn.getMetaData().getDatabaseProductName().toUpperCase(); + return DbType.valueOf(name); } } - /** - * constructor init - */ - public DolphinSchedulerManager() { - initUpgradeDao(); + public void initDolphinScheduler() { + this.initDolphinSchedulerSchema(); } /** - * init DolphinScheduler + * whether schema is initialized + * @return true if schema is initialized */ - public void initDolphinScheduler() { + public boolean schemaIsInitialized() { // Determines whether the dolphinscheduler table structure has been init - if (upgradeDao.isExistsTable("t_escheduler_version") || - upgradeDao.isExistsTable("t_ds_version") || - upgradeDao.isExistsTable("t_escheduler_queue")) { + if (upgradeDao.isExistsTable("t_escheduler_version") + || upgradeDao.isExistsTable("t_ds_version") + || upgradeDao.isExistsTable("t_escheduler_queue")) { logger.info("The database has been initialized. 
Skip the initialization step"); - return; + return true; } - this.initDolphinSchedulerSchema(); + return false; } - /** - * init DolphinScheduler Schema - */ public void initDolphinSchedulerSchema() { - logger.info("Start initializing the DolphinScheduler manager table structure"); upgradeDao.initSchema(); } - - /** - * upgrade DolphinScheduler - * @throws Exception if error throws Exception - */ - public void upgradeDolphinScheduler() throws Exception{ - + public void upgradeDolphinScheduler() throws IOException { // Gets a list of all upgrades List schemaList = SchemaUtils.getAllSchemaList(); - if(schemaList == null || schemaList.size() == 0) { + if (schemaList == null || schemaList.size() == 0) { logger.info("There is no schema to upgrade!"); - }else { - - String version = ""; + } else { + String version; // Gets the version of the current system if (upgradeDao.isExistsTable("t_escheduler_version")) { version = upgradeDao.getCurrentVersion("t_escheduler_version"); - }else if(upgradeDao.isExistsTable("t_ds_version")){ + } else if (upgradeDao.isExistsTable("t_ds_version")) { version = upgradeDao.getCurrentVersion("t_ds_version"); - }else if(upgradeDao.isExistsColumn("t_escheduler_queue","create_time")){ + } else if (upgradeDao.isExistsColumn("t_escheduler_queue", "create_time")) { version = "1.0.1"; - }else if(upgradeDao.isExistsTable("t_escheduler_queue")){ + } else if (upgradeDao.isExistsTable("t_escheduler_queue")) { version = "1.0.0"; - }else{ + } else { logger.error("Unable to determine current software version, so cannot upgrade"); throw new RuntimeException("Unable to determine current software version, so cannot upgrade"); } // The target version of the upgrade String schemaVersion = ""; - for(String schemaDir : schemaList) { + String currentVersion = version; + for (String schemaDir : schemaList) { schemaVersion = schemaDir.split("_")[0]; - if(SchemaUtils.isAGreatVersion(schemaVersion , version)) { + if (SchemaUtils.isAGreatVersion(schemaVersion, 
version)) { logger.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion); logger.info("Begin upgrading DolphinScheduler's table structure"); - upgradeDao.upgradeDolphinScheduler(schemaDir); + upgradeDao.upgradeDolphinScheduler(schemaDir); if ("1.3.0".equals(schemaVersion)) { upgradeDao.upgradeDolphinSchedulerWorkerGroup(); } else if ("1.3.2".equals(schemaVersion)) { upgradeDao.upgradeDolphinSchedulerResourceList(); + } else if ("2.0.0".equals(schemaVersion)) { + upgradeDao.upgradeDolphinSchedulerTo200(schemaDir); } version = schemaVersion; } + } + if (SchemaUtils.isAGreatVersion("2.0.6", currentVersion) && SchemaUtils.isAGreatVersion(SchemaUtils.getSoftVersion(), currentVersion)) { + upgradeDao.upgradeDolphinSchedulerResourceFileSize(); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java new file mode 100644 index 0000000000000000000000000000000000000000..46c7610fde793aabbeb3c1633a47ed16e64b365a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java @@ -0,0 +1,248 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; + +import java.sql.Connection; +import java.sql.Date; +import java.sql.PreparedStatement; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JsonSplitDao { + + public static final Logger logger = LoggerFactory.getLogger(JsonSplitDao.class); + + /** + * executeJsonSplitProcessDefinition + * + * @param conn jdbc connection + * @param processDefinitionLogs processDefinitionLogs + */ + public void executeJsonSplitProcessDefinition(Connection conn, List processDefinitionLogs) { + String updateSql = "UPDATE t_ds_process_definition SET global_params=?,timeout=?,tenant_id=?,locations=?,update_time=? 
where id=?"; + String insertLogSql = "insert into t_ds_process_definition_log (code,name,version,description,project_code,release_state,user_id," + + "global_params,flag,locations,timeout,tenant_id,operator,operate_time,create_time,update_time) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; + try { + PreparedStatement processUpdate = conn.prepareStatement(updateSql); + PreparedStatement insertLog = conn.prepareStatement(insertLogSql); + int i = 0; + for (ProcessDefinitionLog processDefinitionLog : processDefinitionLogs) { + processUpdate.setString(1, processDefinitionLog.getGlobalParams()); + processUpdate.setInt(2, processDefinitionLog.getTimeout()); + processUpdate.setInt(3, processDefinitionLog.getTenantId()); + processUpdate.setString(4, processDefinitionLog.getLocations()); + processUpdate.setDate(5, new Date(processDefinitionLog.getUpdateTime().getTime())); + processUpdate.setInt(6, processDefinitionLog.getId()); + processUpdate.addBatch(); + + insertLog.setLong(1, processDefinitionLog.getCode()); + insertLog.setString(2, processDefinitionLog.getName()); + insertLog.setInt(3, processDefinitionLog.getVersion()); + insertLog.setString(4, processDefinitionLog.getDescription()); + insertLog.setLong(5, processDefinitionLog.getProjectCode()); + insertLog.setInt(6, processDefinitionLog.getReleaseState().getCode()); + insertLog.setInt(7, processDefinitionLog.getUserId()); + insertLog.setString(8, processDefinitionLog.getGlobalParams()); + insertLog.setInt(9, processDefinitionLog.getFlag().getCode()); + insertLog.setString(10, processDefinitionLog.getLocations()); + insertLog.setInt(11, processDefinitionLog.getTimeout()); + insertLog.setInt(12, processDefinitionLog.getTenantId()); + insertLog.setInt(13, processDefinitionLog.getOperator()); + insertLog.setDate(14, new Date(processDefinitionLog.getOperateTime().getTime())); + insertLog.setDate(15, new Date(processDefinitionLog.getCreateTime().getTime())); + insertLog.setDate(16, new 
Date(processDefinitionLog.getUpdateTime().getTime())); + insertLog.addBatch(); + + i++; + if (i % 1000 == 0) { + processUpdate.executeBatch(); + processUpdate.clearBatch(); + insertLog.executeBatch(); + insertLog.clearBatch(); + } + } + processUpdate.executeBatch(); + insertLog.executeBatch(); + processUpdate.close(); + insertLog.close(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException(e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } + + /** + * executeJsonSplitProcessDefinition + * + * @param conn jdbc connection + * @param processTaskRelationLogs processTaskRelationLogs + */ + public void executeJsonSplitProcessTaskRelation(Connection conn, List processTaskRelationLogs) { + String insertSql = "insert into t_ds_process_task_relation (project_code,process_definition_code,process_definition_version,pre_task_code,pre_task_version," + + "post_task_code,post_task_version,condition_type,condition_params,create_time,update_time) values (?,?,?,?,?,?,?,?,?,?,?)"; + String insertLogSql = "insert into t_ds_process_task_relation_log (project_code,process_definition_code,process_definition_version,pre_task_code," + + "pre_task_version,post_task_code,post_task_version,condition_type,condition_params,operator,operate_time,create_time,update_time) " + + "values (?,?,?,?,?,?,?,?,?,?,?,?,?)"; + try { + PreparedStatement insert = conn.prepareStatement(insertSql); + PreparedStatement insertLog = conn.prepareStatement(insertLogSql); + int i = 0; + for (ProcessTaskRelationLog processTaskRelationLog : processTaskRelationLogs) { + insert.setLong(1, processTaskRelationLog.getProjectCode()); + insert.setLong(2, processTaskRelationLog.getProcessDefinitionCode()); + insert.setInt(3, processTaskRelationLog.getProcessDefinitionVersion()); + insert.setLong(4, processTaskRelationLog.getPreTaskCode()); + insert.setInt(5, processTaskRelationLog.getPreTaskVersion()); + insert.setLong(6, processTaskRelationLog.getPostTaskCode()); + 
insert.setInt(7, processTaskRelationLog.getPostTaskVersion()); + insert.setInt(8, processTaskRelationLog.getConditionType().getCode()); + insert.setString(9, processTaskRelationLog.getConditionParams()); + insert.setDate(10, new Date(processTaskRelationLog.getCreateTime().getTime())); + insert.setDate(11, new Date(processTaskRelationLog.getUpdateTime().getTime())); + insert.addBatch(); + + insertLog.setLong(1, processTaskRelationLog.getProjectCode()); + insertLog.setLong(2, processTaskRelationLog.getProcessDefinitionCode()); + insertLog.setInt(3, processTaskRelationLog.getProcessDefinitionVersion()); + insertLog.setLong(4, processTaskRelationLog.getPreTaskCode()); + insertLog.setInt(5, processTaskRelationLog.getPreTaskVersion()); + insertLog.setLong(6, processTaskRelationLog.getPostTaskCode()); + insertLog.setInt(7, processTaskRelationLog.getPostTaskVersion()); + insertLog.setInt(8, processTaskRelationLog.getConditionType().getCode()); + insertLog.setString(9, processTaskRelationLog.getConditionParams()); + insertLog.setInt(10, processTaskRelationLog.getOperator()); + insertLog.setDate(11, new Date(processTaskRelationLog.getOperateTime().getTime())); + insertLog.setDate(12, new Date(processTaskRelationLog.getCreateTime().getTime())); + insertLog.setDate(13, new Date(processTaskRelationLog.getUpdateTime().getTime())); + insertLog.addBatch(); + + i++; + if (i % 1000 == 0) { + insert.executeBatch(); + insert.clearBatch(); + insertLog.executeBatch(); + insertLog.clearBatch(); + } + } + insert.executeBatch(); + insertLog.executeBatch(); + insert.close(); + insertLog.close(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException(e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } + + /** + * executeJsonSplitTaskDefinition + * + * @param conn jdbc connection + * @param taskDefinitionLogs taskDefinitionLogs + */ + public void executeJsonSplitTaskDefinition(Connection conn, List taskDefinitionLogs) { + String insertSql = 
"insert into t_ds_task_definition (code,name,version,description,project_code,user_id,task_type,task_params,flag,task_priority," + + "worker_group,environment_code,fail_retry_times,fail_retry_interval,timeout_flag,timeout_notify_strategy,timeout,delay_time,resource_ids," + + "create_time,update_time) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; + String insertLogSql = "insert into t_ds_task_definition_log (code,name,version,description,project_code,user_id,task_type,task_params,flag,task_priority," + + "worker_group,environment_code,fail_retry_times,fail_retry_interval,timeout_flag,timeout_notify_strategy,timeout,delay_time,resource_ids,operator," + + "operate_time,create_time,update_time) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; + try { + PreparedStatement insert = conn.prepareStatement(insertSql); + PreparedStatement insertLog = conn.prepareStatement(insertLogSql); + int i = 0; + for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { + insert.setLong(1, taskDefinitionLog.getCode()); + insert.setString(2, taskDefinitionLog.getName()); + insert.setInt(3, taskDefinitionLog.getVersion()); + insert.setString(4, taskDefinitionLog.getDescription()); + insert.setLong(5, taskDefinitionLog.getProjectCode()); + insert.setInt(6, taskDefinitionLog.getUserId()); + insert.setString(7, taskDefinitionLog.getTaskType()); + insert.setString(8, taskDefinitionLog.getTaskParams()); + insert.setInt(9, taskDefinitionLog.getFlag().getCode()); + insert.setInt(10, taskDefinitionLog.getTaskPriority().getCode()); + insert.setString(11, taskDefinitionLog.getWorkerGroup()); + insert.setLong(12, taskDefinitionLog.getEnvironmentCode()); + insert.setInt(13, taskDefinitionLog.getFailRetryTimes()); + insert.setInt(14, taskDefinitionLog.getFailRetryInterval()); + insert.setInt(15, taskDefinitionLog.getTimeoutFlag().getCode()); + insert.setInt(16, taskDefinitionLog.getTimeoutNotifyStrategy() == null ? 
0 : taskDefinitionLog.getTimeoutNotifyStrategy().getCode()); + insert.setInt(17, taskDefinitionLog.getTimeout()); + insert.setInt(18, taskDefinitionLog.getDelayTime()); + insert.setString(19, taskDefinitionLog.getResourceIds()); + insert.setDate(20, new Date(taskDefinitionLog.getCreateTime().getTime())); + insert.setDate(21, new Date(taskDefinitionLog.getUpdateTime().getTime())); + insert.addBatch(); + + insertLog.setLong(1, taskDefinitionLog.getCode()); + insertLog.setString(2, taskDefinitionLog.getName()); + insertLog.setInt(3, taskDefinitionLog.getVersion()); + insertLog.setString(4, taskDefinitionLog.getDescription()); + insertLog.setLong(5, taskDefinitionLog.getProjectCode()); + insertLog.setInt(6, taskDefinitionLog.getUserId()); + insertLog.setString(7, taskDefinitionLog.getTaskType()); + insertLog.setString(8, taskDefinitionLog.getTaskParams()); + insertLog.setInt(9, taskDefinitionLog.getFlag().getCode()); + insertLog.setInt(10, taskDefinitionLog.getTaskPriority().getCode()); + insertLog.setString(11, taskDefinitionLog.getWorkerGroup()); + insertLog.setLong(12, taskDefinitionLog.getEnvironmentCode()); + insertLog.setInt(13, taskDefinitionLog.getFailRetryTimes()); + insertLog.setInt(14, taskDefinitionLog.getFailRetryInterval()); + insertLog.setInt(15, taskDefinitionLog.getTimeoutFlag().getCode()); + insertLog.setInt(16, taskDefinitionLog.getTimeoutNotifyStrategy() == null ? 
0 : taskDefinitionLog.getTimeoutNotifyStrategy().getCode()); + insertLog.setInt(17, taskDefinitionLog.getTimeout()); + insertLog.setInt(18, taskDefinitionLog.getDelayTime()); + insertLog.setString(19, taskDefinitionLog.getResourceIds()); + insertLog.setInt(20, taskDefinitionLog.getOperator()); + insertLog.setDate(21, new Date(taskDefinitionLog.getOperateTime().getTime())); + insertLog.setDate(22, new Date(taskDefinitionLog.getCreateTime().getTime())); + insertLog.setDate(23, new Date(taskDefinitionLog.getUpdateTime().getTime())); + insertLog.addBatch(); + + i++; + if (i % 1000 == 0) { + insert.executeBatch(); + insert.clearBatch(); + insertLog.executeBatch(); + insertLog.clearBatch(); + } + } + insert.executeBatch(); + insertLog.executeBatch(); + insert.close(); + insertLog.close(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException(e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java index 255f1cf081a49ee74bcf9c78f3ad33a866e87476..c27e7a0159744bba2372085da26f416bf191fffc 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java @@ -17,38 +17,36 @@ package org.apache.dolphinscheduler.dao.upgrade; import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Service; +import javax.sql.DataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; -/** - * mysql upgrade dao - */ +@Service 
+@Profile("shell-cli") public class MysqlUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(MysqlUpgradeDao.class); - /** - * mysql upgrade dao holder - */ - private static class MysqlUpgradeDaoHolder { - private static final MysqlUpgradeDao INSTANCE = new MysqlUpgradeDao(); + private MysqlUpgradeDao(DataSource dataSource) { + super(dataSource); } - /** - * mysql upgrade dao constructor - */ - private MysqlUpgradeDao() { + @Override + protected String initSqlPath() { + return "create/release-1.0.0_schema/mysql"; } - public static final MysqlUpgradeDao getInstance() { - return MysqlUpgradeDaoHolder.INSTANCE; + @Override + protected DbType getDbType() { + return DbType.MYSQL; } - /** * determines whether a table exists * @param tableName tableName @@ -60,7 +58,7 @@ public class MysqlUpgradeDao extends UpgradeDao { Connection conn = null; try { conn = dataSource.getConnection(); - rs = conn.getMetaData().getTables(null, null, tableName, null); + rs = conn.getMetaData().getTables(conn.getCatalog(), conn.getSchema(), tableName, null); return rs.next(); } catch (SQLException e) { logger.error(e.getMessage(),e); @@ -82,7 +80,7 @@ public class MysqlUpgradeDao extends UpgradeDao { Connection conn = null; try { conn = dataSource.getConnection(); - ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName); + ResultSet rs = conn.getMetaData().getColumns(conn.getCatalog(), conn.getSchema(),tableName,columnName); return rs.next(); } catch (SQLException e) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java index b4049450abe8e62e692c80e1d6d6e054cfc71b68..b9cd625c2cd992c7c7bee1593b2cf76aa8aa39cc 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java +++ 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java @@ -17,44 +17,38 @@ package org.apache.dolphinscheduler.dao.upgrade; import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Service; +import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -/** - * postgresql upgrade dao - */ +@Service +@Profile("shell-cli") public class PostgresqlUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(PostgresqlUpgradeDao.class); - private static final String SCHEMA = getSchema(); - /** - * postgresql upgrade dao holder - */ - private static class PostgresqlUpgradeDaoHolder { - private static final PostgresqlUpgradeDao INSTANCE = new PostgresqlUpgradeDao(); + private PostgresqlUpgradeDao(DataSource dataSource) { + super(dataSource); } - /** - * PostgresqlUpgradeDao Constructor - */ - private PostgresqlUpgradeDao() { + @Override + protected String initSqlPath() { + return "create/release-1.2.0_schema/postgresql"; } - public static final PostgresqlUpgradeDao getInstance() { - return PostgresqlUpgradeDaoHolder.INSTANCE; + @Override + protected DbType getDbType() { + return DbType.POSTGRESQL; } - /** - * getSchema - * @return schema - */ - public static String getSchema(){ + public String getSchema() { Connection conn = null; PreparedStatement pstmt = null; ResultSet resultSet = null; @@ -62,14 +56,14 @@ public class PostgresqlUpgradeDao extends UpgradeDao { conn = dataSource.getConnection(); pstmt = conn.prepareStatement("select current_schema()"); resultSet = pstmt.executeQuery(); - while (resultSet.next()){ - if(resultSet.isFirst()){ + while (resultSet.next()) { + if (resultSet.isFirst()) 
{ return resultSet.getString(1); } } } catch (SQLException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(resultSet, pstmt, conn); } @@ -79,6 +73,7 @@ public class PostgresqlUpgradeDao extends UpgradeDao { /** * determines whether a table exists + * * @param tableName tableName * @return if table exist return true,else return false */ @@ -89,12 +84,12 @@ public class PostgresqlUpgradeDao extends UpgradeDao { try { conn = dataSource.getConnection(); - rs = conn.getMetaData().getTables(null, SCHEMA, tableName, null); + rs = conn.getMetaData().getTables(conn.getCatalog(), getSchema(), tableName, null); return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(rs, conn); } @@ -103,21 +98,22 @@ public class PostgresqlUpgradeDao extends UpgradeDao { /** * determines whether a field exists in the specified table + * * @param tableName tableName * @param columnName columnName - * @return if column name exist return true,else return false + * @return if column name exist return true,else return false */ @Override - public boolean isExistsColumn(String tableName,String columnName) { + public boolean isExistsColumn(String tableName, String columnName) { Connection conn = null; ResultSet rs = null; try { conn = dataSource.getConnection(); - rs = conn.getMetaData().getColumns(null, SCHEMA,tableName,columnName); + rs = conn.getMetaData().getColumns(conn.getCatalog(), getSchema(), tableName, columnName); return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(rs, conn); diff --git 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java index 8b6d7624131e92173f4f4ec69632ce5b704d2d2d..f4d198e0a14e5a60f1ebf69c25221dce279630f9 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java @@ -17,12 +17,19 @@ package org.apache.dolphinscheduler.dao.upgrade; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.slf4j.Logger; @@ -43,7 +50,7 @@ public class ProcessDefinitionDao { Map processDefinitionJsonMap = new HashMap<>(); - String sql = String.format("SELECT id,process_definition_json FROM t_ds_process_definition"); + String sql = "SELECT id,process_definition_json FROM t_ds_process_definition"; ResultSet rs = null; PreparedStatement pstmt = null; try { @@ -66,7 +73,6 @@ public class ProcessDefinitionDao { return processDefinitionJsonMap; } - /** * updateProcessDefinitionJson * @@ -82,9 +88,78 @@ public class ProcessDefinitionDao { pstmt.setInt(2, entry.getKey()); pstmt.executeUpdate(); } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } + public List queryProcessDefinition(Connection conn) { + List 
processDefinitions = new ArrayList<>(); + String sql = "SELECT id,code,project_code,user_id,locations,name,description,release_state,flag,create_time FROM t_ds_process_definition"; + ResultSet rs = null; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + while (rs.next()) { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(rs.getInt(1)); + long code = rs.getLong(2); + if (code == 0L) { + code = CodeGenerateUtils.getInstance().genCode(); + } + processDefinition.setCode(code); + processDefinition.setVersion(Constants.VERSION_FIRST); + processDefinition.setProjectCode(rs.getLong(3)); + processDefinition.setUserId(rs.getInt(4)); + processDefinition.setLocations(rs.getString(5)); + processDefinition.setName(rs.getString(6)); + processDefinition.setDescription(rs.getString(7)); + processDefinition.setReleaseState(ReleaseState.getEnum(rs.getInt(8))); + processDefinition.setFlag(rs.getInt(9) == 1 ? Flag.YES : Flag.NO); + processDefinition.setCreateTime(rs.getDate(10)); + processDefinitions.add(processDefinition); } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(rs, pstmt, conn); + } + return processDefinitions; + } + /** + * updateProcessDefinitionCode + * + * @param conn jdbc connection + * @param processDefinitions processDefinitions + * @param projectIdCodeMap projectIdCodeMap + */ + public void updateProcessDefinitionCode(Connection conn, List processDefinitions, Map projectIdCodeMap) { + String sql = "UPDATE t_ds_process_definition SET code=?, project_code=?, version=? 
where id=?"; + try { + for (ProcessDefinition processDefinition : processDefinitions) { + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { + pstmt.setLong(1, processDefinition.getCode()); + long projectCode = processDefinition.getProjectCode(); + if (String.valueOf(projectCode).length() <= 10) { + Integer projectId = Integer.parseInt(String.valueOf(projectCode)); + if (projectIdCodeMap.containsKey(projectId)) { + projectCode = projectIdCodeMap.get(projectId); + processDefinition.setProjectCode(projectCode); + } + } + pstmt.setLong(2, projectCode); + pstmt.setInt(3, processDefinition.getVersion()); + pstmt.setInt(4, processDefinition.getId()); + pstmt.executeUpdate(); + } + } } catch (Exception e) { logger.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java new file mode 100644 index 0000000000000000000000000000000000000000..2906902f5a71926a469abfa3fd60cbb6b5e5e90b --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ProjectDao { + + public static final Logger logger = LoggerFactory.getLogger(ProjectDao.class); + + /** + * queryAllProject + * + * @param conn jdbc connection + * @return Project List + */ + public Map queryAllProject(Connection conn) { + Map projectMap = new HashMap<>(); + String sql = "SELECT id,code FROM t_ds_project"; + ResultSet rs = null; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + while (rs.next()) { + Integer id = rs.getInt(1); + long code = rs.getLong(2); + if (code == 0L) { + code = CodeGenerateUtils.getInstance().genCode(); + } + projectMap.put(id, code); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(rs, pstmt, conn); + } + return projectMap; + } + + /** + * updateProjectCode + * + * @param conn jdbc connection + * @param projectMap projectMap + */ + public void updateProjectCode(Connection conn, Map projectMap) { + String sql = "UPDATE t_ds_project SET code=? 
where id=?"; + try { + for (Map.Entry entry : projectMap.entrySet()) { + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { + pstmt.setLong(1, entry.getValue()); + pstmt.setInt(2, entry.getKey()); + pstmt.executeUpdate(); + } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java index c9ff149306240d9b5b3a7bcf46e4db620c71e7e6..7d587e708fbc02eb00d1c8f533b0d6be2014de19 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java @@ -18,21 +18,27 @@ package org.apache.dolphinscheduler.dao.upgrade; import org.apache.dolphinscheduler.common.utils.ConnectionUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; +import java.util.Objects; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Joiner; /** * resource dao */ public class ResourceDao { - public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class); + public static final Logger logger = LoggerFactory.getLogger(ResourceDao.class); /** * list all resources @@ -66,4 +72,89 @@ public class ResourceDao { return resourceMap; } + /** + * list all resources by the type + * + * @param conn connection + * @return map that key is full_name and value is the folder's size + */ + private Map listAllResourcesByFileType(Connection 
conn, int type) { + Map resourceSizeMap = new HashMap<>(); + + String sql = String.format("SELECT full_name, type, size, is_directory FROM t_ds_resources where type = %d", type); + ResultSet rs = null; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + + while (rs.next()) { + String fullName = rs.getString("full_name"); + Boolean isDirectory = rs.getBoolean("is_directory"); + long fileSize = rs.getLong("size"); + + if (StringUtils.isNotBlank(fullName) && !isDirectory) { + String[] splits = fullName.split("/"); + for (int i = 1; i < splits.length; i++) { + String parentFullName = Joiner.on("/").join(Arrays.copyOfRange(splits,0, splits.length - i)); + if (StringUtils.isNotEmpty(parentFullName)) { + long size = resourceSizeMap.getOrDefault(parentFullName, 0L); + resourceSizeMap.put(parentFullName, size + fileSize); + } + } + } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + if (Objects.nonNull(pstmt)) { + try { + if (!pstmt.isClosed()) { + pstmt.close(); + } + } catch (SQLException e) { + logger.error(e.getMessage(), e); + } + } + } + return resourceSizeMap; + } + + /** + * update the folder's size + * + * @param conn connection + */ + public void updateResourceFolderSizeByFileType(Connection conn, int type) { + Map resourceSizeMap = listAllResourcesByFileType(conn, type); + + String sql = "UPDATE t_ds_resources SET size=? where type=? and full_name=? 
and is_directory = true"; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + for (Map.Entry entry : resourceSizeMap.entrySet()) { + pstmt.setLong(1, entry.getValue()); + pstmt.setInt(2, type); + pstmt.setString(3, entry.getKey()); + pstmt.addBatch(); + } + pstmt.executeBatch(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + if (Objects.nonNull(pstmt)) { + try { + if (!pstmt.isClosed()) { + pstmt.close(); + } + } catch (SQLException e) { + logger.error(e.getMessage(), e); + } + } + ConnectionUtils.releaseResource(conn); + } + } + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java new file mode 100644 index 0000000000000000000000000000000000000000..80a49fd42ba61cce57272e11839b4001abf863fa --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.time.Clock; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ScheduleDao { + + public static final Logger logger = LoggerFactory.getLogger(ScheduleDao.class); + + /** + * queryAllSchedule + * + * @param conn jdbc connection + * @return Schedule List + */ + public Map queryAllSchedule(Connection conn) { + Map scheduleMap = new HashMap<>(); + String sql = "SELECT id,process_definition_code FROM t_ds_schedules"; + ResultSet rs = null; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + while (rs.next()) { + Integer id = rs.getInt(1); + long processDefinitionCode = rs.getLong(2); + scheduleMap.put(id, processDefinitionCode); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(rs, pstmt, conn); + } + return scheduleMap; + } + + /** + * update schedule + * + * @param conn jdbc connection + * @param scheduleMap scheduleMap + * @param processIdCodeMap processIdCodeMap + */ + public void updateScheduleCode(Connection conn, Map scheduleMap, Map processIdCodeMap) { + String sql = "UPDATE t_ds_schedules SET process_definition_code=?,timezone_id=?,environment_code=-1 where id=?"; + try { + Clock clock = Clock.systemDefaultZone(); + String timezoneId = clock.getZone().getId(); + for (Map.Entry entry : scheduleMap.entrySet()) { + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { + long projectDefinitionCode = entry.getValue(); + if (String.valueOf(projectDefinitionCode).length() <= 10) { + Integer projectDefinitionId = Integer.parseInt(String.valueOf(projectDefinitionCode)); + if 
(processIdCodeMap.containsKey(projectDefinitionId)) { + projectDefinitionCode = processIdCodeMap.get(projectDefinitionId); + } + } + pstmt.setLong(1, projectDefinitionCode); + pstmt.setString(2, timezoneId); + pstmt.setInt(3, entry.getKey()); + pstmt.executeUpdate(); + } + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(conn); + } + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java similarity index 59% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java index 880c440bfbc8183bf0b278dab9b4119281e54888..83acb75c826058e0db7bdeda43d38ec64d5c04ea 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SchemaUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java @@ -15,78 +15,59 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.utils; +package org.apache.dolphinscheduler.dao.upgrade; +import com.google.common.base.Strings; import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.io.ClassPathResource; import java.io.File; -import java.io.FileInputStream; import java.io.FileNotFoundException; -import java.util.ArrayList; +import java.io.IOException; +import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import java.util.stream.Collectors; /** * Metadata related common classes */ public class SchemaUtils { - private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class); - private static Pattern p = Pattern.compile("\\s*|\t|\r|\n"); private SchemaUtils() { throw new UnsupportedOperationException("Construct SchemaUtils"); } - /** - * Gets upgradable schemas for all upgrade directories - * - * @return all schema list - */ - @SuppressWarnings("unchecked") - public static List getAllSchemaList() { - List schemaDirList = new ArrayList<>(); - File[] schemaDirArr = FileUtils.getAllDir("sql/upgrade"); + public static List getAllSchemaList() throws IOException { + final File[] schemaDirArr = new ClassPathResource("sql/upgrade").getFile().listFiles(); + if (schemaDirArr == null || schemaDirArr.length == 0) { - return null; + return Collections.emptyList(); } - for (File file : schemaDirArr) { - schemaDirList.add(file.getName()); - } + return Arrays.stream(schemaDirArr).map(File::getName).sorted((o1, o2) -> { + try { + String version1 = o1.split("_")[0]; + String version2 = o2.split("_")[0]; - Collections.sort(schemaDirList, new Comparator() { - @Override - public int compare(Object o1, Object o2) { - try { - String dir1 = 
String.valueOf(o1); - String dir2 = String.valueOf(o2); - String version1 = dir1.split("_")[0]; - String version2 = dir2.split("_")[0]; - if (version1.equals(version2)) { - return 0; - } - - if (SchemaUtils.isAGreatVersion(version1, version2)) { - return 1; - } - - return -1; - - } catch (Exception e) { - logger.error(e.getMessage(), e); - throw new RuntimeException(e); + if (version1.equals(version2)) { + return 0; + } + + if (SchemaUtils.isAGreatVersion(version1, version2)) { + return 1; } - } - }); - return schemaDirList; + return -1; + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException(e); + } + }).collect(Collectors.toList()); } /** @@ -121,11 +102,12 @@ public class SchemaUtils { * * @return current software version */ - public static String getSoftVersion() { + public static String getSoftVersion() throws IOException { + final ClassPathResource softVersionFile = new ClassPathResource("sql/soft_version"); String softVersion; try { - softVersion = FileUtils.readFile2Str(new FileInputStream(new File("sql/soft_version"))); - softVersion = replaceBlank(softVersion); + softVersion = FileUtils.readFile2Str(softVersionFile.getInputStream()); + softVersion = Strings.nullToEmpty(softVersion).replaceAll("\\s+|\r|\n", ""); } catch (FileNotFoundException e) { logger.error(e.getMessage(), e); throw new RuntimeException("Failed to get the product version description file. 
The file could not be found", e); @@ -133,19 +115,4 @@ public class SchemaUtils { return softVersion; } - /** - * Strips the string of space carriage returns and tabs - * - * @param str string - * @return string removed blank - */ - public static String replaceBlank(String str) { - String dest = ""; - if (str != null) { - - Matcher m = p.matcher(str); - dest = m.replaceAll(""); - } - return dest; - } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java index fb3ffb0b88975df08f6270494c629f9209e613a0..0cc571bf02a17e31c7ecc133b731ae90f260afa4 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java @@ -14,226 +14,101 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.dao.upgrade; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ConditionType; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.TimeoutFlag; import org.apache.dolphinscheduler.common.process.ResourceInfo; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.AbstractBaseDao; -import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; - +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ScriptRunner; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelationLog; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.sql.DataSource; -import java.io.*; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.text.MessageFormat; +import java.util.ArrayList; 
+import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; -public abstract class UpgradeDao extends AbstractBaseDao { +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +public abstract class UpgradeDao { public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); private static final String T_VERSION_NAME = "t_escheduler_version"; private static final String T_NEW_VERSION_NAME = "t_ds_version"; - private static final String rootDir = System.getProperty("user.dir"); - protected static final DataSource dataSource = getDataSource(); - private static final DbType dbType = getCurrentDbType(); + protected final DataSource dataSource; - @Override - protected void init() { - + protected UpgradeDao(DataSource dataSource) { + this.dataSource = dataSource; } - /** - * get datasource - * @return DruidDataSource - */ - public static DataSource getDataSource(){ - return ConnectionFactory.getInstance().getDataSource(); - } + protected abstract String initSqlPath(); - /** - * get db type - * @return dbType - */ - public static DbType getDbType(){ - return dbType; - } + protected abstract DbType getDbType(); - /** - * get current dbType - * @return - */ - private static DbType getCurrentDbType(){ - Connection conn = null; - try { - conn = dataSource.getConnection(); - String name = conn.getMetaData().getDatabaseProductName().toUpperCase(); - return DbType.valueOf(name); - } catch (Exception e) { - logger.error(e.getMessage(),e); - return null; - }finally { - 
ConnectionUtils.releaseResource(conn); - } - } - - /** - * init schema - */ public void initSchema() { - DbType dbType = getDbType(); - String initSqlPath = ""; - if (dbType != null) { - switch (dbType) { - case MYSQL: - initSqlPath = "/sql/create/release-1.0.0_schema/mysql/"; - initSchema(initSqlPath); - break; - case POSTGRESQL: - initSqlPath = "/sql/create/release-1.2.0_schema/postgresql/"; - initSchema(initSqlPath); - break; - default: - logger.error("not support sql type: {},can't upgrade", dbType); - throw new IllegalArgumentException("not support sql type,can't upgrade"); - } - } + // Execute the dolphinscheduler full sql + runInitSql(getDbType()); } - /** - * init scheam - * - * @param initSqlPath initSqlPath + * run init sql to init db schema + * @param dbType db type */ - public void initSchema(String initSqlPath) { - - // Execute the dolphinscheduler DDL, it cannot be rolled back - runInitDDL(initSqlPath); - - // Execute the dolphinscheduler DML, it can be rolled back - runInitDML(initSqlPath); - - - } - - /** - * run DML - * - * @param initSqlPath initSqlPath - */ - private void runInitDML(String initSqlPath) { - Connection conn = null; - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_dml.sql"; - try { - conn = dataSource.getConnection(); - conn.setAutoCommit(false); - - // Execute the dolphinscheduler_dml.sql script to import related data of dolphinscheduler - ScriptRunner initScriptRunner = new ScriptRunner(conn, false, true); - Reader initSqlReader = new FileReader(mysqlSQLFilePath); - initScriptRunner.runScript(initSqlReader); - - conn.commit(); - } catch (IOException e) { - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error(e1.getMessage(), e1); - } - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); - } catch (Exception e) { - try { - if (null != conn) { - 
conn.rollback(); - } - } catch (SQLException e1) { - logger.error(e1.getMessage(), e1); - } - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); - } finally { - ConnectionUtils.releaseResource(conn); - - } - - } - - /** - * run DDL - * - * @param initSqlPath initSqlPath - */ - private void runInitDDL(String initSqlPath) { - Connection conn = null; - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - //String mysqlSQLFilePath = rootDir + "/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql"; - String mysqlSQLFilePath = rootDir + initSqlPath + "dolphinscheduler_ddl.sql"; - try { - conn = dataSource.getConnection(); + private void runInitSql(DbType dbType) { + String sqlFile = String.format("dolphinscheduler_%s.sql",dbType.getDescp()); + Resource mysqlSQLFilePath = new ClassPathResource("sql/" + sqlFile); + try (Connection conn = dataSource.getConnection()) { // Execute the dolphinscheduler_ddl.sql script to create the table structure of dolphinscheduler ScriptRunner initScriptRunner = new ScriptRunner(conn, true, true); - Reader initSqlReader = new FileReader(mysqlSQLFilePath); + Reader initSqlReader = new InputStreamReader(mysqlSQLFilePath.getInputStream()); initScriptRunner.runScript(initSqlReader); - - } catch (IOException e) { - - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { - logger.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); - } finally { - ConnectionUtils.releaseResource(conn); - } - } - /** - * determines whether a table exists - * - * @param tableName tableName - * @return if table exist return true,else return false - */ public abstract boolean isExistsTable(String tableName); - /** - * determines whether a field exists in the specified table - * - * @param tableName tableName - * @param columnName columnName - * @return if column name exist return true,else 
return false - */ public abstract boolean isExistsColumn(String tableName, String columnName); - - /** - * get current version - * - * @param versionName versionName - * @return version - */ public String getCurrentVersion(String versionName) { String sql = String.format("select version from %s", versionName); Connection conn = null; @@ -259,20 +134,11 @@ public abstract class UpgradeDao extends AbstractBaseDao { } } - - /** - * upgrade DolphinScheduler - * - * @param schemaDir schema dir - */ public void upgradeDolphinScheduler(String schemaDir) { - - upgradeDolphinSchedulerDDL(schemaDir); - + upgradeDolphinSchedulerDDL(schemaDir, "dolphinscheduler_ddl.sql"); upgradeDolphinSchedulerDML(schemaDir); } - /** * upgrade DolphinScheduler worker group * ds-1.3.0 modify the worker group for process definition json @@ -289,6 +155,29 @@ public abstract class UpgradeDao extends AbstractBaseDao { updateProcessDefinitionJsonResourceList(); } + /** + * upgrade DolphinScheduler to 2.0.0 + */ + public void upgradeDolphinSchedulerTo200(String schemaDir) { + processDefinitionJsonSplit(); + upgradeDolphinSchedulerDDL(schemaDir, "dolphinscheduler_ddl_post.sql"); + } + + /** + * upgrade DolphinScheduler to 2.0.6 + */ + public void upgradeDolphinSchedulerResourceFileSize() { + ResourceDao resourceDao = new ResourceDao(); + try { + // update the size of the folder that is the type of file. + resourceDao.updateResourceFolderSizeByFileType(dataSource.getConnection(), 0); + // update the size of the folder that is the type of udf. 
+ resourceDao.updateResourceFolderSizeByFileType(dataSource.getConnection(), 1); + } catch (Exception ex) { + logger.error("Failed to upgrade because of failing to update the folder's size of resource files."); + } + } + /** * updateProcessDefinitionJsonWorkerGroup */ @@ -307,7 +196,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { for (int i = 0; i < tasks.size(); i++) { ObjectNode task = (ObjectNode) tasks.path(i); ObjectNode workerGroupNode = (ObjectNode) task.path("workerGroupId"); - Integer workerGroupId = -1; + int workerGroupId = -1; if (workerGroupNode != null && workerGroupNode.canConvertToInt()) { workerGroupId = workerGroupNode.asInt(-1); } @@ -332,9 +221,6 @@ public abstract class UpgradeDao extends AbstractBaseDao { } } - /** - * updateProcessDefinitionJsonResourceList - */ protected void updateProcessDefinitionJsonResourceList() { ResourceDao resourceDao = new ResourceDao(); ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao(); @@ -392,17 +278,13 @@ public abstract class UpgradeDao extends AbstractBaseDao { } - /** - * upgradeDolphinScheduler DML - * - * @param schemaDir schemaDir - */ private void upgradeDolphinSchedulerDML(String schemaDir) { String schemaVersion = schemaDir.split("_")[0]; - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); + Resource sqlFilePath = new ClassPathResource(String.format("sql/upgrade/%s/%s/dolphinscheduler_dml.sql", schemaDir, getDbType().name().toLowerCase())); + if (!sqlFilePath.exists()) { + logger.info("No dml file {}, returning", sqlFilePath); + return; } - String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql", rootDir, schemaDir, getDbType().name().toLowerCase()); logger.info("sqlSQLFilePath" + sqlFilePath); Connection conn = null; PreparedStatement pstmt = null; @@ -411,7 +293,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { conn.setAutoCommit(false); // Execute the 
upgraded dolphinscheduler dml ScriptRunner scriptRunner = new ScriptRunner(conn, false, true); - Reader sqlReader = new FileReader(new File(sqlFilePath)); + Reader sqlReader = new InputStreamReader(sqlFilePath.getInputStream()); scriptRunner.runScript(sqlReader); if (isExistsTable(T_VERSION_NAME)) { // Change version in the version table to the new version @@ -443,16 +325,6 @@ public abstract class UpgradeDao extends AbstractBaseDao { } logger.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); - } catch (SQLException e) { - try { - if (null != conn) { - conn.rollback(); - } - } catch (SQLException e1) { - logger.error(e1.getMessage(), e1); - } - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { try { if (null != conn) { @@ -474,11 +346,8 @@ public abstract class UpgradeDao extends AbstractBaseDao { * * @param schemaDir schemaDir */ - private void upgradeDolphinSchedulerDDL(String schemaDir) { - if (StringUtils.isEmpty(rootDir)) { - throw new RuntimeException("Environment variable user.dir not found"); - } - String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql", rootDir, schemaDir, getDbType().name().toLowerCase()); + private void upgradeDolphinSchedulerDDL(String schemaDir, String scriptFile) { + Resource sqlFilePath = new ClassPathResource(String.format("sql/upgrade/%s/%s/%s", schemaDir, getDbType().name().toLowerCase(), scriptFile)); Connection conn = null; PreparedStatement pstmt = null; try { @@ -488,32 +357,21 @@ public abstract class UpgradeDao extends AbstractBaseDao { conn.setAutoCommit(true); // Execute the dolphinscheduler ddl.sql for the upgrade ScriptRunner scriptRunner = new ScriptRunner(conn, true, true); - Reader sqlReader = new FileReader(new File(sqlFilePath)); + Reader sqlReader = new InputStreamReader(sqlFilePath.getInputStream()); scriptRunner.runScript(sqlReader); } catch (FileNotFoundException e) { logger.error(e.getMessage(), 
e); throw new RuntimeException("sql file not found ", e); - } catch (IOException e) { - - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); - } catch (SQLException e) { - - logger.error(e.getMessage(), e); - throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { - logger.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(pstmt, conn); } - } - /** * update version * @@ -543,4 +401,286 @@ public abstract class UpgradeDao extends AbstractBaseDao { } + /** + * upgrade DolphinScheduler to 2.0.0, json split + */ + private void processDefinitionJsonSplit() { + ProjectDao projectDao = new ProjectDao(); + ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao(); + ScheduleDao scheduleDao = new ScheduleDao(); + JsonSplitDao jsonSplitDao = new JsonSplitDao(); + try { + // execute project + Map projectIdCodeMap = projectDao.queryAllProject(dataSource.getConnection()); + projectDao.updateProjectCode(dataSource.getConnection(), projectIdCodeMap); + + // execute process definition code + List processDefinitions = processDefinitionDao.queryProcessDefinition(dataSource.getConnection()); + processDefinitionDao.updateProcessDefinitionCode(dataSource.getConnection(), processDefinitions, projectIdCodeMap); + + // execute schedule + Map allSchedule = scheduleDao.queryAllSchedule(dataSource.getConnection()); + Map processIdCodeMap = processDefinitions.stream().collect(Collectors.toMap(ProcessDefinition::getId, ProcessDefinition::getCode)); + scheduleDao.updateScheduleCode(dataSource.getConnection(), allSchedule, processIdCodeMap); + + // json split + Map processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection()); + List processDefinitionLogs = new ArrayList<>(); + List processTaskRelationLogs = new ArrayList<>(); + List taskDefinitionLogs = new ArrayList<>(); + Map>> processTaskMap = new HashMap<>(); + 
splitProcessDefinitionJson(processDefinitions, processDefinitionJsonMap, processDefinitionLogs, processTaskRelationLogs, taskDefinitionLogs, processTaskMap); + convertDependence(taskDefinitionLogs, projectIdCodeMap, processTaskMap); + + // execute json split + jsonSplitDao.executeJsonSplitProcessDefinition(dataSource.getConnection(), processDefinitionLogs); + jsonSplitDao.executeJsonSplitProcessTaskRelation(dataSource.getConnection(), processTaskRelationLogs); + jsonSplitDao.executeJsonSplitTaskDefinition(dataSource.getConnection(), taskDefinitionLogs); + } catch (Exception e) { + logger.error("json split error", e); + } + } + + private void splitProcessDefinitionJson(List processDefinitions, + Map processDefinitionJsonMap, + List processDefinitionLogs, + List processTaskRelationLogs, + List taskDefinitionLogs, + Map>> processTaskMap) throws Exception { + Map processDefinitionMap = processDefinitions.stream() + .collect(Collectors.toMap(ProcessDefinition::getId, processDefinition -> processDefinition)); + Date now = new Date(); + for (Map.Entry entry : processDefinitionJsonMap.entrySet()) { + if (entry.getValue() == null) { + throw new Exception("processDefinitionJson is null"); + } + ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue()); + ProcessDefinition processDefinition = processDefinitionMap.get(entry.getKey()); + if (processDefinition != null) { + processDefinition.setTenantId(jsonObject.get("tenantId") == null ? 
-1 : jsonObject.get("tenantId").asInt()); + processDefinition.setTimeout(jsonObject.get("timeout").asInt()); + processDefinition.setGlobalParams(jsonObject.get("globalParams").toString()); + } else { + throw new Exception("It can't find processDefinition, please check !"); + } + Map taskIdCodeMap = new HashMap<>(); + Map> taskNamePreMap = new HashMap<>(); + Map taskNameCodeMap = new HashMap<>(); + Map> processCodeTaskNameCodeMap = new HashMap<>(); + List taskDefinitionLogList = new ArrayList<>(); + ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString()); + for (int i = 0; i < tasks.size(); i++) { + ObjectNode task = (ObjectNode) tasks.path(i); + ObjectNode param = (ObjectNode) task.get("params"); + TaskDefinitionLog taskDefinitionLog = new TaskDefinitionLog(); + String taskType = task.get("type").asText(); + if (param != null) { + JsonNode resourceJsonNode = param.get("resourceList"); + if (resourceJsonNode != null && !resourceJsonNode.isEmpty()) { + List resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class); + List resourceIds = resourceList.stream().map(ResourceInfo::getId).collect(Collectors.toList()); + taskDefinitionLog.setResourceIds(StringUtils.join(resourceIds, Constants.COMMA)); + } else { + taskDefinitionLog.setResourceIds(StringUtils.EMPTY); + } + if (TaskType.SUB_PROCESS.getDesc().equals(taskType)) { + JsonNode jsonNodeDefinitionId = param.get("processDefinitionId"); + if (jsonNodeDefinitionId != null) { + param.put("processDefinitionCode", processDefinitionMap.get(jsonNodeDefinitionId.asInt()).getCode()); + param.remove("processDefinitionId"); + } + } + param.put("conditionResult", task.get("conditionResult")); + param.put("dependence", task.get("dependence")); + taskDefinitionLog.setTaskParams(JSONUtils.toJsonString(param)); + } + TaskTimeoutParameter timeout = JSONUtils.parseObject(JSONUtils.toJsonString(task.get("timeout")), TaskTimeoutParameter.class); + if (timeout != null) { + 
taskDefinitionLog.setTimeout(timeout.getInterval()); + taskDefinitionLog.setTimeoutFlag(timeout.getEnable() ? TimeoutFlag.OPEN : TimeoutFlag.CLOSE); + taskDefinitionLog.setTimeoutNotifyStrategy(timeout.getStrategy()); + } + String desc = task.get("description") != null ? task.get("description").asText() : + task.get("desc") != null ? task.get("desc").asText() : ""; + taskDefinitionLog.setDescription(desc); + taskDefinitionLog.setFlag(Constants.FLOWNODE_RUN_FLAG_NORMAL.equals(task.get("runFlag").asText()) ? Flag.YES : Flag.NO); + taskDefinitionLog.setTaskType(taskType); + taskDefinitionLog.setFailRetryInterval(TaskType.SUB_PROCESS.getDesc().equals(taskType) ? 1 : task.get("retryInterval").asInt()); + taskDefinitionLog.setFailRetryTimes(TaskType.SUB_PROCESS.getDesc().equals(taskType) ? 0 : task.get("maxRetryTimes").asInt()); + taskDefinitionLog.setTaskPriority(JSONUtils.parseObject(JSONUtils.toJsonString(task.get("taskInstancePriority")), Priority.class)); + String name = task.get("name").asText(); + taskDefinitionLog.setName(name); + taskDefinitionLog.setWorkerGroup(task.get("workerGroup") == null ? 
"default" : task.get("workerGroup").asText()); + long taskCode = CodeGenerateUtils.getInstance().genCode(); + taskDefinitionLog.setCode(taskCode); + taskDefinitionLog.setVersion(Constants.VERSION_FIRST); + taskDefinitionLog.setProjectCode(processDefinition.getProjectCode()); + taskDefinitionLog.setUserId(processDefinition.getUserId()); + taskDefinitionLog.setEnvironmentCode(-1); + taskDefinitionLog.setDelayTime(0); + taskDefinitionLog.setOperator(1); + taskDefinitionLog.setOperateTime(now); + taskDefinitionLog.setCreateTime(now); + taskDefinitionLog.setUpdateTime(now); + taskDefinitionLogList.add(taskDefinitionLog); + taskIdCodeMap.put(task.get("id").asText(), taskCode); + List preTasks = JSONUtils.toList(task.get("preTasks").toString(), String.class); + taskNamePreMap.put(name, preTasks); + taskNameCodeMap.put(name, taskCode); + } + convertConditions(taskDefinitionLogList, taskNameCodeMap); + taskDefinitionLogs.addAll(taskDefinitionLogList); + processDefinition.setLocations(convertLocations(processDefinition.getLocations(), taskIdCodeMap)); + ProcessDefinitionLog processDefinitionLog = new ProcessDefinitionLog(processDefinition); + processDefinitionLog.setOperator(1); + processDefinitionLog.setOperateTime(now); + processDefinitionLog.setUpdateTime(now); + processDefinitionLogs.add(processDefinitionLog); + handleProcessTaskRelation(taskNamePreMap, taskNameCodeMap, processDefinition, processTaskRelationLogs); + processCodeTaskNameCodeMap.put(processDefinition.getCode(), taskNameCodeMap); + processTaskMap.put(entry.getKey(), processCodeTaskNameCodeMap); + } + } + + public void convertConditions(List taskDefinitionLogList, Map taskNameCodeMap) throws Exception { + for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogList) { + if (TaskType.CONDITIONS.getDesc().equals(taskDefinitionLog.getTaskType())) { + ObjectMapper objectMapper = new ObjectMapper(); + ObjectNode taskParams = JSONUtils.parseObject(taskDefinitionLog.getTaskParams()); + // reset conditionResult + 
ObjectNode conditionResult = (ObjectNode) taskParams.get("conditionResult"); + List successNode = JSONUtils.toList(conditionResult.get("successNode").toString(), String.class); + List nodeCode = new ArrayList<>(); + successNode.forEach(node -> nodeCode.add(taskNameCodeMap.get(node))); + conditionResult.set("successNode", objectMapper.readTree(objectMapper.writeValueAsString(nodeCode))); + List failedNode = JSONUtils.toList(conditionResult.get("failedNode").toString(), String.class); + nodeCode.clear(); + failedNode.forEach(node -> nodeCode.add(taskNameCodeMap.get(node))); + conditionResult.set("failedNode", objectMapper.readTree(objectMapper.writeValueAsString(nodeCode))); + // reset dependItemList + ObjectNode dependence = (ObjectNode) taskParams.get("dependence"); + ArrayNode dependTaskList = JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList"))); + for (int i = 0; i < dependTaskList.size(); i++) { + ObjectNode dependTask = (ObjectNode) dependTaskList.path(i); + ArrayNode dependItemList = JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList"))); + for (int j = 0; j < dependItemList.size(); j++) { + ObjectNode dependItem = (ObjectNode) dependItemList.path(j); + JsonNode depTasks = dependItem.get("depTasks"); + dependItem.put("depTaskCode", taskNameCodeMap.get(depTasks.asText())); + dependItem.remove("depTasks"); + dependItemList.set(j, dependItem); + } + dependTask.put("dependItemList", dependItemList); + dependTaskList.set(i, dependTask); + } + dependence.put("dependTaskList", dependTaskList); + taskDefinitionLog.setTaskParams(JSONUtils.toJsonString(taskParams)); + } + } + } + + private String convertLocations(String locations, Map taskIdCodeMap) { + if (StringUtils.isBlank(locations)) { + return locations; + } + Map locationsMap = JSONUtils.parseObject(locations, new TypeReference>() { + }); + if (locationsMap == null) { + return locations; + } + ArrayNode jsonNodes = JSONUtils.createArrayNode(); + for (Map.Entry 
entry : locationsMap.entrySet()) { + ObjectNode nodes = JSONUtils.createObjectNode(); + nodes.put("taskCode", taskIdCodeMap.get(entry.getKey())); + ObjectNode oldNodes = entry.getValue(); + nodes.put("x", oldNodes.get("x").asInt()); + nodes.put("y", oldNodes.get("y").asInt()); + jsonNodes.add(nodes); + } + return jsonNodes.toString(); + } + + public void convertDependence(List taskDefinitionLogs, + Map projectIdCodeMap, + Map>> processTaskMap) { + for (TaskDefinitionLog taskDefinitionLog : taskDefinitionLogs) { + if (TaskType.DEPENDENT.getDesc().equals(taskDefinitionLog.getTaskType())) { + ObjectNode taskParams = JSONUtils.parseObject(taskDefinitionLog.getTaskParams()); + ObjectNode dependence = (ObjectNode) taskParams.get("dependence"); + ArrayNode dependTaskList = JSONUtils.parseArray(JSONUtils.toJsonString(dependence.get("dependTaskList"))); + for (int i = 0; i < dependTaskList.size(); i++) { + ObjectNode dependTask = (ObjectNode) dependTaskList.path(i); + ArrayNode dependItemList = JSONUtils.parseArray(JSONUtils.toJsonString(dependTask.get("dependItemList"))); + for (int j = 0; j < dependItemList.size(); j++) { + ObjectNode dependItem = (ObjectNode) dependItemList.path(j); + dependItem.put("projectCode", projectIdCodeMap.get(dependItem.get("projectId").asInt())); + int definitionId = dependItem.get("definitionId").asInt(); + Map> processCodeTaskNameCodeMap = processTaskMap.get(definitionId); + if (processCodeTaskNameCodeMap == null) { + logger.warn("We can't find processDefinition [{}], please check it is not exist, remove this dependence", definitionId); + dependItemList.remove(j); + continue; + } + Optional>> mapEntry = processCodeTaskNameCodeMap.entrySet().stream().findFirst(); + if (mapEntry.isPresent()) { + Map.Entry> processCodeTaskNameCodeEntry = mapEntry.get(); + dependItem.put("definitionCode", processCodeTaskNameCodeEntry.getKey()); + String depTasks = dependItem.get("depTasks").asText(); + long taskCode = "ALL".equals(depTasks) || 
processCodeTaskNameCodeEntry.getValue() == null ? 0L : processCodeTaskNameCodeEntry.getValue().get(depTasks); + dependItem.put("depTaskCode", taskCode); + } + dependItem.remove("projectId"); + dependItem.remove("definitionId"); + dependItem.remove("depTasks"); + dependItemList.set(j, dependItem); + } + dependTask.put("dependItemList", dependItemList); + dependTaskList.set(i, dependTask); + } + dependence.put("dependTaskList", dependTaskList); + taskDefinitionLog.setTaskParams(JSONUtils.toJsonString(taskParams)); + } + } + } + + private void handleProcessTaskRelation(Map> taskNamePreMap, + Map taskNameCodeMap, + ProcessDefinition processDefinition, + List processTaskRelationLogs) { + Date now = new Date(); + for (Map.Entry> entry : taskNamePreMap.entrySet()) { + List entryValue = entry.getValue(); + if (CollectionUtils.isNotEmpty(entryValue)) { + for (String preTaskName : entryValue) { + ProcessTaskRelationLog processTaskRelationLog = setProcessTaskRelationLog(processDefinition, now); + processTaskRelationLog.setPreTaskCode(taskNameCodeMap.get(preTaskName)); + processTaskRelationLog.setPreTaskVersion(Constants.VERSION_FIRST); + processTaskRelationLog.setPostTaskCode(taskNameCodeMap.get(entry.getKey())); + processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST); + processTaskRelationLogs.add(processTaskRelationLog); + } + } else { + ProcessTaskRelationLog processTaskRelationLog = setProcessTaskRelationLog(processDefinition, now); + processTaskRelationLog.setPreTaskCode(0); + processTaskRelationLog.setPreTaskVersion(0); + processTaskRelationLog.setPostTaskCode(taskNameCodeMap.get(entry.getKey())); + processTaskRelationLog.setPostTaskVersion(Constants.VERSION_FIRST); + processTaskRelationLogs.add(processTaskRelationLog); + } + } + } + + private ProcessTaskRelationLog setProcessTaskRelationLog(ProcessDefinition processDefinition, Date now) { + ProcessTaskRelationLog processTaskRelationLog = new ProcessTaskRelationLog(); + 
processTaskRelationLog.setProjectCode(processDefinition.getProjectCode()); + processTaskRelationLog.setProcessDefinitionCode(processDefinition.getCode()); + processTaskRelationLog.setProcessDefinitionVersion(processDefinition.getVersion()); + processTaskRelationLog.setConditionType(ConditionType.NONE); + processTaskRelationLog.setConditionParams("{}"); + processTaskRelationLog.setOperator(1); + processTaskRelationLog.setOperateTime(now); + processTaskRelationLog.setCreateTime(now); + processTaskRelationLog.setUpdateTime(now); + return processTaskRelationLog; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java index 14eceffa729fda02056060111e0a361fa2b81026..c7eec9462522434eddc60751eff97fcb0c20c5ff 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/CreateDolphinScheduler.java @@ -18,32 +18,49 @@ package org.apache.dolphinscheduler.dao.upgrade.shell; import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.WebApplicationType; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Component; -/** - * create DolphinScheduler - * - */ +@ComponentScan(value = "org.apache.dolphinscheduler.dao") +@EnableAutoConfiguration(exclude = {QuartzAutoConfiguration.class}) 
public class CreateDolphinScheduler { + public static void main(String[] args) { + new SpringApplicationBuilder(CreateDolphinScheduler.class) + .profiles("shell-create", "shell-cli") + .web(WebApplicationType.NONE) + .run(args); + } + + @Component + @Profile("shell-create") + static class CreateRunner implements CommandLineRunner { + private static final Logger logger = LoggerFactory.getLogger(CreateRunner.class); + + private final DolphinSchedulerManager dolphinSchedulerManager; + + CreateRunner(DolphinSchedulerManager dolphinSchedulerManager) { + this.dolphinSchedulerManager = dolphinSchedulerManager; + } - private static final Logger logger = LoggerFactory.getLogger(CreateDolphinScheduler.class); - - /** - * create dolphin scheduler db - * @param args args - */ - public static void main(String[] args) { - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - try { - dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); - dolphinSchedulerManager.upgradeDolphinScheduler(); - logger.info("upgrade DolphinScheduler finished"); - logger.info("create DolphinScheduler success"); - } catch (Exception e) { - logger.error("create DolphinScheduler failed",e); - } - - } + @Override + public void run(String... 
args) throws Exception { + if (dolphinSchedulerManager.schemaIsInitialized()) { + dolphinSchedulerManager.upgradeDolphinScheduler(); + logger.info("upgrade DolphinScheduler finished"); + } else { + dolphinSchedulerManager.initDolphinScheduler(); + logger.info("init DolphinScheduler finished"); + } + logger.info("create DolphinScheduler success"); + } + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java index 3bcb3abc032fd18cbfc2612c15a99de27419e22b..261b98895de32306c4c9eff7b66c2fe9070d8600 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/InitDolphinScheduler.java @@ -17,26 +17,43 @@ package org.apache.dolphinscheduler.dao.upgrade.shell; import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.WebApplicationType; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Component; -/** - * init DolphinScheduler - * - */ +@ComponentScan(value = "org.apache.dolphinscheduler.dao") +@EnableAutoConfiguration(exclude = {QuartzAutoConfiguration.class}) public class InitDolphinScheduler { + public static void main(String[] args) { + new SpringApplicationBuilder(InitDolphinScheduler.class) + .profiles("shell-init", "shell-cli") + .web(WebApplicationType.NONE) + .run(args); + 
} + + @Component + @Profile("shell-init") + static class InitRunner implements CommandLineRunner { + private static final Logger logger = LoggerFactory.getLogger(InitRunner.class); + + private final DolphinSchedulerManager dolphinSchedulerManager; - private static final Logger logger = LoggerFactory.getLogger(InitDolphinScheduler.class); + InitRunner(DolphinSchedulerManager dolphinSchedulerManager) { + this.dolphinSchedulerManager = dolphinSchedulerManager; + } - /** - * init dolphin scheduler db - * @param args args - */ - public static void main(String[] args) { - Thread.currentThread().setName("manager-InitDolphinScheduler"); - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); - - } + @Override + public void run(String... args) throws Exception { + dolphinSchedulerManager.initDolphinScheduler(); + logger.info("init DolphinScheduler finished"); + } + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java index 293b555c94047f83f59591c7031f5f2ba79a02cb..427f2f597382be3274bce194cd6a4ee50af500dc 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/shell/UpgradeDolphinScheduler.java @@ -17,31 +17,43 @@ package org.apache.dolphinscheduler.dao.upgrade.shell; import org.apache.dolphinscheduler.dao.upgrade.DolphinSchedulerManager; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.WebApplicationType; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import 
org.springframework.boot.autoconfigure.quartz.QuartzAutoConfiguration; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Component; -/** - * upgrade DolphinScheduler - */ +@ComponentScan(value = "org.apache.dolphinscheduler.dao") +@EnableAutoConfiguration(exclude = {QuartzAutoConfiguration.class}) public class UpgradeDolphinScheduler { - private static final Logger logger = LoggerFactory.getLogger(UpgradeDolphinScheduler.class); + public static void main(String[] args) { + new SpringApplicationBuilder(UpgradeDolphinScheduler.class) + .profiles("shell-upgrade", "shell-cli") + .web(WebApplicationType.NONE) + .run(args); + } + + @Component + @Profile("shell-upgrade") + static class UpgradeRunner implements CommandLineRunner { + private static final Logger logger = LoggerFactory.getLogger(UpgradeRunner.class); + + private final DolphinSchedulerManager dolphinSchedulerManager; + + UpgradeRunner(DolphinSchedulerManager dolphinSchedulerManager) { + this.dolphinSchedulerManager = dolphinSchedulerManager; + } - /** - * upgrade dolphin scheduler db - * @param args args - */ - public static void main(String[] args) { - DolphinSchedulerManager dolphinSchedulerManager = new DolphinSchedulerManager(); - try { - dolphinSchedulerManager.upgradeDolphinScheduler(); - logger.info("upgrade DolphinScheduler success"); - } catch (Exception e) { - logger.error(e.getMessage(),e); - logger.info("Upgrade DolphinScheduler failed"); - throw new RuntimeException(e); - } - } - - - + @Override + public void run(String... 
args) throws Exception { + dolphinSchedulerManager.upgradeDolphinScheduler(); + logger.info("upgrade DolphinScheduler success"); + } + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java index de27f173ea215d46cdbe30f487c52a6d2ac0d1a7..727e040287e16128b531ee42827d4a804a664621 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java @@ -25,11 +25,12 @@ import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.switchtask.SwitchParameters; import org.apache.dolphinscheduler.common.task.switchtask.SwitchResultVo; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -37,6 +38,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,7 +51,6 @@ public class DagHelper { private static final Logger logger = LoggerFactory.getLogger(DagHelper.class); - /** * generate flow node relation list by task node list; * Edges that are not in the task Node List will not be added to the result @@ -63,9 +64,9 @@ public class DagHelper { String preTasks = taskNode.getPreTasks(); List preTaskList = JSONUtils.toList(preTasks, String.class); if (preTaskList != null) { - for (String depNodeName : preTaskList) { - 
if (null != findNodeByName(taskNodeList, depNodeName)) { - nodeRelationList.add(new TaskNodeRelation(depNodeName, taskNode.getName())); + for (String depNodeCode : preTaskList) { + if (null != findNodeByCode(taskNodeList, depNodeCode)) { + nodeRelationList.add(new TaskNodeRelation(depNodeCode, Long.toString(taskNode.getCode()))); } } } @@ -78,46 +79,47 @@ public class DagHelper { * * @param taskNodeList taskNodeList * @param startNodeNameList startNodeNameList - * @param recoveryNodeNameList recoveryNodeNameList + * @param recoveryNodeCodeList recoveryNodeCodeList * @param taskDependType taskDependType * @return task node list */ public static List generateFlowNodeListByStartNode(List taskNodeList, List startNodeNameList, - List recoveryNodeNameList, TaskDependType taskDependType) { + List recoveryNodeCodeList, TaskDependType taskDependType) { List destFlowNodeList = new ArrayList<>(); List startNodeList = startNodeNameList; - if (taskDependType != TaskDependType.TASK_POST - && CollectionUtils.isEmpty(startNodeList)) { + if (taskDependType != TaskDependType.TASK_POST && CollectionUtils.isEmpty(startNodeList)) { logger.error("start node list is empty! 
cannot continue run the process "); return destFlowNodeList; } + List destTaskNodeList = new ArrayList<>(); List tmpTaskNodeList = new ArrayList<>(); + if (taskDependType == TaskDependType.TASK_POST - && CollectionUtils.isNotEmpty(recoveryNodeNameList)) { - startNodeList = recoveryNodeNameList; + && CollectionUtils.isNotEmpty(recoveryNodeCodeList)) { + startNodeList = recoveryNodeCodeList; } if (CollectionUtils.isEmpty(startNodeList)) { // no special designation start nodes tmpTaskNodeList = taskNodeList; } else { // specified start nodes or resume execution - for (String startNodeName : startNodeList) { - TaskNode startNode = findNodeByName(taskNodeList, startNodeName); + for (String startNodeCode : startNodeList) { + TaskNode startNode = findNodeByCode(taskNodeList, startNodeCode); List childNodeList = new ArrayList<>(); if (startNode == null) { logger.error("start node name [{}] is not in task node list [{}] ", - startNodeName, + startNodeCode, taskNodeList ); continue; } else if (TaskDependType.TASK_POST == taskDependType) { - List visitedNodeNameList = new ArrayList<>(); - childNodeList = getFlowNodeListPost(startNode, taskNodeList, visitedNodeNameList); + List visitedNodeCodeList = new ArrayList<>(); + childNodeList = getFlowNodeListPost(startNode, taskNodeList, visitedNodeCodeList); } else if (TaskDependType.TASK_PRE == taskDependType) { - List visitedNodeNameList = new ArrayList<>(); - childNodeList = getFlowNodeListPre(startNode, recoveryNodeNameList, taskNodeList, visitedNodeNameList); + List visitedNodeCodeList = new ArrayList<>(); + childNodeList = getFlowNodeListPre(startNode, recoveryNodeCodeList, taskNodeList, visitedNodeCodeList); } else { childNodeList.add(startNode); } @@ -126,14 +128,13 @@ public class DagHelper { } for (TaskNode taskNode : tmpTaskNodeList) { - if (null == findNodeByName(destTaskNodeList, taskNode.getName())) { + if (null == findNodeByCode(destTaskNodeList, Long.toString(taskNode.getCode()))) { destTaskNodeList.add(taskNode); } } 
return destTaskNodeList; } - /** * find all the nodes that depended on the start node * @@ -141,33 +142,32 @@ public class DagHelper { * @param taskNodeList taskNodeList * @return task node list */ - private static List getFlowNodeListPost(TaskNode startNode, List taskNodeList, List visitedNodeNameList) { + private static List getFlowNodeListPost(TaskNode startNode, List taskNodeList, List visitedNodeCodeList) { List resultList = new ArrayList<>(); for (TaskNode taskNode : taskNodeList) { List depList = taskNode.getDepList(); - if (null != depList && null != startNode && depList.contains(startNode.getName()) && !visitedNodeNameList.contains(taskNode.getName())) { - resultList.addAll(getFlowNodeListPost(taskNode, taskNodeList, visitedNodeNameList)); + if (null != depList && null != startNode && depList.contains(Long.toString(startNode.getCode())) && !visitedNodeCodeList.contains(Long.toString(taskNode.getCode()))) { + resultList.addAll(getFlowNodeListPost(taskNode, taskNodeList, visitedNodeCodeList)); } } // why add (startNode != null) condition? for SonarCloud Quality Gate passed if (null != startNode) { - visitedNodeNameList.add(startNode.getName()); + visitedNodeCodeList.add(Long.toString(startNode.getCode())); } resultList.add(startNode); return resultList; } - /** * find all nodes that start nodes depend on. 
* * @param startNode startNode - * @param recoveryNodeNameList recoveryNodeNameList + * @param recoveryNodeCodeList recoveryNodeCodeList * @param taskNodeList taskNodeList * @return task node list */ - private static List getFlowNodeListPre(TaskNode startNode, List recoveryNodeNameList, List taskNodeList, List visitedNodeNameList) { + private static List getFlowNodeListPre(TaskNode startNode, List recoveryNodeCodeList, List taskNodeList, List visitedNodeCodeList) { List resultList = new ArrayList<>(); @@ -179,17 +179,17 @@ public class DagHelper { if (CollectionUtils.isEmpty(depList)) { return resultList; } - for (String depNodeName : depList) { - TaskNode start = findNodeByName(taskNodeList, depNodeName); - if (recoveryNodeNameList.contains(depNodeName)) { + for (String depNodeCode : depList) { + TaskNode start = findNodeByCode(taskNodeList, depNodeCode); + if (recoveryNodeCodeList.contains(depNodeCode)) { resultList.add(start); - } else if (!visitedNodeNameList.contains(depNodeName)) { - resultList.addAll(getFlowNodeListPre(start, recoveryNodeNameList, taskNodeList, visitedNodeNameList)); + } else if (!visitedNodeCodeList.contains(depNodeCode)) { + resultList.addAll(getFlowNodeListPre(start, recoveryNodeCodeList, taskNodeList, visitedNodeCodeList)); } } // why add (startNode != null) condition? 
for SonarCloud Quality Gate passed if (null != startNode) { - visitedNodeNameList.add(startNode.getName()); + visitedNodeCodeList.add(Long.toString(startNode.getCode())); } return resultList; } @@ -199,17 +199,17 @@ public class DagHelper { * * @param totalTaskNodeList totalTaskNodeList * @param startNodeNameList startNodeNameList - * @param recoveryNodeNameList recoveryNodeNameList + * @param recoveryNodeCodeList recoveryNodeCodeList * @param depNodeType depNodeType * @return process dag * @throws Exception if error throws Exception */ public static ProcessDag generateFlowDag(List totalTaskNodeList, List startNodeNameList, - List recoveryNodeNameList, + List recoveryNodeCodeList, TaskDependType depNodeType) throws Exception { - List destTaskNodeList = generateFlowNodeListByStartNode(totalTaskNodeList, startNodeNameList, recoveryNodeNameList, depNodeType); + List destTaskNodeList = generateFlowNodeListByStartNode(totalTaskNodeList, startNodeNameList, recoveryNodeCodeList, depNodeType); if (destTaskNodeList.isEmpty()) { return null; } @@ -236,6 +236,22 @@ public class DagHelper { return null; } + /** + * find node by node code + * + * @param nodeDetails nodeDetails + * @param nodeCode nodeCode + * @return task node + */ + public static TaskNode findNodeByCode(List nodeDetails, String nodeCode) { + for (TaskNode taskNode : nodeDetails) { + if (Long.toString(taskNode.getCode()).equals(nodeCode)) { + return taskNode; + } + } + return null; + } + /** * the task can be submit when all the depends nodes are forbidden or complete * @@ -252,11 +268,11 @@ public class DagHelper { if (dependList == null) { return true; } - for (String dependNodeName : dependList) { - TaskNode dependNode = dag.getNode(dependNodeName); - if (dependNode == null || completeTaskList.containsKey(dependNodeName) + for (String dependNodeCode : dependList) { + TaskNode dependNode = dag.getNode(dependNodeCode); + if (dependNode == null || completeTaskList.containsKey(dependNodeCode) || 
dependNode.isForbidden() - || skipTaskNodeList.containsKey(dependNodeName)) { + || skipTaskNodeList.containsKey(dependNodeCode)) { continue; } else { return false; @@ -272,25 +288,30 @@ public class DagHelper { * * @return successor nodes */ - public static Set parsePostNodes(String preNodeName, + public static Set parsePostNodes(String preNodeCode, Map skipTaskNodeList, DAG dag, Map completeTaskList) { Set postNodeList = new HashSet<>(); Collection startVertexes = new ArrayList<>(); - if (preNodeName == null) { + + if (preNodeCode == null) { startVertexes = dag.getBeginNode(); - } else if (dag.getNode(preNodeName).isConditionsTask()) { - List conditionTaskList = parseConditionTask(preNodeName, skipTaskNodeList, dag, completeTaskList); + } else if (dag.getNode(preNodeCode).isConditionsTask()) { + List conditionTaskList = parseConditionTask(preNodeCode, skipTaskNodeList, dag, completeTaskList); startVertexes.addAll(conditionTaskList); - } else if (dag.getNode(preNodeName).isSwitchTask()) { - List conditionTaskList = parseSwitchTask(preNodeName, skipTaskNodeList, dag, completeTaskList); + } else if (dag.getNode(preNodeCode).isSwitchTask()) { + List conditionTaskList = parseSwitchTask(preNodeCode, skipTaskNodeList, dag, completeTaskList); startVertexes.addAll(conditionTaskList); } else { - startVertexes = dag.getSubsequentNodes(preNodeName); + startVertexes = dag.getSubsequentNodes(preNodeCode); } for (String subsequent : startVertexes) { TaskNode taskNode = dag.getNode(subsequent); + if (taskNode == null) { + logger.error("taskNode {} is null, please check dag", subsequent); + continue; + } if (isTaskNodeNeedSkip(taskNode, skipTaskNodeList)) { setTaskNodeSkip(subsequent, dag, completeTaskList, skipTaskNodeList); continue; @@ -324,35 +345,37 @@ public class DagHelper { return true; } - /** * parse condition task find the branch process * set skip flag for another one. 
*/ - public static List parseConditionTask(String nodeName, + public static List parseConditionTask(String nodeCode, Map skipTaskNodeList, DAG dag, Map completeTaskList) { List conditionTaskList = new ArrayList<>(); - TaskNode taskNode = dag.getNode(nodeName); + TaskNode taskNode = dag.getNode(nodeCode); if (!taskNode.isConditionsTask()) { return conditionTaskList; } - if (!completeTaskList.containsKey(nodeName)) { + if (!completeTaskList.containsKey(nodeCode)) { return conditionTaskList; } - TaskInstance taskInstance = completeTaskList.get(nodeName); + TaskInstance taskInstance = completeTaskList.get(nodeCode); ConditionsParameters conditionsParameters = JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); + if (conditionsParameters == null) { + return conditionTaskList; + } List skipNodeList = new ArrayList<>(); if (taskInstance.getState().typeIsSuccess()) { - conditionTaskList = conditionsParameters.getSuccessNode(); - skipNodeList = conditionsParameters.getFailedNode(); + conditionTaskList = conditionsParameters.getSuccessNode().stream().map(String::valueOf).collect(Collectors.toList()); + skipNodeList = conditionsParameters.getFailedNode().stream().map(String::valueOf).collect(Collectors.toList()); } else if (taskInstance.getState().typeIsFailure()) { - conditionTaskList = conditionsParameters.getFailedNode(); - skipNodeList = conditionsParameters.getSuccessNode(); + conditionTaskList = conditionsParameters.getFailedNode().stream().map(String::valueOf).collect(Collectors.toList()); + skipNodeList = conditionsParameters.getSuccessNode().stream().map(String::valueOf).collect(Collectors.toList()); } else { - conditionTaskList.add(nodeName); + conditionTaskList.add(nodeCode); } for (String failedNode : skipNodeList) { setTaskNodeSkip(failedNode, dag, completeTaskList, skipTaskNodeList); @@ -363,42 +386,41 @@ public class DagHelper { /** * parse condition task find the branch process * set skip flag for another one. 
- * - * @param nodeName - * @return */ - public static List parseSwitchTask(String nodeName, + public static List parseSwitchTask(String nodeCode, Map skipTaskNodeList, DAG dag, Map completeTaskList) { List conditionTaskList = new ArrayList<>(); - TaskNode taskNode = dag.getNode(nodeName); + TaskNode taskNode = dag.getNode(nodeCode); if (!taskNode.isSwitchTask()) { return conditionTaskList; } - if (!completeTaskList.containsKey(nodeName)) { + if (!completeTaskList.containsKey(nodeCode)) { return conditionTaskList; } - conditionTaskList = skipTaskNode4Switch(taskNode, skipTaskNodeList, completeTaskList, dag); + conditionTaskList.add(String.valueOf(skipTaskNode4Switch(taskNode, skipTaskNodeList, completeTaskList, dag))); return conditionTaskList; } - private static List skipTaskNode4Switch(TaskNode taskNode, Map skipTaskNodeList, - Map completeTaskList, - DAG dag) { - SwitchParameters switchParameters = completeTaskList.get(taskNode.getName()).getSwitchDependency(); + private static Long skipTaskNode4Switch(TaskNode taskNode, Map skipTaskNodeList, + Map completeTaskList, + DAG dag) { + + SwitchParameters switchParameters = completeTaskList.get(Long.toString(taskNode.getCode())).getSwitchDependency(); int resultConditionLocation = switchParameters.getResultConditionLocation(); List conditionResultVoList = switchParameters.getDependTaskList(); - List switchTaskList = conditionResultVoList.get(resultConditionLocation).getNextNode(); - if (CollectionUtils.isEmpty(switchTaskList)) { - switchTaskList = new ArrayList<>(); + Long switchTaskList = conditionResultVoList.get(resultConditionLocation).getNextNode(); + if (switchTaskList == null) { + switchTaskList = 0L; } conditionResultVoList.remove(resultConditionLocation); for (SwitchResultVo info : conditionResultVoList) { - if (CollectionUtils.isEmpty(info.getNextNode())) { + Long nextNode = info.getNextNode(); + if (nextNode == null || nextNode == 0L) { continue; } - setTaskNodeSkip(info.getNextNode().get(0), dag, 
completeTaskList, skipTaskNodeList); + setTaskNodeSkip(String.valueOf(nextNode), dag, completeTaskList, skipTaskNodeList); } return switchTaskList; } @@ -406,15 +428,15 @@ public class DagHelper { /** * set task node and the post nodes skip flag */ - private static void setTaskNodeSkip(String skipNodeName, + private static void setTaskNodeSkip(String skipNodeCode, DAG dag, Map completeTaskList, Map skipTaskNodeList) { - if (!dag.containsNode(skipNodeName)) { + if (!dag.containsNode(skipNodeCode)) { return; } - skipTaskNodeList.putIfAbsent(skipNodeName, dag.getNode(skipNodeName)); - Collection postNodeList = dag.getSubsequentNodes(skipNodeName); + skipTaskNodeList.putIfAbsent(skipNodeCode, dag.getNode(skipNodeCode)); + Collection postNodeList = dag.getSubsequentNodes(skipNodeCode); for (String post : postNodeList) { TaskNode postNode = dag.getNode(post); if (isTaskNodeNeedSkip(postNode, skipTaskNodeList)) { @@ -423,7 +445,6 @@ public class DagHelper { } } - /*** * build dag graph * @param processDag processDag @@ -436,7 +457,7 @@ public class DagHelper { //add vertex if (CollectionUtils.isNotEmpty(processDag.getNodes())) { for (TaskNode node : processDag.getNodes()) { - dag.addNode(node.getName(), node); + dag.addNode(Long.toString(node.getCode()), node); } } @@ -466,7 +487,7 @@ public class DagHelper { // If the dependency is not empty if (preTasksList != null) { for (String depNode : preTasksList) { - taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); + taskNodeRelations.add(new TaskNodeRelation(depNode, Long.toString(taskNode.getCode()))); } } } @@ -500,7 +521,7 @@ public class DagHelper { && taskNodeMap.containsKey(preTaskCode) && taskNodeMap.containsKey(postTaskCode)) { TaskNode preNode = taskNodeMap.get(preTaskCode); TaskNode postNode = taskNodeMap.get(postTaskCode); - taskNodeRelations.add(new TaskNodeRelation(preNode.getName(), postNode.getName())); + taskNodeRelations.add(new TaskNodeRelation(Long.toString(preNode.getCode()), 
Long.toString(postNode.getCode()))); } } ProcessDag processDag = new ProcessDag(); @@ -512,18 +533,18 @@ public class DagHelper { /** * is there have conditions after the parent node */ - public static boolean haveConditionsAfterNode(String parentNodeName, + public static boolean haveConditionsAfterNode(String parentNodeCode, DAG dag ) { boolean result = false; - Set subsequentNodes = dag.getSubsequentNodes(parentNodeName); + Set subsequentNodes = dag.getSubsequentNodes(parentNodeCode); if (CollectionUtils.isEmpty(subsequentNodes)) { return result; } - for (String nodeName : subsequentNodes) { - TaskNode taskNode = dag.getNode(nodeName); + for (String nodeCode : subsequentNodes) { + TaskNode taskNode = dag.getNode(nodeCode); List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); - if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) { + if (preTasksList.contains(parentNodeCode) && taskNode.isConditionsTask()) { return true; } } @@ -533,13 +554,13 @@ public class DagHelper { /** * is there have conditions after the parent node */ - public static boolean haveConditionsAfterNode(String parentNodeName, List taskNodes) { + public static boolean haveConditionsAfterNode(String parentNodeCode, List taskNodes) { if (CollectionUtils.isEmpty(taskNodes)) { return false; } for (TaskNode taskNode : taskNodes) { List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); - if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) { + if (preTasksList.contains(parentNodeCode) && taskNode.isConditionsTask()) { return true; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java index e7e9c70f479848ad3e5dd5269580cae9f76d9d5f..e00f5e7e0339fbf6d7ac39f982ae7afe9debf409 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java @@ -14,20 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.utils; +package org.apache.dolphinscheduler.dao.utils; import static org.apache.dolphinscheduler.dao.MonitorDBDao.VARIABLE_NAME; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.dao.entity.MonitorRecord; +import org.apache.dolphinscheduler.spi.enums.DbType; + import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Date; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java index b1cdf6f1791e35116140fb8da7fa459b71440bdd..6afd9706e6ffb4661d75c31bd0f82554f6d2fe24 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java @@ -14,17 +14,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.dao.utils; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.dao.entity.MonitorRecord; +import org.apache.dolphinscheduler.spi.enums.DbType; + import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Date; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java index 45315791f76f6a5204bdab322057e06144f9456a..483c052b83e66644fa3c5dc38f29163433b63130 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.dao.utils; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.commons.collections.CollectionUtils; import java.util.Arrays; import java.util.HashMap; diff --git a/dolphinscheduler-dao/src/main/resources/application-h2.yaml b/dolphinscheduler-dao/src/main/resources/application-h2.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d42a1032f46ae7cc69ae420c91bfb97907a64409 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/application-h2.yaml @@ -0,0 +1,29 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +spring: + sql: + init: + schema-locations: classpath:sql/dolphinscheduler_h2.sql + datasource: + driver-class-name: org.h2.Driver + url: jdbc:h2:mem:dolphinscheduler;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true + username: sa + password: "" + jpa: + hibernate: + ddl-auto: none diff --git a/dolphinscheduler-dao/src/main/resources/application-mysql.yaml b/dolphinscheduler-dao/src/main/resources/application-mysql.yaml new file mode 100644 index 0000000000000000000000000000000000000000..be5872b90602aa0bd498f7b3e82c10b13251e75f --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/application-mysql.yaml @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +spring: + datasource: + driver-class-name: com.mysql.jdbc.Driver + url: jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 + username: ds_user + password: dolphinscheduler + hikari: + connection-test-query: select 1 + minimum-idle: 5 + auto-commit: true + validation-timeout: 3000 + pool-name: DolphinScheduler + maximum-pool-size: 50 + connection-timeout: 30000 + idle-timeout: 600000 + leak-detection-threshold: 0 + initialization-fail-timeout: 1 diff --git a/dolphinscheduler-dao/src/main/resources/application-postgresql.yaml b/dolphinscheduler-dao/src/main/resources/application-postgresql.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e5ad49ffaeab7b71a3cdcb8e4cd6e2f07c296af3 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/application-postgresql.yaml @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +spring: + datasource: + driver-class-name: org.postgresql.Driver + url: jdbc:postgresql://127.0.0.1:5432/dolphinscheduler + username: root + password: root + hikari: + connection-test-query: select 1 + minimum-idle: 5 + auto-commit: true + validation-timeout: 3000 + pool-name: DolphinScheduler + maximum-pool-size: 50 + connection-timeout: 30000 + idle-timeout: 600000 + leak-detection-threshold: 0 + initialization-fail-timeout: 1 diff --git a/dolphinscheduler-dao/src/main/resources/datasource.properties b/dolphinscheduler-dao/src/main/resources/datasource.properties deleted file mode 100644 index 6720080806ecce2a3595bbf7e532a47d3150c1e0..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dao/src/main/resources/datasource.properties +++ /dev/null @@ -1,69 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# datasource configuration -spring.datasource.driver-class-name=org.postgresql.Driver -spring.datasource.url=jdbc:postgresql://127.0.0.1:5432/dolphinscheduler -spring.datasource.username=root -spring.datasource.password=root - -# mysql example -#spring.datasource.driver-class-name=com.mysql.jdbc.Driver -#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 -#spring.datasource.username=ds_user -#spring.datasource.password=dolphinscheduler - -# connection configuration -#spring.datasource.initialSize=5 -# min connection number -#spring.datasource.minIdle=5 -# max connection number -#spring.datasource.maxActive=50 - -# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. -# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. -#spring.datasource.maxWait=60000 - -# milliseconds for check to close free connections -#spring.datasource.timeBetweenEvictionRunsMillis=60000 - -# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. -#spring.datasource.timeBetweenConnectErrorMillis=60000 - -# the longest time a connection remains idle without being evicted, in milliseconds -#spring.datasource.minEvictableIdleTimeMillis=300000 - -#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. 
-#spring.datasource.validationQuery=SELECT 1 - -#check whether the connection is valid for timeout, in seconds -#spring.datasource.validationQueryTimeout=3 - -# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, -# validation Query is performed to check whether the connection is valid -#spring.datasource.testWhileIdle=true - -#execute validation to check if the connection is valid when applying for a connection -#spring.datasource.testOnBorrow=true -#execute validation to check if the connection is valid when the connection is returned -#spring.datasource.testOnReturn=false -#spring.datasource.defaultAutoCommit=true -#spring.datasource.keepAlive=true - -# open PSCache, specify count PSCache for every connection -#spring.datasource.poolPreparedStatements=true -#spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml index 35312fb88a9eb8ecc74efd705638976ac3b5a503..f8c8ad4ba23b4b85f4c9fc9884cdd49e3f5b0c44 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml @@ -31,6 +31,13 @@ order by t.update_time desc + + + delete from t_ds_access_token where user_id = #{userId} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml index efdacb13de88aa56ff0d31eca9e5cf6d38d99795..521fdce41d18709ec31c8fe2ac53fdc79f6f097b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml +++ 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml @@ -32,15 +32,7 @@ order by update_time desc - + - select * + select + from t_ds_alert_plugin_instance - where 1 = 1 + where 1 = 1 order by update_time desc - select - * + from t_ds_alert_plugin_instance - where instance_name = #{instanceName} + where 1 = 1 + + and instance_name like concat('%', #{instanceName}, '%') + - select cmd.id, cmd.command_type, cmd.process_definition_code, cmd.command_param, cmd.task_depend_type, cmd.failure_strategy, - cmd.warning_type, cmd.warning_group_id, cmd.schedule_time, cmd.start_time, cmd.executor_id, cmd.update_time, - cmd.process_instance_priority, cmd.worker_group, cmd.environment_code, cmd.dry_run - from t_ds_command cmd - join t_ds_process_definition definition on cmd.process_definition_code = definition.code - where definition.release_state = 1 AND definition.flag = 1 - order by cmd.update_time asc - limit 1 - diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml index 9f76dd13dc77c2504b97d804e99b9ec0f117b0cd..e1247728f7ccd5b9a6c74cbb6c230352bc5ea6dd 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml @@ -84,7 +84,7 @@ and td.user_id = #{userId} - order by sc.schedule_release_state desc,td.update_time desc + order by sc.schedule_release_state desc,td.update_time desc,td.id asc select @@ -45,12 +45,22 @@ order by id asc - + select instance.id, instance.command_type, instance.executor_id, instance.process_definition_version, instance.process_definition_code, instance.name, instance.state, instance.schedule_time, instance.start_time, - instance.end_time, instance.run_times, 
instance.recovery, instance.host, instance.dry_run + instance.end_time, instance.run_times, instance.recovery, instance.host, instance.dry_run, instance.restart_time from t_ds_process_instance instance join t_ds_process_definition define ON instance.process_definition_code = define.code where instance.is_sub_process=0 diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml index 2eafb31a844537889280e7c85736f28db900ed8e..c3c0579d766282f59f888237b442c1799aa6c85d 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationLogMapper.xml @@ -46,4 +46,28 @@ WHERE process_definition_code = #{processCode} and process_definition_version = #{processVersion} + + delete from t_ds_process_task_relation_log + WHERE project_code = #{processTaskRelationLog.projectCode} + and process_definition_code = #{processTaskRelationLog.processDefinitionCode} + and process_definition_version = #{processTaskRelationLog.processDefinitionVersion} + + and pre_task_code = #{processTaskRelationLog.preTaskCode} + and pre_task_version = #{processTaskRelationLog.preTaskVersion} + + and post_task_code = #{processTaskRelationLog.postTaskCode} + and post_task_version = #{processTaskRelationLog.postTaskVersion} + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml index 83c3b5b261e511327e404288ea0f0c5803b22cea..6a51a19225c571d78154c60a8172e94901eca05b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml +++ 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessTaskRelationMapper.xml @@ -69,4 +69,105 @@ #{relation.conditionType},#{relation.conditionParams},#{relation.createTime},#{relation.updateTime}) + + + + + + + + + + + + + + delete from t_ds_process_task_relation + WHERE project_code = #{processTaskRelationLog.projectCode} + and process_definition_code = #{processTaskRelationLog.processDefinitionCode} + and process_definition_version = #{processTaskRelationLog.processDefinitionVersion} + + and pre_task_code = #{processTaskRelationLog.preTaskCode} + and pre_task_version = #{processTaskRelationLog.preTaskVersion} + + and post_task_code = #{processTaskRelationLog.postTaskCode} + and post_task_version = #{processTaskRelationLog.postTaskVersion} + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml index d1cc5f7d62bf794826bb0b9ac77d2662bf46c206..3fab14f3a2fb817df0ab7308f72fe427f85d7cad 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml @@ -137,6 +137,7 @@ + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml index 7a8b043c09a1940c3490cf717491e497a5156276..3d93c7ce2aa10d3daad96ade7b3b2a210ba385b3 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml @@ -63,7 +63,7 @@ and ( - and d.id in + d.id in #{i} diff --git 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.xml index da6c0ea2b156e910503a05283f3934c4c92c9c6d..0615167a896db9f4fcb089877acb4a88422e1c2f 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskDefinitionLogMapper.xml @@ -73,6 +73,9 @@ from t_ds_task_definition_log where code = #{code} + + and project_code = #{projectCode} + order by version desc diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml index 0964d1cf77d9131e6966d5b417d43b3abe2e915b..5ddec3f78dd93fce0e9ce5f73119afda34f6a222 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml @@ -81,8 +81,11 @@ #{i} - - and t.start_time > #{startTime} and t.start_time #{endTime} + + and t.start_time ]]> #{startTime} + + + and t.start_time #{endTime} group by t.state @@ -95,6 +98,15 @@ and flag = 1 limit 1 + + + update t_ds_task_instance + set host = #{host}, + submit_time = #{submitTime} + where id = #{id} + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml index dec6a51937e6465e34d63e1ce4dce9aef9793e64..570e395ffb1f11b2f6380d8e45cee254c33e706b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml +++ 
b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml @@ -123,4 +123,12 @@ #{id} + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml index 638ac5d48175a3a006968c0ff9a7a104aebfba94..2d94b04c9f4d55704bab6dd962b9a8dfef83814a 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml @@ -31,7 +31,7 @@ select tepd.code as work_flow_code,tepd.name as work_flow_name, - "" as source_work_flow_code, + '' as source_work_flow_code, tepd.release_state as work_flow_publish_status, tes.start_time as schedule_start_time, tes.end_time as schedule_end_time, diff --git a/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/create/release-1.0.0_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from 
sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/dolphinscheduler_h2.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql similarity index 84% rename from sql/dolphinscheduler_h2.sql rename to dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql index ffa3a3a609c981fc629f85d38456e390fa032a69..a177665e7245358115754d84021bc514882b2660 100644 --- a/sql/dolphinscheduler_h2.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -SET -FOREIGN_KEY_CHECKS=0; +SET FOREIGN_KEY_CHECKS=0; +SET REFERENTIAL_INTEGRITY FALSE; -- ---------------------------- -- Table structure for QRTZ_JOB_DETAILS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_JOB_DETAILS; +DROP TABLE IF EXISTS QRTZ_JOB_DETAILS CASCADE; CREATE TABLE QRTZ_JOB_DETAILS ( SCHED_NAME varchar(120) NOT NULL, @@ -29,10 +29,10 @@ CREATE TABLE QRTZ_JOB_DETAILS JOB_GROUP varchar(200) NOT NULL, DESCRIPTION varchar(250) DEFAULT NULL, JOB_CLASS_NAME varchar(250) NOT NULL, - IS_DURABLE varchar(1) NOT NULL, - IS_NONCONCURRENT varchar(1) NOT NULL, - IS_UPDATE_DATA varchar(1) NOT NULL, - REQUESTS_RECOVERY varchar(1) NOT NULL, + IS_DURABLE boolean NOT NULL, + IS_NONCONCURRENT boolean NOT NULL, + IS_UPDATE_DATA boolean NOT NULL, + REQUESTS_RECOVERY boolean NOT NULL, JOB_DATA blob, PRIMARY KEY (SCHED_NAME, JOB_NAME, JOB_GROUP) ); @@ -40,7 +40,7 @@ CREATE TABLE QRTZ_JOB_DETAILS -- ---------------------------- -- Table structure for QRTZ_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_TRIGGERS; +DROP TABLE IF EXISTS QRTZ_TRIGGERS CASCADE; CREATE TABLE QRTZ_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -66,7 +66,7 @@ CREATE TABLE QRTZ_TRIGGERS -- ---------------------------- -- Table structure for QRTZ_BLOB_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS; +DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS CASCADE; CREATE TABLE QRTZ_BLOB_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -84,7 +84,7 @@ CREATE TABLE QRTZ_BLOB_TRIGGERS -- ---------------------------- -- Table structure for QRTZ_CALENDARS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_CALENDARS; +DROP TABLE IF EXISTS QRTZ_CALENDARS CASCADE; CREATE TABLE QRTZ_CALENDARS ( SCHED_NAME varchar(120) NOT NULL, @@ -100,7 +100,7 @@ CREATE TABLE QRTZ_CALENDARS -- ---------------------------- -- Table structure for QRTZ_CRON_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS; +DROP TABLE IF EXISTS 
QRTZ_CRON_TRIGGERS CASCADE; CREATE TABLE QRTZ_CRON_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -119,7 +119,7 @@ CREATE TABLE QRTZ_CRON_TRIGGERS -- ---------------------------- -- Table structure for QRTZ_FIRED_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS; +DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS CASCADE; CREATE TABLE QRTZ_FIRED_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -133,8 +133,8 @@ CREATE TABLE QRTZ_FIRED_TRIGGERS STATE varchar(16) NOT NULL, JOB_NAME varchar(200) DEFAULT NULL, JOB_GROUP varchar(200) DEFAULT NULL, - IS_NONCONCURRENT varchar(1) DEFAULT NULL, - REQUESTS_RECOVERY varchar(1) DEFAULT NULL, + IS_NONCONCURRENT boolean DEFAULT NULL, + REQUESTS_RECOVERY boolean DEFAULT NULL, PRIMARY KEY (SCHED_NAME, ENTRY_ID) ); @@ -149,7 +149,7 @@ CREATE TABLE QRTZ_FIRED_TRIGGERS -- ---------------------------- -- Table structure for QRTZ_LOCKS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_LOCKS; +DROP TABLE IF EXISTS QRTZ_LOCKS CASCADE; CREATE TABLE QRTZ_LOCKS ( SCHED_NAME varchar(120) NOT NULL, @@ -164,7 +164,7 @@ CREATE TABLE QRTZ_LOCKS -- ---------------------------- -- Table structure for QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS; +DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS CASCADE; CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS ( SCHED_NAME varchar(120) NOT NULL, @@ -179,7 +179,7 @@ CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS -- ---------------------------- -- Table structure for QRTZ_SCHEDULER_STATE -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE; +DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE CASCADE; CREATE TABLE QRTZ_SCHEDULER_STATE ( SCHED_NAME varchar(120) NOT NULL, @@ -196,7 +196,7 @@ CREATE TABLE QRTZ_SCHEDULER_STATE -- ---------------------------- -- Table structure for QRTZ_SIMPLE_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS; +DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS CASCADE; CREATE 
TABLE QRTZ_SIMPLE_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -216,7 +216,7 @@ CREATE TABLE QRTZ_SIMPLE_TRIGGERS -- ---------------------------- -- Table structure for QRTZ_SIMPROP_TRIGGERS -- ---------------------------- -DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS; +DROP TABLE IF EXISTS QRTZ_SIMPROP_TRIGGERS CASCADE; CREATE TABLE QRTZ_SIMPROP_TRIGGERS ( SCHED_NAME varchar(120) NOT NULL, @@ -231,8 +231,8 @@ CREATE TABLE QRTZ_SIMPROP_TRIGGERS LONG_PROP_2 bigint(20) DEFAULT NULL, DEC_PROP_1 decimal(13, 4) DEFAULT NULL, DEC_PROP_2 decimal(13, 4) DEFAULT NULL, - BOOL_PROP_1 varchar(1) DEFAULT NULL, - BOOL_PROP_2 varchar(1) DEFAULT NULL, + BOOL_PROP_1 boolean DEFAULT NULL, + BOOL_PROP_2 boolean DEFAULT NULL, PRIMARY KEY (SCHED_NAME, TRIGGER_NAME, TRIGGER_GROUP), CONSTRAINT QRTZ_SIMPROP_TRIGGERS_ibfk_1 FOREIGN KEY (SCHED_NAME, TRIGGER_NAME, TRIGGER_GROUP) REFERENCES QRTZ_TRIGGERS (SCHED_NAME, TRIGGER_NAME, TRIGGER_GROUP) ); @@ -248,7 +248,7 @@ CREATE TABLE QRTZ_SIMPROP_TRIGGERS -- ---------------------------- -- Table structure for t_ds_access_token -- ---------------------------- -DROP TABLE IF EXISTS t_ds_access_token; +DROP TABLE IF EXISTS t_ds_access_token CASCADE; CREATE TABLE t_ds_access_token ( id int(11) NOT NULL AUTO_INCREMENT, @@ -267,7 +267,7 @@ CREATE TABLE t_ds_access_token -- ---------------------------- -- Table structure for t_ds_alert -- ---------------------------- -DROP TABLE IF EXISTS t_ds_alert; +DROP TABLE IF EXISTS t_ds_alert CASCADE; CREATE TABLE t_ds_alert ( id int(11) NOT NULL AUTO_INCREMENT, @@ -288,7 +288,7 @@ CREATE TABLE t_ds_alert -- ---------------------------- -- Table structure for t_ds_alertgroup -- ---------------------------- -DROP TABLE IF EXISTS t_ds_alertgroup; +DROP TABLE IF EXISTS t_ds_alertgroup CASCADE; CREATE TABLE t_ds_alertgroup ( id int(11) NOT NULL AUTO_INCREMENT, @@ -309,26 +309,29 @@ CREATE TABLE t_ds_alertgroup -- ---------------------------- -- Table structure for t_ds_command -- ---------------------------- -DROP 
TABLE IF EXISTS t_ds_command; +DROP TABLE IF EXISTS t_ds_command CASCADE; CREATE TABLE t_ds_command ( - id int(11) NOT NULL AUTO_INCREMENT, - command_type tinyint(4) DEFAULT NULL, - process_definition_code bigint(20) DEFAULT NULL, - command_param text, - task_depend_type tinyint(4) DEFAULT NULL, - failure_strategy tinyint(4) DEFAULT '0', - warning_type tinyint(4) DEFAULT '0', - warning_group_id int(11) DEFAULT NULL, - schedule_time datetime DEFAULT NULL, - start_time datetime DEFAULT NULL, - executor_id int(11) DEFAULT NULL, - update_time datetime DEFAULT NULL, - process_instance_priority int(11) DEFAULT NULL, - worker_group varchar(64), - environment_code bigint(20) DEFAULT '-1', + id int(11) NOT NULL AUTO_INCREMENT, + command_type tinyint(4) DEFAULT NULL, + process_definition_code bigint(20) DEFAULT NULL, + command_param text, + task_depend_type tinyint(4) DEFAULT NULL, + failure_strategy tinyint(4) DEFAULT '0', + warning_type tinyint(4) DEFAULT '0', + warning_group_id int(11) DEFAULT NULL, + schedule_time datetime DEFAULT NULL, + start_time datetime DEFAULT NULL, + executor_id int(11) DEFAULT NULL, + update_time datetime DEFAULT NULL, + process_instance_priority int(11) DEFAULT NULL, + worker_group varchar(64), + environment_code bigint(20) DEFAULT '-1', + dry_run int NULL DEFAULT 0, + process_instance_id int(11) DEFAULT 0, + process_definition_version int(11) DEFAULT 0, PRIMARY KEY (id), - KEY priority_id_index (process_instance_priority, id) + KEY priority_id_index (process_instance_priority, id) ); -- ---------------------------- @@ -338,7 +341,7 @@ CREATE TABLE t_ds_command -- ---------------------------- -- Table structure for t_ds_datasource -- ---------------------------- -DROP TABLE IF EXISTS t_ds_datasource; +DROP TABLE IF EXISTS t_ds_datasource CASCADE; CREATE TABLE t_ds_datasource ( id int(11) NOT NULL AUTO_INCREMENT, @@ -360,25 +363,28 @@ CREATE TABLE t_ds_datasource -- ---------------------------- -- Table structure for t_ds_error_command -- 
---------------------------- -DROP TABLE IF EXISTS t_ds_error_command; +DROP TABLE IF EXISTS t_ds_error_command CASCADE; CREATE TABLE t_ds_error_command ( - id int(11) NOT NULL, - command_type tinyint(4) DEFAULT NULL, - executor_id int(11) DEFAULT NULL, - process_definition_code bigint(20) DEFAULT NULL, - command_param text, - task_depend_type tinyint(4) DEFAULT NULL, - failure_strategy tinyint(4) DEFAULT '0', - warning_type tinyint(4) DEFAULT '0', - warning_group_id int(11) DEFAULT NULL, - schedule_time datetime DEFAULT NULL, - start_time datetime DEFAULT NULL, - update_time datetime DEFAULT NULL, - process_instance_priority int(11) DEFAULT NULL, - worker_group varchar(64), - environment_code bigint(20) DEFAULT '-1', - message text, + id int(11) NOT NULL, + command_type tinyint(4) DEFAULT NULL, + executor_id int(11) DEFAULT NULL, + process_definition_code bigint(20) DEFAULT NULL, + command_param text, + task_depend_type tinyint(4) DEFAULT NULL, + failure_strategy tinyint(4) DEFAULT '0', + warning_type tinyint(4) DEFAULT '0', + warning_group_id int(11) DEFAULT NULL, + schedule_time datetime DEFAULT NULL, + start_time datetime DEFAULT NULL, + update_time datetime DEFAULT NULL, + process_instance_priority int(11) DEFAULT NULL, + worker_group varchar(64), + environment_code bigint(20) DEFAULT '-1', + message text, + dry_run int NULL DEFAULT 0, + process_instance_id int(11) DEFAULT 0, + process_definition_version int(11) DEFAULT 0, PRIMARY KEY (id) ); @@ -389,7 +395,7 @@ CREATE TABLE t_ds_error_command -- ---------------------------- -- Table structure for t_ds_process_definition -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_definition; +DROP TABLE IF EXISTS t_ds_process_definition CASCADE; CREATE TABLE t_ds_process_definition ( id int(11) NOT NULL AUTO_INCREMENT, @@ -406,6 +412,7 @@ CREATE TABLE t_ds_process_definition warning_group_id int(11) DEFAULT NULL, timeout int(11) DEFAULT '0', tenant_id int(11) NOT NULL DEFAULT '-1', + execution_type 
tinyint(4) DEFAULT '0', create_time datetime NOT NULL, update_time datetime DEFAULT NULL, PRIMARY KEY (id), @@ -420,7 +427,7 @@ CREATE TABLE t_ds_process_definition -- ---------------------------- -- Table structure for t_ds_process_definition_log -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_definition_log; +DROP TABLE IF EXISTS t_ds_process_definition_log CASCADE; CREATE TABLE t_ds_process_definition_log ( id int(11) NOT NULL AUTO_INCREMENT, @@ -437,6 +444,7 @@ CREATE TABLE t_ds_process_definition_log warning_group_id int(11) DEFAULT NULL, timeout int(11) DEFAULT '0', tenant_id int(11) NOT NULL DEFAULT '-1', + execution_type tinyint(4) DEFAULT '0', operator int(11) DEFAULT NULL, operate_time datetime DEFAULT NULL, create_time datetime NOT NULL, @@ -447,7 +455,7 @@ CREATE TABLE t_ds_process_definition_log -- ---------------------------- -- Table structure for t_ds_task_definition -- ---------------------------- -DROP TABLE IF EXISTS t_ds_task_definition; +DROP TABLE IF EXISTS t_ds_task_definition CASCADE; CREATE TABLE t_ds_task_definition ( id int(11) NOT NULL AUTO_INCREMENT, @@ -469,17 +477,16 @@ CREATE TABLE t_ds_task_definition timeout_notify_strategy tinyint(4) DEFAULT NULL, timeout int(11) DEFAULT '0', delay_time int(11) DEFAULT '0', - resource_ids varchar(255) DEFAULT NULL, + resource_ids text, create_time datetime NOT NULL, update_time datetime DEFAULT NULL, - PRIMARY KEY (id, code), - UNIQUE KEY task_unique (name,project_code) USING BTREE + PRIMARY KEY (id, code) ); -- ---------------------------- -- Table structure for t_ds_task_definition_log -- ---------------------------- -DROP TABLE IF EXISTS t_ds_task_definition_log; +DROP TABLE IF EXISTS t_ds_task_definition_log CASCADE; CREATE TABLE t_ds_task_definition_log ( id int(11) NOT NULL AUTO_INCREMENT, @@ -501,7 +508,7 @@ CREATE TABLE t_ds_task_definition_log timeout_notify_strategy tinyint(4) DEFAULT NULL, timeout int(11) DEFAULT '0', delay_time int(11) DEFAULT '0', - resource_ids 
varchar(255) DEFAULT NULL, + resource_ids text, operator int(11) DEFAULT NULL, operate_time datetime DEFAULT NULL, create_time datetime NOT NULL, @@ -512,7 +519,7 @@ CREATE TABLE t_ds_task_definition_log -- ---------------------------- -- Table structure for t_ds_process_task_relation -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_relation; +DROP TABLE IF EXISTS t_ds_process_task_relation CASCADE; CREATE TABLE t_ds_process_task_relation ( id int(11) NOT NULL AUTO_INCREMENT, @@ -534,7 +541,7 @@ CREATE TABLE t_ds_process_task_relation -- ---------------------------- -- Table structure for t_ds_process_task_relation_log -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_task_relation_log; +DROP TABLE IF EXISTS t_ds_process_task_relation_log CASCADE; CREATE TABLE t_ds_process_task_relation_log ( id int(11) NOT NULL AUTO_INCREMENT, @@ -558,7 +565,7 @@ CREATE TABLE t_ds_process_task_relation_log -- ---------------------------- -- Table structure for t_ds_process_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_process_instance; +DROP TABLE IF EXISTS t_ds_process_instance CASCADE; CREATE TABLE t_ds_process_instance ( id int(11) NOT NULL AUTO_INCREMENT, @@ -590,8 +597,11 @@ CREATE TABLE t_ds_process_instance worker_group varchar(64) DEFAULT NULL, environment_code bigint(20) DEFAULT '-1', timeout int(11) DEFAULT '0', + next_process_instance_id int(11) DEFAULT '0', tenant_id int(11) NOT NULL DEFAULT '-1', var_pool longtext, + dry_run int NULL DEFAULT 0, + restart_time datetime DEFAULT NULL, PRIMARY KEY (id) ); @@ -602,7 +612,7 @@ CREATE TABLE t_ds_process_instance -- ---------------------------- -- Table structure for t_ds_project -- ---------------------------- -DROP TABLE IF EXISTS t_ds_project; +DROP TABLE IF EXISTS t_ds_project CASCADE; CREATE TABLE t_ds_project ( id int(11) NOT NULL AUTO_INCREMENT, @@ -623,7 +633,7 @@ CREATE TABLE t_ds_project -- ---------------------------- -- Table structure for t_ds_queue -- 
---------------------------- -DROP TABLE IF EXISTS t_ds_queue; +DROP TABLE IF EXISTS t_ds_queue CASCADE; CREATE TABLE t_ds_queue ( id int(11) NOT NULL AUTO_INCREMENT, @@ -643,7 +653,7 @@ VALUES ('1', 'default', 'default', null, null); -- ---------------------------- -- Table structure for t_ds_relation_datasource_user -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_datasource_user; +DROP TABLE IF EXISTS t_ds_relation_datasource_user CASCADE; CREATE TABLE t_ds_relation_datasource_user ( id int(11) NOT NULL AUTO_INCREMENT, @@ -662,7 +672,7 @@ CREATE TABLE t_ds_relation_datasource_user -- ---------------------------- -- Table structure for t_ds_relation_process_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_process_instance; +DROP TABLE IF EXISTS t_ds_relation_process_instance CASCADE; CREATE TABLE t_ds_relation_process_instance ( id int(11) NOT NULL AUTO_INCREMENT, @@ -679,7 +689,7 @@ CREATE TABLE t_ds_relation_process_instance -- ---------------------------- -- Table structure for t_ds_relation_project_user -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_project_user; +DROP TABLE IF EXISTS t_ds_relation_project_user CASCADE; CREATE TABLE t_ds_relation_project_user ( id int(11) NOT NULL AUTO_INCREMENT, @@ -698,7 +708,7 @@ CREATE TABLE t_ds_relation_project_user -- ---------------------------- -- Table structure for t_ds_relation_resources_user -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_resources_user; +DROP TABLE IF EXISTS t_ds_relation_resources_user CASCADE; CREATE TABLE t_ds_relation_resources_user ( id int(11) NOT NULL AUTO_INCREMENT, @@ -717,7 +727,7 @@ CREATE TABLE t_ds_relation_resources_user -- ---------------------------- -- Table structure for t_ds_relation_udfs_user -- ---------------------------- -DROP TABLE IF EXISTS t_ds_relation_udfs_user; +DROP TABLE IF EXISTS t_ds_relation_udfs_user CASCADE; CREATE TABLE t_ds_relation_udfs_user ( id int(11) NOT NULL 
AUTO_INCREMENT, @@ -732,7 +742,7 @@ CREATE TABLE t_ds_relation_udfs_user -- ---------------------------- -- Table structure for t_ds_resources -- ---------------------------- -DROP TABLE IF EXISTS t_ds_resources; +DROP TABLE IF EXISTS t_ds_resources CASCADE; CREATE TABLE t_ds_resources ( id int(11) NOT NULL AUTO_INCREMENT, @@ -745,7 +755,7 @@ CREATE TABLE t_ds_resources create_time datetime DEFAULT NULL, update_time datetime DEFAULT NULL, pid int(11) DEFAULT NULL, - full_name varchar(64) DEFAULT NULL, + full_name varchar(128) DEFAULT NULL, is_directory tinyint(4) DEFAULT NULL, PRIMARY KEY (id), UNIQUE KEY t_ds_resources_un (full_name, type) @@ -758,7 +768,7 @@ CREATE TABLE t_ds_resources -- ---------------------------- -- Table structure for t_ds_schedules -- ---------------------------- -DROP TABLE IF EXISTS t_ds_schedules; +DROP TABLE IF EXISTS t_ds_schedules CASCADE; CREATE TABLE t_ds_schedules ( id int(11) NOT NULL AUTO_INCREMENT, @@ -787,7 +797,7 @@ CREATE TABLE t_ds_schedules -- ---------------------------- -- Table structure for t_ds_session -- ---------------------------- -DROP TABLE IF EXISTS t_ds_session; +DROP TABLE IF EXISTS t_ds_session CASCADE; CREATE TABLE t_ds_session ( id varchar(64) NOT NULL, @@ -804,7 +814,7 @@ CREATE TABLE t_ds_session -- ---------------------------- -- Table structure for t_ds_task_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_task_instance; +DROP TABLE IF EXISTS t_ds_task_instance CASCADE; CREATE TABLE t_ds_task_instance ( id int(11) NOT NULL AUTO_INCREMENT, @@ -831,11 +841,12 @@ CREATE TABLE t_ds_task_instance task_instance_priority int(11) DEFAULT NULL, worker_group varchar(64) DEFAULT NULL, environment_code bigint(20) DEFAULT '-1', - environment_config text DEFAULT '', + environment_config text DEFAULT '', executor_id int(11) DEFAULT NULL, first_submit_time datetime DEFAULT NULL, delay_time int(4) DEFAULT '0', var_pool longtext, + dry_run int NULL DEFAULT 0, PRIMARY KEY (id), FOREIGN KEY 
(process_instance_id) REFERENCES t_ds_process_instance (id) ON DELETE CASCADE ); @@ -847,7 +858,7 @@ CREATE TABLE t_ds_task_instance -- ---------------------------- -- Table structure for t_ds_tenant -- ---------------------------- -DROP TABLE IF EXISTS t_ds_tenant; +DROP TABLE IF EXISTS t_ds_tenant CASCADE; CREATE TABLE t_ds_tenant ( id int(11) NOT NULL AUTO_INCREMENT, @@ -866,7 +877,7 @@ CREATE TABLE t_ds_tenant -- ---------------------------- -- Table structure for t_ds_udfs -- ---------------------------- -DROP TABLE IF EXISTS t_ds_udfs; +DROP TABLE IF EXISTS t_ds_udfs CASCADE; CREATE TABLE t_ds_udfs ( id int(11) NOT NULL AUTO_INCREMENT, @@ -891,7 +902,7 @@ CREATE TABLE t_ds_udfs -- ---------------------------- -- Table structure for t_ds_user -- ---------------------------- -DROP TABLE IF EXISTS t_ds_user; +DROP TABLE IF EXISTS t_ds_user CASCADE; CREATE TABLE t_ds_user ( id int(11) NOT NULL AUTO_INCREMENT, @@ -916,7 +927,7 @@ CREATE TABLE t_ds_user -- ---------------------------- -- Table structure for t_ds_worker_group -- ---------------------------- -DROP TABLE IF EXISTS t_ds_worker_group; +DROP TABLE IF EXISTS t_ds_worker_group CASCADE; CREATE TABLE t_ds_worker_group ( id bigint(11) NOT NULL AUTO_INCREMENT, @@ -935,7 +946,7 @@ CREATE TABLE t_ds_worker_group -- ---------------------------- -- Table structure for t_ds_version -- ---------------------------- -DROP TABLE IF EXISTS t_ds_version; +DROP TABLE IF EXISTS t_ds_version CASCADE; CREATE TABLE t_ds_version ( id int(11) NOT NULL AUTO_INCREMENT, @@ -948,7 +959,7 @@ CREATE TABLE t_ds_version -- Records of t_ds_version -- ---------------------------- INSERT INTO t_ds_version -VALUES ('1', '1.4.0'); +VALUES ('1', '2.0.9'); -- ---------------------------- @@ -968,7 +979,7 @@ VALUES ('1', 'admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', '', -- ---------------------------- -- Table structure for t_ds_plugin_define -- ---------------------------- -DROP TABLE IF EXISTS t_ds_plugin_define; +DROP 
TABLE IF EXISTS t_ds_plugin_define CASCADE; CREATE TABLE t_ds_plugin_define ( id int NOT NULL AUTO_INCREMENT, @@ -984,7 +995,7 @@ CREATE TABLE t_ds_plugin_define -- ---------------------------- -- Table structure for t_ds_alert_plugin_instance -- ---------------------------- -DROP TABLE IF EXISTS t_ds_alert_plugin_instance; +DROP TABLE IF EXISTS t_ds_alert_plugin_instance CASCADE; CREATE TABLE t_ds_alert_plugin_instance ( id int NOT NULL AUTO_INCREMENT, @@ -999,34 +1010,34 @@ CREATE TABLE t_ds_alert_plugin_instance -- -- Table structure for table t_ds_environment -- -DROP TABLE IF EXISTS t_ds_environment; +DROP TABLE IF EXISTS t_ds_environment CASCADE; CREATE TABLE t_ds_environment ( - id int NOT NULL AUTO_INCREMENT, - code bigint(20) NOT NULL, - name varchar(100) DEFAULT NULL, - config text DEFAULT NULL, - description text, - operator int DEFAULT NULL, - create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, - update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (id), - UNIQUE KEY environment_name_unique (name), - UNIQUE KEY environment_code_unique (code) + id int NOT NULL AUTO_INCREMENT, + code bigint(20) NOT NULL, + name varchar(100) DEFAULT NULL, + config text DEFAULT NULL, + description text, + operator int DEFAULT NULL, + create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (id), + UNIQUE KEY environment_name_unique (name), + UNIQUE KEY environment_code_unique (code) ); -- -- Table structure for table t_ds_environment_worker_group_relation -- -DROP TABLE IF EXISTS t_ds_environment_worker_group_relation; +DROP TABLE IF EXISTS t_ds_environment_worker_group_relation CASCADE; CREATE TABLE t_ds_environment_worker_group_relation ( - id int NOT NULL AUTO_INCREMENT, - environment_code bigint(20) NOT NULL, - worker_group varchar(255) NOT NULL, - operator int DEFAULT NULL, - create_time timestamp NOT NULL DEFAULT 
CURRENT_TIMESTAMP, - update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (id) , + id int NOT NULL AUTO_INCREMENT, + environment_code bigint(20) NOT NULL, + worker_group varchar(255) NOT NULL, + operator int DEFAULT NULL, + create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (id), UNIQUE KEY environment_worker_group_unique (environment_code,worker_group) ); diff --git a/sql/dolphinscheduler_mysql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql similarity index 90% rename from sql/dolphinscheduler_mysql.sql rename to dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql index 8aa1519094b59488a00e22548000011215e6a138..422fcb3b43c2a32e0f01ed0d67f1f4417bbc830a 100644 --- a/sql/dolphinscheduler_mysql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql @@ -317,22 +317,24 @@ CREATE TABLE `t_ds_alertgroup`( -- ---------------------------- DROP TABLE IF EXISTS `t_ds_command`; CREATE TABLE `t_ds_command` ( - `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', - `process_definition_code` bigint(20) DEFAULT NULL COMMENT 'process definition code', - `command_param` text COMMENT 'json command parameters', - `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', - `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', - `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully 
and all failures are sent', - `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', - `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', - `start_time` datetime DEFAULT NULL COMMENT 'start time', - `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', - `update_time` datetime DEFAULT NULL COMMENT 'update time', + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', + `command_type` tinyint(4) DEFAULT NULL COMMENT 'Command type: 0 start workflow, 1 start execution from current node, 2 resume fault-tolerant workflow, 3 resume pause process, 4 start execution from failed node, 5 complement, 6 schedule, 7 rerun, 8 pause, 9 stop, 10 resume waiting thread', + `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', + `process_definition_version` int(11) DEFAULT '0' COMMENT 'process definition version', + `process_instance_id` int(11) DEFAULT '0' COMMENT 'process instance id', + `command_param` text COMMENT 'json command parameters', + `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'Node dependency type: 0 current node, 1 forward, 2 backward', + `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'Failed policy: 0 end, 1 continue', + `warning_type` tinyint(4) DEFAULT '0' COMMENT 'Alarm type: 0 is not sent, 1 process is sent successfully, 2 process is sent failed, 3 process is sent successfully and all failures are sent', + `warning_group_id` int(11) DEFAULT NULL COMMENT 'warning group', + `schedule_time` datetime DEFAULT NULL COMMENT 'schedule time', + `start_time` datetime DEFAULT NULL COMMENT 'start time', + `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', + `update_time` datetime DEFAULT NULL COMMENT 'update time', `process_instance_priority` int(11) DEFAULT NULL COMMENT 'process instance priority: 0 Highest,1 High,2 Medium,3 Low,4 Lowest', - `worker_group` varchar(64) COMMENT 'worker group', - `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', - `dry_run` int NULL DEFAULT 0 COMMENT 'dry 
run flag:0 normal, 1 dry run', + `worker_group` varchar(64) COMMENT 'worker group', + `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', + `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run', PRIMARY KEY (`id`), KEY `priority_id_index` (`process_instance_priority`,`id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; @@ -370,7 +372,9 @@ CREATE TABLE `t_ds_error_command` ( `id` int(11) NOT NULL COMMENT 'key', `command_type` tinyint(4) DEFAULT NULL COMMENT 'command type', `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', - `process_definition_code` bigint(20) DEFAULT NULL COMMENT 'process definition code', + `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', + `process_definition_version` int(11) DEFAULT '0' COMMENT 'process definition version', + `process_instance_id` int(11) DEFAULT '0' COMMENT 'process instance id: 0', `command_param` text COMMENT 'json command parameters', `task_depend_type` tinyint(4) DEFAULT NULL COMMENT 'task depend type', `failure_strategy` tinyint(4) DEFAULT '0' COMMENT 'failure strategy', @@ -383,7 +387,7 @@ CREATE TABLE `t_ds_error_command` ( `worker_group` varchar(64) COMMENT 'worker group', `environment_code` bigint(20) DEFAULT '-1' COMMENT 'environment code', `message` text COMMENT 'message', - `dry_run` int NULL DEFAULT NULL COMMENT 'dry run flag: 0 normal, 1 dry run', + `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag: 0 normal, 1 dry run', PRIMARY KEY (`id`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC; @@ -399,7 +403,7 @@ CREATE TABLE `t_ds_process_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(255) DEFAULT NULL COMMENT 'process definition name', - `version` int(11) DEFAULT NULL COMMENT 'process definition version', + `version` int(11) DEFAULT '0' COMMENT 'process definition version', `description` text COMMENT 
'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', @@ -410,8 +414,9 @@ CREATE TABLE `t_ds_process_definition` ( `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out, unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', + `execution_type` tinyint(4) DEFAULT '0' COMMENT 'execution_type 0:parallel,1:serial wait,2:serial discard,3:serial priority', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', + `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`,`code`), UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; @@ -428,7 +433,7 @@ CREATE TABLE `t_ds_process_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'process definition name', - `version` int(11) DEFAULT NULL COMMENT 'process definition version', + `version` int(11) DEFAULT '0' COMMENT 'process definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', @@ -439,10 +444,11 @@ CREATE TABLE `t_ds_process_definition_log` ( `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', `timeout` int(11) DEFAULT '0' COMMENT 'time out,unit: minute', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', + `execution_type` tinyint(4) DEFAULT '0' COMMENT 'execution_type 0:parallel,1:serial wait,2:serial discard,3:serial priority', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` 
datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', + `update_time` datetime NOT NULL COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; @@ -454,7 +460,7 @@ CREATE TABLE `t_ds_task_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', - `version` int(11) DEFAULT NULL COMMENT 'task definition version', + `version` int(11) DEFAULT '0' COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', @@ -470,11 +476,10 @@ CREATE TABLE `t_ds_task_definition` ( `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', - `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', + `resource_ids` text COMMENT 'resource id, separated by comma', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', - PRIMARY KEY (`id`,`code`), - UNIQUE KEY `task_unique` (`name`,`project_code`) USING BTREE + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`,`code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -485,12 +490,12 @@ CREATE TABLE `t_ds_task_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', - `version` int(11) DEFAULT NULL COMMENT 'task definition version', + `version` int(11) DEFAULT '0' COMMENT 'task definition version', 
`description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', - `task_params` text COMMENT 'job custom parameters', + `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', @@ -501,12 +506,14 @@ CREATE TABLE `t_ds_task_definition_log` ( `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', - `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', + `resource_ids` text DEFAULT NULL COMMENT 'resource id, separated by comma', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', - PRIMARY KEY (`id`) + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`), + KEY `idx_code_version` (`code`,`version`), + KEY `idx_project_code` (`project_code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -516,9 +523,9 @@ DROP TABLE IF EXISTS `t_ds_process_task_relation`; CREATE TABLE `t_ds_process_task_relation` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', - `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', + 
`process_definition_version` int(11) NOT NULL COMMENT 'process version', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', @@ -526,8 +533,9 @@ CREATE TABLE `t_ds_process_task_relation` ( `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', `condition_params` text COMMENT 'condition params(json)', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', - PRIMARY KEY (`id`) + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`), + KEY `idx_code` (`project_code`,`process_definition_code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -537,9 +545,9 @@ DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; CREATE TABLE `t_ds_process_task_relation_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `name` varchar(200) DEFAULT NULL COMMENT 'relation name', - `process_definition_version` int(11) DEFAULT NULL COMMENT 'process version', `project_code` bigint(20) NOT NULL COMMENT 'project code', `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', + `process_definition_version` int(11) NOT NULL COMMENT 'process version', `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', @@ -549,8 +557,9 @@ CREATE TABLE `t_ds_process_task_relation_log` ( `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', - PRIMARY KEY (`id`) + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`), + KEY `idx_process_code_version` 
(`process_definition_code`,`process_definition_version`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -560,8 +569,8 @@ DROP TABLE IF EXISTS `t_ds_process_instance`; CREATE TABLE `t_ds_process_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `name` varchar(255) DEFAULT NULL COMMENT 'process instance name', - `process_definition_version` int(11) DEFAULT NULL COMMENT 'process definition version', - `process_definition_code` bigint(20) not NULL COMMENT 'process definition code', + `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', + `process_definition_version` int(11) DEFAULT '0' COMMENT 'process definition version', `state` tinyint(4) DEFAULT NULL COMMENT 'process instance Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `recovery` tinyint(4) DEFAULT NULL COMMENT 'process instance failover flag:0:normal,1:failover instance', `start_time` datetime DEFAULT NULL COMMENT 'process instance start time', @@ -589,10 +598,12 @@ CREATE TABLE `t_ds_process_instance` ( `timeout` int(11) DEFAULT '0' COMMENT 'time out', `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', `var_pool` longtext COMMENT 'var_pool', - `dry_run` int NULL DEFAULT 0 COMMENT 'dry run flag: 0 normal, 1 dry run ', + `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run', + `next_process_instance_id` int(11) DEFAULT '0' COMMENT 'serial queue next processInstanceId', + `restart_time` datetime DEFAULT NULL COMMENT 'process instance restart time', PRIMARY KEY (`id`), KEY `process_instance_index` (`process_definition_code`,`id`) USING BTREE, - KEY `start_time_index` (`start_time`) USING BTREE + KEY `start_time_index` (`start_time`,`end_time`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ 
-738,7 +749,7 @@ CREATE TABLE `t_ds_resources` ( `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `pid` int(11) DEFAULT NULL, - `full_name` varchar(64) DEFAULT NULL, + `full_name` varchar(128) DEFAULT NULL, `is_directory` tinyint(4) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `t_ds_resources_un` (`full_name`,`type`) @@ -757,7 +768,7 @@ CREATE TABLE `t_ds_schedules` ( `process_definition_code` bigint(20) NOT NULL COMMENT 'process definition code', `start_time` datetime NOT NULL COMMENT 'start time', `end_time` datetime NOT NULL COMMENT 'end time', - `timezone_id` varchar(40) DEFAULT NULL COMMENT 'timezoneId', + `timezone_id` varchar(40) DEFAULT NULL COMMENT 'schedule timezone id', `crontab` varchar(255) NOT NULL COMMENT 'crontab description', `failure_strategy` tinyint(4) NOT NULL COMMENT 'failure strategy. 0:end,1:continue', `user_id` int(11) NOT NULL COMMENT 'user id', @@ -801,7 +812,7 @@ CREATE TABLE `t_ds_task_instance` ( `name` varchar(255) DEFAULT NULL COMMENT 'task name', `task_type` varchar(50) NOT NULL COMMENT 'task type', `task_code` bigint(20) NOT NULL COMMENT 'task definition code', - `task_definition_version` int(11) DEFAULT NULL COMMENT 'task definition version', + `task_definition_version` int(11) DEFAULT '0' COMMENT 'task definition version', `process_instance_id` int(11) DEFAULT NULL COMMENT 'process instance id', `state` tinyint(4) DEFAULT NULL COMMENT 'Status: 0 commit succeeded, 1 running, 2 prepare to pause, 3 pause, 4 prepare to stop, 5 stop, 6 fail, 7 succeed, 8 need fault tolerance, 9 kill, 10 wait for thread, 11 wait for dependency to complete', `submit_time` datetime DEFAULT NULL COMMENT 'task submit time', @@ -813,8 +824,8 @@ CREATE TABLE `t_ds_task_instance` ( `alert_flag` tinyint(4) DEFAULT NULL COMMENT 'whether alert', `retry_times` int(4) DEFAULT '0' COMMENT 'task retry times', `pid` int(4) DEFAULT NULL COMMENT 'pid of task', - `app_link` text COMMENT 'yarn app id', 
- `task_params` text COMMENT 'job custom parameters', + `app_link` longtext COMMENT 'yarn app id', + `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `retry_interval` int(4) DEFAULT NULL COMMENT 'retry interval when task failed ', `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', @@ -826,9 +837,10 @@ CREATE TABLE `t_ds_task_instance` ( `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time', `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time', `var_pool` longtext COMMENT 'var_pool', - `dry_run` int NULL DEFAULT NULL COMMENT 'dry run flag: 0 normal, 1 dry run', + `dry_run` tinyint(4) DEFAULT '0' COMMENT 'dry run flag: 0 normal, 1 dry run', PRIMARY KEY (`id`), KEY `process_instance_id` (`process_instance_id`) USING BTREE, + KEY `idx_code_version` (`task_code`, `task_definition_version`) USING BTREE, CONSTRAINT `foreign_key_instance_id` FOREIGN KEY (`process_instance_id`) REFERENCES `t_ds_process_instance` (`id`) ON DELETE CASCADE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; @@ -893,7 +905,7 @@ CREATE TABLE `t_ds_user` ( `create_time` datetime DEFAULT NULL COMMENT 'create time', `update_time` datetime DEFAULT NULL COMMENT 'update time', `queue` varchar(64) DEFAULT NULL COMMENT 'queue', - `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable', + `state` tinyint(4) DEFAULT '1' COMMENT 'state 0:disable 1:enable', PRIMARY KEY (`id`), UNIQUE KEY `user_name_unique` (`user_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; @@ -934,7 +946,7 @@ CREATE TABLE `t_ds_version` ( -- ---------------------------- -- Records of t_ds_version -- ---------------------------- -INSERT INTO `t_ds_version` VALUES ('1', '1.4.0'); +INSERT INTO `t_ds_version` VALUES ('1', '2.0.9'); -- ---------------------------- diff --git a/sql/dolphinscheduler_postgre.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql similarity 
index 90% rename from sql/dolphinscheduler_postgre.sql rename to dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql index 27f525958e7eecc07e7bf36f8f13e24bc62d9e64..681f98636aa2abea4cb316c45d784aea40cdb4d3 100644 --- a/sql/dolphinscheduler_postgre.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql @@ -191,7 +191,7 @@ CREATE INDEX IDX_QRTZ_FT_TG ON QRTZ_FIRED_TRIGGERS(SCHED_NAME,TRIGGER_GROUP); DROP TABLE IF EXISTS t_ds_access_token; CREATE TABLE t_ds_access_token ( - id int NOT NULL , + id serial NOT NULL , user_id int DEFAULT NULL , token varchar(64) DEFAULT NULL , expire_time timestamp DEFAULT NULL , @@ -206,7 +206,7 @@ CREATE TABLE t_ds_access_token ( DROP TABLE IF EXISTS t_ds_alert; CREATE TABLE t_ds_alert ( - id int NOT NULL , + id serial NOT NULL , title varchar(64) DEFAULT NULL , content text , alert_status int DEFAULT '0' , @@ -239,22 +239,24 @@ CREATE TABLE t_ds_alertgroup( DROP TABLE IF EXISTS t_ds_command; CREATE TABLE t_ds_command ( - id int NOT NULL , - command_type int DEFAULT NULL , - process_definition_code bigint NOT NULL , - command_param text , - task_depend_type int DEFAULT NULL , - failure_strategy int DEFAULT '0' , - warning_type int DEFAULT '0' , - warning_group_id int DEFAULT NULL , - schedule_time timestamp DEFAULT NULL , - start_time timestamp DEFAULT NULL , - executor_id int DEFAULT NULL , - update_time timestamp DEFAULT NULL , + id serial NOT NULL , + command_type int DEFAULT NULL , + process_definition_code bigint NOT NULL , + command_param text , + task_depend_type int DEFAULT NULL , + failure_strategy int DEFAULT '0' , + warning_type int DEFAULT '0' , + warning_group_id int DEFAULT NULL , + schedule_time timestamp DEFAULT NULL , + start_time timestamp DEFAULT NULL , + executor_id int DEFAULT NULL , + update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , - worker_group varchar(64), - environment_code bigint DEFAULT '-1', - dry_run int DEFAULT '0' , + 
worker_group varchar(64), + environment_code bigint DEFAULT '-1', + dry_run int DEFAULT '0' , + process_instance_id int DEFAULT 0, + process_definition_version int DEFAULT 0, PRIMARY KEY (id) ) ; @@ -266,7 +268,7 @@ create index priority_id_index on t_ds_command (process_instance_priority,id); DROP TABLE IF EXISTS t_ds_datasource; CREATE TABLE t_ds_datasource ( - id int NOT NULL , + id serial NOT NULL , name varchar(64) NOT NULL , note varchar(255) DEFAULT NULL , type int NOT NULL , @@ -284,23 +286,25 @@ CREATE TABLE t_ds_datasource ( DROP TABLE IF EXISTS t_ds_error_command; CREATE TABLE t_ds_error_command ( - id int NOT NULL , - command_type int DEFAULT NULL , - executor_id int DEFAULT NULL , - process_definition_code bigint NOT NULL , - command_param text , - task_depend_type int DEFAULT NULL , - failure_strategy int DEFAULT '0' , - warning_type int DEFAULT '0' , - warning_group_id int DEFAULT NULL , - schedule_time timestamp DEFAULT NULL , - start_time timestamp DEFAULT NULL , - update_time timestamp DEFAULT NULL , + id serial NOT NULL , + command_type int DEFAULT NULL , + process_definition_code bigint NOT NULL , + command_param text , + task_depend_type int DEFAULT NULL , + failure_strategy int DEFAULT '0' , + warning_type int DEFAULT '0' , + warning_group_id int DEFAULT NULL , + schedule_time timestamp DEFAULT NULL , + start_time timestamp DEFAULT NULL , + executor_id int DEFAULT NULL , + update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , - worker_group varchar(64), - environment_code bigint DEFAULT '-1', - message text , - dry_ru int DEFAULT '0' , + worker_group varchar(64), + environment_code bigint DEFAULT '-1', + dry_run int DEFAULT '0' , + message text , + process_instance_id int DEFAULT 0, + process_definition_version int DEFAULT 0, PRIMARY KEY (id) ); -- @@ -313,10 +317,10 @@ CREATE TABLE t_ds_error_command ( DROP TABLE IF EXISTS t_ds_process_definition; CREATE TABLE t_ds_process_definition ( - id int NOT NULL , + id 
serial NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , - version int DEFAULT NULL , + version int NOT NULL , description text , project_code bigint DEFAULT NULL , release_state int DEFAULT NULL , @@ -327,6 +331,7 @@ CREATE TABLE t_ds_process_definition ( flag int DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int DEFAULT '-1' , + execution_type int DEFAULT '0', create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , PRIMARY KEY (id) , @@ -340,7 +345,7 @@ CREATE TABLE t_ds_process_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , - version int DEFAULT NULL , + version int NOT NULL , description text , project_code bigint DEFAULT NULL , release_state int DEFAULT NULL , @@ -351,6 +356,7 @@ CREATE TABLE t_ds_process_definition_log ( flag int DEFAULT NULL , timeout int DEFAULT '0' , tenant_id int DEFAULT '-1' , + execution_type int DEFAULT '0', operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , @@ -360,10 +366,10 @@ CREATE TABLE t_ds_process_definition_log ( DROP TABLE IF EXISTS t_ds_task_definition; CREATE TABLE t_ds_task_definition ( - id int NOT NULL , + id serial NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , - version int DEFAULT NULL , + version int NOT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , @@ -379,21 +385,20 @@ CREATE TABLE t_ds_task_definition ( timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , - resource_ids varchar(255) DEFAULT NULL , + resource_ids text , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , - PRIMARY KEY (id) , - CONSTRAINT task_definition_unique UNIQUE (name, project_code) + PRIMARY KEY (id) ) ; -create index task_definition_index on t_ds_task_definition (project_code,id); +create index task_definition_index on t_ds_task_definition (code,id); DROP TABLE IF EXISTS 
t_ds_task_definition_log; CREATE TABLE t_ds_task_definition_log ( id int NOT NULL , code bigint NOT NULL, name varchar(255) DEFAULT NULL , - version int DEFAULT NULL , + version int NOT NULL , description text , project_code bigint DEFAULT NULL , user_id int DEFAULT NULL , @@ -409,7 +414,7 @@ CREATE TABLE t_ds_task_definition_log ( timeout_notify_strategy int DEFAULT NULL , timeout int DEFAULT '0' , delay_time int DEFAULT '0' , - resource_ids varchar(255) DEFAULT NULL , + resource_ids text , operator int DEFAULT NULL , operate_time timestamp DEFAULT NULL , create_time timestamp DEFAULT NULL , @@ -417,13 +422,16 @@ CREATE TABLE t_ds_task_definition_log ( PRIMARY KEY (id) ) ; +create index idx_code_version on t_ds_task_definition_log (code,version); +create index idx_task_definition_log_project_code on t_ds_task_definition_log (project_code); + DROP TABLE IF EXISTS t_ds_process_task_relation; CREATE TABLE t_ds_process_task_relation ( - id int NOT NULL , + id serial NOT NULL , name varchar(255) DEFAULT NULL , - process_definition_version int DEFAULT NULL , project_code bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , + process_definition_version int DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , @@ -435,13 +443,15 @@ CREATE TABLE t_ds_process_task_relation ( PRIMARY KEY (id) ) ; +create index idx_code on t_ds_process_task_relation (project_code,process_definition_code); + DROP TABLE IF EXISTS t_ds_process_task_relation_log; CREATE TABLE t_ds_process_task_relation_log ( id int NOT NULL , name varchar(255) DEFAULT NULL , - process_definition_version int DEFAULT NULL , project_code bigint DEFAULT NULL , process_definition_code bigint DEFAULT NULL , + process_definition_version int DEFAULT NULL , pre_task_code bigint DEFAULT NULL , pre_task_version int DEFAULT '0' , post_task_code bigint DEFAULT NULL , @@ -454,17 +464,17 @@ CREATE TABLE t_ds_process_task_relation_log ( 
update_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; - +create index idx_process_code_version on t_ds_process_task_relation_log (process_definition_code,process_definition_version); -- -- Table structure for table t_ds_process_instance -- DROP TABLE IF EXISTS t_ds_process_instance; CREATE TABLE t_ds_process_instance ( - id int NOT NULL , + id serial NOT NULL , name varchar(255) DEFAULT NULL , - process_definition_version int DEFAULT NULL , process_definition_code bigint DEFAULT NULL , + process_definition_version int DEFAULT NULL , state int DEFAULT NULL , recovery int DEFAULT NULL , start_time timestamp DEFAULT NULL , @@ -495,11 +505,13 @@ CREATE TABLE t_ds_process_instance ( tenant_id int NOT NULL DEFAULT '-1' , var_pool text , dry_run int DEFAULT '0' , + next_process_instance_id int DEFAULT '0', + restart_time timestamp DEFAULT NULL , PRIMARY KEY (id) ) ; create index process_instance_index on t_ds_process_instance (process_definition_code,id); -create index start_time_index on t_ds_process_instance (start_time); +create index start_time_index on t_ds_process_instance (start_time,end_time); -- -- Table structure for table t_ds_project @@ -507,7 +519,7 @@ create index start_time_index on t_ds_process_instance (start_time); DROP TABLE IF EXISTS t_ds_project; CREATE TABLE t_ds_project ( - id int NOT NULL , + id serial NOT NULL , name varchar(100) DEFAULT NULL , code bigint NOT NULL, description varchar(200) DEFAULT NULL , @@ -526,7 +538,7 @@ create index user_id_index on t_ds_project (user_id); DROP TABLE IF EXISTS t_ds_queue; CREATE TABLE t_ds_queue ( - id int NOT NULL , + id serial NOT NULL , queue_name varchar(64) DEFAULT NULL , queue varchar(64) DEFAULT NULL , create_time timestamp DEFAULT NULL , @@ -541,7 +553,7 @@ CREATE TABLE t_ds_queue ( DROP TABLE IF EXISTS t_ds_relation_datasource_user; CREATE TABLE t_ds_relation_datasource_user ( - id int NOT NULL , + id serial NOT NULL , user_id int NOT NULL , datasource_id int DEFAULT NULL , perm int DEFAULT '1' , 
@@ -557,7 +569,7 @@ CREATE TABLE t_ds_relation_datasource_user ( DROP TABLE IF EXISTS t_ds_relation_process_instance; CREATE TABLE t_ds_relation_process_instance ( - id int NOT NULL , + id serial NOT NULL , parent_process_instance_id int DEFAULT NULL , parent_task_instance_id int DEFAULT NULL , process_instance_id int DEFAULT NULL , @@ -571,7 +583,7 @@ CREATE TABLE t_ds_relation_process_instance ( DROP TABLE IF EXISTS t_ds_relation_project_user; CREATE TABLE t_ds_relation_project_user ( - id int NOT NULL , + id serial NOT NULL , user_id int NOT NULL , project_id int DEFAULT NULL , perm int DEFAULT '1' , @@ -587,7 +599,7 @@ create index relation_project_user_id_index on t_ds_relation_project_user (user_ DROP TABLE IF EXISTS t_ds_relation_resources_user; CREATE TABLE t_ds_relation_resources_user ( - id int NOT NULL , + id serial NOT NULL , user_id int NOT NULL , resources_id int DEFAULT NULL , perm int DEFAULT '1' , @@ -602,7 +614,7 @@ CREATE TABLE t_ds_relation_resources_user ( DROP TABLE IF EXISTS t_ds_relation_udfs_user; CREATE TABLE t_ds_relation_udfs_user ( - id int NOT NULL , + id serial NOT NULL , user_id int NOT NULL , udf_id int DEFAULT NULL , perm int DEFAULT '1' , @@ -618,7 +630,7 @@ CREATE TABLE t_ds_relation_udfs_user ( DROP TABLE IF EXISTS t_ds_resources; CREATE TABLE t_ds_resources ( - id int NOT NULL , + id serial NOT NULL , alias varchar(64) DEFAULT NULL , file_name varchar(64) DEFAULT NULL , description varchar(255) DEFAULT NULL , @@ -628,8 +640,8 @@ CREATE TABLE t_ds_resources ( create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , pid int, - full_name varchar(64), - is_directory int, + full_name varchar(128), + is_directory boolean DEFAULT FALSE, PRIMARY KEY (id), CONSTRAINT t_ds_resources_un UNIQUE (full_name, type) ) ; @@ -641,7 +653,7 @@ CREATE TABLE t_ds_resources ( DROP TABLE IF EXISTS t_ds_schedules; CREATE TABLE t_ds_schedules ( - id int NOT NULL , + id serial NOT NULL , process_definition_code bigint NOT NULL , 
start_time timestamp NOT NULL , end_time timestamp NOT NULL , @@ -679,7 +691,7 @@ CREATE TABLE t_ds_session ( DROP TABLE IF EXISTS t_ds_task_instance; CREATE TABLE t_ds_task_instance ( - id int NOT NULL , + id serial NOT NULL , name varchar(255) DEFAULT NULL , task_type varchar(50) DEFAULT NULL , task_code bigint NOT NULL, @@ -709,17 +721,18 @@ CREATE TABLE t_ds_task_instance ( delay_time int DEFAULT '0' , var_pool text , dry_run int DEFAULT '0' , - PRIMARY KEY (id), - CONSTRAINT foreign_key_instance_id FOREIGN KEY(process_instance_id) REFERENCES t_ds_process_instance(id) ON DELETE CASCADE + PRIMARY KEY (id) ) ; +create index idx_task_instance_code_version on t_ds_task_instance (task_code, task_definition_version); + -- -- Table structure for table t_ds_tenant -- DROP TABLE IF EXISTS t_ds_tenant; CREATE TABLE t_ds_tenant ( - id int NOT NULL , + id serial NOT NULL , tenant_code varchar(64) DEFAULT NULL , description varchar(255) DEFAULT NULL , queue_id int DEFAULT NULL , @@ -734,7 +747,7 @@ CREATE TABLE t_ds_tenant ( DROP TABLE IF EXISTS t_ds_udfs; CREATE TABLE t_ds_udfs ( - id int NOT NULL , + id serial NOT NULL , user_id int NOT NULL , func_name varchar(100) NOT NULL , class_name varchar(255) NOT NULL , @@ -755,7 +768,7 @@ CREATE TABLE t_ds_udfs ( DROP TABLE IF EXISTS t_ds_user; CREATE TABLE t_ds_user ( - id int NOT NULL , + id serial NOT NULL , user_name varchar(64) DEFAULT NULL , user_password varchar(64) DEFAULT NULL , user_type int DEFAULT NULL , @@ -803,7 +816,7 @@ CREATE TABLE t_ds_worker_group ( DROP TABLE IF EXISTS t_ds_worker_server; CREATE TABLE t_ds_worker_server ( - id int NOT NULL , + id serial NOT NULL , host varchar(45) DEFAULT NULL , port int DEFAULT NULL , zk_directory varchar(64) DEFAULT NULL , @@ -920,7 +933,7 @@ INSERT INTO t_ds_queue(queue_name, queue, create_time, update_time) VALUES ('default', 'default', '2018-11-29 10:22:33', '2018-11-29 10:22:33'); -- Records of t_ds_queue,default queue name : default -INSERT INTO t_ds_version(version) 
VALUES ('1.4.0'); +INSERT INTO t_ds_version(version) VALUES ('2.0.9'); -- -- Table structure for table t_ds_plugin_define diff --git a/dolphinscheduler-dao/src/main/resources/sql/soft_version b/dolphinscheduler-dao/src/main/resources/sql/soft_version new file mode 100644 index 0000000000000000000000000000000000000000..ed35d092626c38637a554337b00e2a6a33c79584 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/soft_version @@ -0,0 +1 @@ +2.0.9 \ No newline at end of file diff --git a/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.1_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.0.2_schema/mysql/dolphinscheduler_dml.sql diff --git 
a/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.1.0_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_dml.sql 
b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.2.0_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename 
from sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql rename to 
dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_ddl.sql diff --git 
a/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.5_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.6_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_ddl.sql 
b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/mysql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_ddl.sql similarity index 100% rename from sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_ddl.sql diff --git a/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/1.3.7_schema/postgresql/dolphinscheduler_dml.sql diff --git a/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl.sql similarity index 45% rename from sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl.sql index 1dc9097ae014a307daf51566316b5be926eb27ec..aeaeb4b49e2cf14777a15370a0b56fecf82e1b09 100644 --- a/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql +++ 
b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl.sql @@ -27,7 +27,7 @@ CREATE PROCEDURE uc_dolphin_T_t_ds_user_A_state() AND TABLE_SCHEMA=(SELECT DATABASE()) AND COLUMN_NAME ='state') THEN - ALTER TABLE t_ds_user ADD `state` int(1) DEFAULT 1 COMMENT 'state 0:disable 1:enable'; + ALTER TABLE t_ds_user ADD `state` tinyint(4) DEFAULT '1' COMMENT 'state 0:disable 1:enable'; END IF; END; @@ -57,186 +57,6 @@ delimiter ; CALL uc_dolphin_T_t_ds_tenant_A_tenant_name; DROP PROCEDURE uc_dolphin_T_t_ds_tenant_A_tenant_name; --- uc_dolphin_T_t_ds_task_instance_A_first_submit_time -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_first_submit_time; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_task_instance' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='first_submit_time') - THEN - ALTER TABLE t_ds_task_instance ADD `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time'; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_task_instance_A_first_submit_time(); -DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time; - --- uc_dolphin_T_t_ds_task_instance_A_delay_time -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_task_instance' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='delay_time') - THEN - ALTER TABLE t_ds_task_instance ADD `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time'; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_task_instance_A_delay_time(); -DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time; - --- uc_dolphin_T_t_ds_task_instance_A_var_pool -drop PROCEDURE if EXISTS 
uc_dolphin_T_t_ds_task_instance_A_var_pool; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_task_instance' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='var_pool') - THEN - ALTER TABLE t_ds_task_instance ADD `var_pool` longtext NULL; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_task_instance_A_var_pool(); -DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_var_pool; - --- uc_dolphin_T_t_ds_process_instance_A_var_pool -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_var_pool; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_process_instance' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='var_pool') - THEN - ALTER TABLE t_ds_process_instance ADD `var_pool` longtext NULL; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_process_instance_A_var_pool(); -DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_var_pool; - --- uc_dolphin_T_t_ds_process_definition_A_modify_by -drop PROCEDURE if EXISTS ct_dolphin_T_t_ds_process_definition_version; -delimiter d// -CREATE PROCEDURE ct_dolphin_T_t_ds_process_definition_version() -BEGIN - CREATE TABLE IF NOT EXISTS `t_ds_process_definition_version` ( - `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', - `process_definition_id` int(11) NOT NULL COMMENT 'process definition id', - `version` int(11) DEFAULT NULL COMMENT 'process definition version', - `process_definition_json` longtext COMMENT 'process definition json content', - `description` text, - `global_params` text COMMENT 'global parameters', - `locations` text COMMENT 'Node location information', - `connects` text COMMENT 'Node connection information', - `receivers` text COMMENT 'receivers', - `receivers_cc` text COMMENT 'cc', - `create_time` datetime DEFAULT NULL 
COMMENT 'create time', - `timeout` int(11) DEFAULT '0' COMMENT 'time out', - `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource ids', - PRIMARY KEY (`id`), - UNIQUE KEY `process_definition_id_and_version` (`process_definition_id`,`version`) USING BTREE, - KEY `process_definition_index` (`id`) USING BTREE - ) ENGINE=InnoDB AUTO_INCREMENT=84 DEFAULT CHARSET=utf8; -END; - -d// - -delimiter ; -CALL ct_dolphin_T_t_ds_process_definition_version; -DROP PROCEDURE ct_dolphin_T_t_ds_process_definition_version; - --- ---------------------------- --- Table structure for t_ds_plugin_define --- ---------------------------- -DROP TABLE IF EXISTS `t_ds_plugin_define`; -CREATE TABLE `t_ds_plugin_define` ( - `id` int NOT NULL AUTO_INCREMENT, - `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', - `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . alert=alert plugin, job=job plugin', - `plugin_params` text COMMENT 'plugin params', - `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) -) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; - --- ---------------------------- --- Table structure for t_ds_alert_plugin_instance --- ---------------------------- -DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; -CREATE TABLE `t_ds_alert_plugin_instance` ( - `id` int NOT NULL AUTO_INCREMENT, - `plugin_define_id` int NOT NULL, - `plugin_instance_params` text COMMENT 'plugin instance params. 
Also contain the params value which user input in web ui.', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- uc_dolphin_T_t_ds_process_definition_A_warning_group_id -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_A_warning_group_id; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_process_definition' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='warning_group_id') - THEN - ALTER TABLE t_ds_process_definition ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_process_definition_A_warning_group_id(); -DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_warning_group_id; - --- uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; -delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id() - BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_process_definition_version' - AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='warning_group_id') - THEN - ALTER TABLE t_ds_process_definition_version ADD COLUMN `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `connects`; - END IF; - END; - -d// - -delimiter ; -CALL uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id(); -DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_version_A_warning_group_id; - -- uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids drop PROCEDURE if EXISTS 
uc_dolphin_T_t_ds_alertgroup_A_alert_instance_ids; delimiter d// @@ -317,42 +137,116 @@ delimiter ; CALL uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName(); DROP PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName; --- uc_dolphin_T_t_ds_schedules_A_add_timezone -drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_schedules_A_add_timezone; +-- uc_dolphin_T_t_ds_project_A_add_code +drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_project_A_add_code; delimiter d// -CREATE PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone() +CREATE PROCEDURE uc_dolphin_T_t_ds_project_A_add_code() BEGIN IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS - WHERE TABLE_NAME='t_ds_schedules' + WHERE TABLE_NAME='t_ds_project' AND TABLE_SCHEMA=(SELECT DATABASE()) - AND COLUMN_NAME ='timezone_id') + AND COLUMN_NAME ='code') THEN - ALTER TABLE t_ds_schedules ADD COLUMN `timezone_id` varchar(40) default NULL COMMENT 'schedule timezone id' AFTER `end_time`; + alter table t_ds_project add `code` bigint(20) COMMENT 'encoding' AFTER `name`; + -- update default value for not null + UPDATE t_ds_project SET code = id; + alter table t_ds_project modify `code` bigint(20) NOT NULL; END IF; END; d// delimiter ; -CALL uc_dolphin_T_t_ds_schedules_A_add_timezone(); -DROP PROCEDURE uc_dolphin_T_t_ds_schedules_A_add_timezone; +CALL uc_dolphin_T_t_ds_project_A_add_code(); +DROP PROCEDURE uc_dolphin_T_t_ds_project_A_add_code; + +-- ---------------------------- +-- Table structure for t_ds_plugin_define +-- ---------------------------- +SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); +DROP TABLE IF EXISTS `t_ds_plugin_define`; +CREATE TABLE `t_ds_plugin_define` ( + `id` int NOT NULL AUTO_INCREMENT, + `plugin_name` varchar(100) NOT NULL COMMENT 'the name of plugin eg: email', + `plugin_type` varchar(100) NOT NULL COMMENT 'plugin type . 
alert=alert plugin, job=job plugin', + `plugin_params` text COMMENT 'plugin params', + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `t_ds_plugin_define_UN` (`plugin_name`,`plugin_type`) +) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8; + +-- ---------------------------- +-- Table structure for t_ds_alert_plugin_instance +-- ---------------------------- +DROP TABLE IF EXISTS `t_ds_alert_plugin_instance`; +CREATE TABLE `t_ds_alert_plugin_instance` ( + `id` int NOT NULL AUTO_INCREMENT, + `plugin_define_id` int NOT NULL, + `plugin_instance_params` text COMMENT 'plugin instance params. Also contain the params value which user input in web ui.', + `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, + `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `instance_name` varchar(200) DEFAULT NULL COMMENT 'alert instance name', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for t_ds_environment -- ---------------------------- DROP TABLE IF EXISTS `t_ds_environment`; CREATE TABLE `t_ds_environment` ( - `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', - `code` bigint(20) DEFAULT NULL COMMENT 'encoding', - `name` varchar(100) NOT NULL COMMENT 'environment config name', - `config` text NULL DEFAULT NULL COMMENT 'this config contains many environment variables config', - `description` text NULL DEFAULT NULL COMMENT 'the details', - `operator` int(11) DEFAULT NULL COMMENT 'operator user id', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - UNIQUE KEY `environment_name_unique` (`name`), - UNIQUE KEY `environment_code_unique` (`code`) + `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', + `code` bigint(20) DEFAULT 
NULL COMMENT 'encoding', + `name` varchar(100) NOT NULL COMMENT 'environment name', + `config` text NULL DEFAULT NULL COMMENT 'this config contains many environment variables config', + `description` text NULL DEFAULT NULL COMMENT 'the details', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', + `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, + `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `environment_name_unique` (`name`), + UNIQUE KEY `environment_code_unique` (`code`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- ---------------------------- +-- Table structure for t_ds_environment_worker_group_relation +-- ---------------------------- +DROP TABLE IF EXISTS `t_ds_environment_worker_group_relation`; +CREATE TABLE `t_ds_environment_worker_group_relation` ( + `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', + `environment_code` bigint(20) NOT NULL COMMENT 'environment code', + `worker_group` varchar(255) NOT NULL COMMENT 'worker group id', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', + `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, + `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `environment_worker_group_unique` (`environment_code`,`worker_group`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- ---------------------------- +-- Table structure for t_ds_process_definition_log +-- ---------------------------- +DROP TABLE IF EXISTS `t_ds_process_definition_log`; +CREATE TABLE `t_ds_process_definition_log` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', + `code` bigint(20) NOT NULL COMMENT 'encoding', + `name` varchar(200) DEFAULT NULL COMMENT 'process definition name', + `version` int(11) DEFAULT '0' COMMENT 'process definition version', + `description` text COMMENT 'description', + `project_code` bigint(20) NOT NULL 
COMMENT 'project code', + `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online', + `user_id` int(11) DEFAULT NULL COMMENT 'process definition creator id', + `global_params` text COMMENT 'global parameters', + `flag` tinyint(4) DEFAULT NULL COMMENT '0 not available, 1 available', + `locations` text COMMENT 'Node location information', + `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id', + `timeout` int(11) DEFAULT '0' COMMENT 'time out,unit: minute', + `tenant_id` int(11) NOT NULL DEFAULT '-1' COMMENT 'tenant id', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', + `operate_time` datetime DEFAULT NULL COMMENT 'operate time', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -363,7 +257,7 @@ CREATE TABLE `t_ds_task_definition` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', - `version` int(11) DEFAULT NULL COMMENT 'task definition version', + `version` int(11) DEFAULT '0' COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', @@ -379,11 +273,10 @@ CREATE TABLE `t_ds_task_definition` ( `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', - `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', + `resource_ids` text COMMENT 'resource id, separated by comma', `create_time` datetime NOT NULL COMMENT 'create time', - 
`update_time` datetime DEFAULT NULL COMMENT 'update time', - PRIMARY KEY (`id`,`code`), - UNIQUE KEY `task_unique` (`name`,`project_code`) USING BTREE + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`,`code`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- @@ -394,12 +287,12 @@ CREATE TABLE `t_ds_task_definition_log` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', `code` bigint(20) NOT NULL COMMENT 'encoding', `name` varchar(200) DEFAULT NULL COMMENT 'task definition name', - `version` int(11) DEFAULT NULL COMMENT 'task definition version', + `version` int(11) DEFAULT '0' COMMENT 'task definition version', `description` text COMMENT 'description', `project_code` bigint(20) NOT NULL COMMENT 'project code', `user_id` int(11) DEFAULT NULL COMMENT 'task definition creator id', `task_type` varchar(50) NOT NULL COMMENT 'task type', - `task_params` text COMMENT 'job custom parameters', + `task_params` longtext COMMENT 'job custom parameters', `flag` tinyint(2) DEFAULT NULL COMMENT '0 not available, 1 available', `task_priority` tinyint(4) DEFAULT NULL COMMENT 'job priority', `worker_group` varchar(200) DEFAULT NULL COMMENT 'worker grouping', @@ -410,50 +303,112 @@ CREATE TABLE `t_ds_task_definition_log` ( `timeout_notify_strategy` tinyint(4) DEFAULT NULL COMMENT 'timeout notification policy: 0 warning, 1 fail', `timeout` int(11) DEFAULT '0' COMMENT 'timeout length,unit: minute', `delay_time` int(11) DEFAULT '0' COMMENT 'delay execution time,unit: minute', - `resource_ids` varchar(255) DEFAULT NULL COMMENT 'resource id, separated by comma', + `resource_ids` text DEFAULT NULL COMMENT 'resource id, separated by comma', `operator` int(11) DEFAULT NULL COMMENT 'operator user id', `operate_time` datetime DEFAULT NULL COMMENT 'operate time', `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time', + `update_time` datetime NOT NULL 
COMMENT 'update time', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -ALTER TABLE t_ds_command ADD COLUMN `environment_code` bigint(20) default '-1' COMMENT 'environment code' AFTER `worker_group`; -ALTER TABLE t_ds_error_command ADD COLUMN `environment_code` bigint(20) default '-1' COMMENT 'environment code' AFTER `worker_group`; -ALTER TABLE t_ds_schedules ADD COLUMN `environment_code` bigint(20) default '-1' COMMENT 'environment code' AFTER `worker_group`; -ALTER TABLE t_ds_process_instance ADD COLUMN `environment_code` bigint(20) default '-1' COMMENT 'environment code' AFTER `worker_group`; -ALTER TABLE t_ds_task_instance ADD COLUMN `environment_code` bigint(20) default '-1' COMMENT 'environment code' AFTER `worker_group`; -ALTER TABLE t_ds_task_instance ADD COLUMN `environment_config` text COMMENT 'environment config' AFTER `environment_code`; - -- ---------------------------- --- Table structure for t_ds_environment_worker_group_relation +-- Table structure for t_ds_process_task_relation -- ---------------------------- -DROP TABLE IF EXISTS `t_ds_environment_worker_group_relation`; -CREATE TABLE `t_ds_environment_worker_group_relation` ( - `id` bigint(11) NOT NULL AUTO_INCREMENT COMMENT 'id', - `environment_code` bigint(20) NOT NULL COMMENT 'environment code', - `worker_group` varchar(255) NOT NULL COMMENT 'worker group id', - `operator` int(11) DEFAULT NULL COMMENT 'operator user id', - `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`id`), - UNIQUE KEY `environment_worker_group_unique` (`environment_code`,`worker_group`) +DROP TABLE IF EXISTS `t_ds_process_task_relation`; +CREATE TABLE `t_ds_process_task_relation` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', + `name` varchar(200) DEFAULT NULL COMMENT 'relation name', + `project_code` bigint(20) NOT NULL COMMENT 'project code', + 
`process_definition_code` bigint(20) NOT NULL COMMENT 'process code', + `process_definition_version` int(11) NOT NULL COMMENT 'process version', + `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', + `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', + `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', + `post_task_version` int(11) NOT NULL COMMENT 'post task version', + `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', + `condition_params` text COMMENT 'condition params(json)', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; -- ---------------------------- --- These columns will not be used in the new version,if you determine that the historical data is useless, you can delete it using the sql below +-- Table structure for t_ds_process_task_relation_log -- ---------------------------- +DROP TABLE IF EXISTS `t_ds_process_task_relation_log`; +CREATE TABLE `t_ds_process_task_relation_log` ( + `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'self-increasing id', + `name` varchar(200) DEFAULT NULL COMMENT 'relation name', + `project_code` bigint(20) NOT NULL COMMENT 'project code', + `process_definition_code` bigint(20) NOT NULL COMMENT 'process code', + `process_definition_version` int(11) NOT NULL COMMENT 'process version', + `pre_task_code` bigint(20) NOT NULL COMMENT 'pre task code', + `pre_task_version` int(11) NOT NULL COMMENT 'pre task version', + `post_task_code` bigint(20) NOT NULL COMMENT 'post task code', + `post_task_version` int(11) NOT NULL COMMENT 'post task version', + `condition_type` tinyint(2) DEFAULT NULL COMMENT 'condition type : 0 none, 1 judge 2 delay', + `condition_params` text COMMENT 'condition params(json)', + `operator` int(11) DEFAULT NULL COMMENT 'operator user id', + `operate_time` datetime DEFAULT NULL COMMENT 'operate 
time', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'update time', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; --- ALTER TABLE t_ds_alert DROP `show_type`, DROP `alert_type`, DROP `receivers`, DROP `receivers_cc`; - --- ALTER TABLE t_ds_alertgroup DROP `group_type`; - --- ALTER TABLE t_ds_process_definition DROP `receivers`, DROP `receivers_cc`; - --- ALTER TABLE t_ds_process_definition_version DROP `receivers`, DROP `receivers_cc`; - --- DROP TABLE IF EXISTS t_ds_relation_user_alertgroup; - --- ALTER TABLE t_ds_command DROP `dependence`; - --- ALTER TABLE t_ds_error_command DROP `dependence`; +-- t_ds_command +alter table t_ds_command change process_definition_id process_definition_code bigint(20) NOT NULL COMMENT 'process definition code'; +alter table t_ds_command add environment_code bigint(20) DEFAULT '-1' COMMENT 'environment code' AFTER worker_group; +alter table t_ds_command add dry_run tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run' AFTER environment_code; +alter table t_ds_command add process_definition_version int(11) DEFAULT '0' COMMENT 'process definition version' AFTER process_definition_code; +alter table t_ds_command add process_instance_id int(11) DEFAULT '0' COMMENT 'process instance id' AFTER process_definition_version; +alter table t_ds_command add KEY `priority_id_index` (`process_instance_priority`,`id`) USING BTREE; + +-- t_ds_error_command +alter table t_ds_error_command change process_definition_id process_definition_code bigint(20) NOT NULL COMMENT 'process definition code'; +alter table t_ds_error_command add environment_code bigint(20) DEFAULT '-1' COMMENT 'environment code' AFTER worker_group; +alter table t_ds_error_command add dry_run tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run' AFTER message; +alter table t_ds_error_command add process_definition_version int(11) DEFAULT '0' COMMENT 'process definition version' 
AFTER process_definition_code; +alter table t_ds_error_command add process_instance_id int(11) DEFAULT '0' COMMENT 'process instance id' AFTER process_definition_version; + +-- t_ds_process_instance note: Data migration is not supported +alter table t_ds_process_instance change process_definition_id process_definition_code bigint(20) NOT NULL COMMENT 'process definition code'; +alter table t_ds_process_instance add process_definition_version int(11) DEFAULT '0' COMMENT 'process definition version' AFTER process_definition_code; +alter table t_ds_process_instance add environment_code bigint(20) DEFAULT '-1' COMMENT 'environment code' AFTER worker_group; +alter table t_ds_process_instance add var_pool longtext COMMENT 'var_pool' AFTER tenant_id; +alter table t_ds_process_instance add dry_run tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run' AFTER var_pool; +alter table t_ds_process_instance drop KEY `process_instance_index`; +alter table t_ds_process_instance add KEY `process_instance_index` (`process_definition_code`,`id`) USING BTREE; +alter table t_ds_process_instance drop process_instance_json; +alter table t_ds_process_instance drop locations; +alter table t_ds_process_instance drop connects; +alter table t_ds_process_instance drop dependence_schedule_times; + +-- t_ds_task_instance note: Data migration is not supported +alter table t_ds_task_instance change process_definition_id task_code bigint(20) NOT NULL COMMENT 'task definition code'; +alter table t_ds_task_instance add task_definition_version int(11) DEFAULT '0' COMMENT 'task definition version' AFTER task_code; +alter table t_ds_task_instance add task_params text COMMENT 'job custom parameters' AFTER app_link; +alter table t_ds_task_instance add environment_code bigint(20) DEFAULT '-1' COMMENT 'environment code' AFTER worker_group; +alter table t_ds_task_instance add environment_config text COMMENT 'this config contains many environment variables config' AFTER environment_code; +alter 
table t_ds_task_instance add first_submit_time datetime DEFAULT NULL COMMENT 'task first submit time' AFTER executor_id; +alter table t_ds_task_instance add delay_time int(4) DEFAULT '0' COMMENT 'task delay execution time' AFTER first_submit_time; +alter table t_ds_task_instance add var_pool longtext COMMENT 'var_pool' AFTER delay_time; +alter table t_ds_task_instance add dry_run tinyint(4) DEFAULT '0' COMMENT 'dry run flag:0 normal, 1 dry run' AFTER var_pool; +alter table t_ds_task_instance drop KEY `task_instance_index`; +alter table t_ds_task_instance drop task_json; + +-- t_ds_schedules +alter table t_ds_schedules change process_definition_id process_definition_code bigint(20) NOT NULL COMMENT 'process definition code'; +alter table t_ds_schedules add timezone_id varchar(40) DEFAULT NULL COMMENT 'timezoneId' AFTER end_time; +alter table t_ds_schedules add environment_code bigint(20) DEFAULT '-1' COMMENT 'environment code' AFTER worker_group; + +-- t_ds_process_definition +alter table t_ds_process_definition add `code` bigint(20) COMMENT 'encoding' AFTER `id`; +-- update default value for not null +UPDATE t_ds_process_definition SET code = id; +alter table t_ds_process_definition modify `code` bigint(20) NOT NULL; +alter table t_ds_process_definition change project_id project_code bigint(20) NOT NULL COMMENT 'project code' AFTER `description`; +alter table t_ds_process_definition add `warning_group_id` int(11) DEFAULT NULL COMMENT 'alert group id' AFTER `locations`; +alter table t_ds_process_definition add UNIQUE KEY `process_unique` (`name`,`project_code`) USING BTREE; +alter table t_ds_process_definition modify `description` text COMMENT 'description' after `version`; +alter table t_ds_process_definition modify `release_state` tinyint(4) DEFAULT NULL COMMENT 'process definition release state:0:offline,1:online' after `project_code`; +alter table t_ds_process_definition modify `create_time` datetime DEFAULT NULL COMMENT 'create time' after `tenant_id`; diff 
--git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl_post.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl_post.sql new file mode 100644 index 0000000000000000000000000000000000000000..dfde96209e4c305433e4759f4ca39c820be2a41f --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_ddl_post.sql @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +alter table t_ds_process_definition drop primary key, ADD PRIMARY KEY (`id`,`code`); +ALTER TABLE t_ds_process_definition drop KEY `process_definition_unique`; +ALTER TABLE t_ds_process_definition drop KEY `process_definition_index`; +alter table t_ds_process_definition drop process_definition_json; +alter table t_ds_process_definition drop connects; +alter table t_ds_process_definition drop receivers; +alter table t_ds_process_definition drop receivers_cc; +alter table t_ds_process_definition drop modify_by; +alter table t_ds_process_definition drop resource_ids; \ No newline at end of file diff --git a/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/mysql/dolphinscheduler_dml.sql diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..deca2d49178d60ad1582d80d1986c69707317fc9 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,313 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE + v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + + --- rename columns + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_command RENAME COLUMN process_definition_id to process_definition_code'; + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_error_command RENAME COLUMN process_definition_id to process_definition_code'; + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_process_instance RENAME COLUMN process_definition_id to process_definition_code'; + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_task_instance RENAME COLUMN process_definition_id to task_code'; + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_schedules RENAME COLUMN process_definition_id to process_definition_code'; + EXECUTE 'ALTER TABLE IF EXISTS ' || quote_ident(v_schema) ||'.t_ds_process_definition RENAME COLUMN project_id to project_code'; + + --- alter column type + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_command ALTER COLUMN process_definition_code TYPE bigint'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_error_command ALTER COLUMN process_definition_code TYPE bigint'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ALTER COLUMN process_definition_code TYPE bigint'; + EXECUTE 'ALTER TABLE ' || 
quote_ident(v_schema) ||'.t_ds_task_instance ALTER COLUMN task_code TYPE bigint'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_schedules ALTER COLUMN process_definition_code TYPE bigint'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_definition ALTER COLUMN project_code TYPE bigint'; + + --- add columns + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_user ADD COLUMN IF NOT EXISTS "state" int DEFAULT 1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_alertgroup ADD COLUMN IF NOT EXISTS "alert_instance_ids" varchar(255) DEFAULT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_alertgroup ADD COLUMN IF NOT EXISTS "create_user_id" int4 DEFAULT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_project ADD COLUMN IF NOT EXISTS "code" bigint NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_command ADD COLUMN IF NOT EXISTS "environment_code" bigint DEFAULT -1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_command ADD COLUMN IF NOT EXISTS "dry_run" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_command ADD COLUMN IF NOT EXISTS "process_definition_version" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_command ADD COLUMN IF NOT EXISTS "process_instance_id" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_error_command ADD COLUMN IF NOT EXISTS "environment_code" bigint DEFAULT -1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_error_command ADD COLUMN IF NOT EXISTS "dry_run" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_error_command ADD COLUMN IF NOT EXISTS "process_definition_version" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_error_command ADD COLUMN IF NOT EXISTS "process_instance_id" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT 
EXISTS "process_definition_version" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT EXISTS "environment_code" bigint DEFAULT -1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT EXISTS "var_pool" text'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT EXISTS "dry_run" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "task_definition_version" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "task_params" text'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "environment_code" bigint DEFAULT -1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "environment_config" text'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "first_submit_time" timestamp DEFAULT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "delay_time" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "var_pool" text'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance ADD COLUMN IF NOT EXISTS "dry_run" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_schedules ADD COLUMN IF NOT EXISTS "timezone_id" varchar(40) DEFAULT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_schedules ADD COLUMN IF NOT EXISTS "environment_code" bigint DEFAULT -1'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_definition ADD COLUMN IF NOT EXISTS "code" bigint'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_definition ADD COLUMN IF NOT EXISTS "warning_group_id" int'; + + --update
default value for not null + EXECUTE 'UPDATE ' || quote_ident(v_schema) ||'.t_ds_process_definition SET code = id'; + EXECUTE 'UPDATE ' || quote_ident(v_schema) ||'.t_ds_project SET code = id'; + + ---drop columns + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_tenant DROP COLUMN IF EXISTS "tenant_name"'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance DROP COLUMN IF EXISTS "process_instance_json"'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance DROP COLUMN IF EXISTS "locations"'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance DROP COLUMN IF EXISTS "connects"'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance DROP COLUMN IF EXISTS "dependence_schedule_times"'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_task_instance DROP COLUMN IF EXISTS "task_json"'; + + -- add CONSTRAINT + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_alertgroup" ADD CONSTRAINT "t_ds_alertgroup_name_un" UNIQUE ("group_name")'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_datasource" ADD CONSTRAINT "t_ds_datasource_name_un" UNIQUE ("name","type")'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_command" ALTER COLUMN "process_definition_code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_process_instance" ALTER COLUMN "process_definition_code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_task_instance" ALTER COLUMN "task_code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_schedules" ALTER COLUMN "process_definition_code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_process_definition" ALTER COLUMN "code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_process_definition" ALTER COLUMN "project_code" SET NOT NULL'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) 
||'."t_ds_process_definition" ADD CONSTRAINT "process_unique" UNIQUE ("name","project_code")'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'."t_ds_project" ALTER COLUMN "code" SET NOT NULL'; + + --- drop index + EXECUTE 'DROP INDEX IF EXISTS "process_instance_index"'; + EXECUTE 'DROP INDEX IF EXISTS "task_instance_index"'; + + --- create index + EXECUTE 'CREATE INDEX IF NOT EXISTS priority_id_index ON ' || quote_ident(v_schema) ||'.t_ds_command USING Btree("process_instance_priority","id")'; + EXECUTE 'CREATE INDEX IF NOT EXISTS process_instance_index ON ' || quote_ident(v_schema) ||'.t_ds_process_instance USING Btree("process_definition_code","id")'; + + ---add comment + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_user.state is ''state 0:disable 1:enable'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_alertgroup.alert_instance_ids is ''alert instance ids'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_alertgroup.create_user_id is ''create user id'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_project.code is ''coding'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_command.process_definition_code is ''process definition code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_command.environment_code is ''environment code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_command.dry_run is ''dry run flag:0 normal, 1 dry run'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_command.process_definition_version is ''process definition version'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_command.process_instance_id is ''process instance id'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_error_command.process_definition_code is ''process definition code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_error_command.environment_code is 
''environment code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_error_command.dry_run is ''dry run flag:0 normal, 1 dry run'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_error_command.process_definition_version is ''process definition version'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_error_command.process_instance_id is ''process instance id'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_instance.process_definition_code is ''process instance code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_instance.process_definition_version is ''process instance version'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_instance.environment_code is ''environment code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_instance.var_pool is ''var pool'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_instance.dry_run is ''dry run flag:0 normal, 1 dry run'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.task_code is ''task definition code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.task_definition_version is ''task definition version'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.task_params is ''task params'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.environment_code is ''environment code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.environment_config is ''this config contains many environment variables config'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.first_submit_time is ''task first submit time'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.delay_time is ''task delay execution time'''; + EXECUTE 'comment on 
column ' || quote_ident(v_schema) ||'.t_ds_task_instance.var_pool is ''var pool'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_task_instance.dry_run is ''dry run flag:0 normal, 1 dry run'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_schedules.process_definition_code is ''process definition code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_schedules.timezone_id is ''timezone id'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_schedules.environment_code is ''environment code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_definition.code is ''encoding'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_definition.project_code is ''project code'''; + EXECUTE 'comment on column ' || quote_ident(v_schema) ||'.t_ds_process_definition.warning_group_id is ''alert group id'''; + + --create table + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_plugin_define" ( + id serial NOT NULL, + plugin_name varchar(100) NOT NULL, + plugin_type varchar(100) NOT NULL, + plugin_params text NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_plugin_define_pk PRIMARY KEY (id), + CONSTRAINT t_ds_plugin_define_un UNIQUE (plugin_name, plugin_type) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_alert_plugin_instance" ( + id serial NOT NULL, + plugin_define_id int4 NOT NULL, + plugin_instance_params text NULL, + create_time timestamp NULL, + update_time timestamp NULL, + instance_name varchar(200) NULL, + CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_environment" ( + id serial NOT NULL, + code bigint NOT NULL, + name varchar(100) DEFAULT NULL, + config text DEFAULT NULL, + description text, + operator int DEFAULT NULL, + create_time timestamp DEFAULT NULL, + update_time 
timestamp DEFAULT NULL, + PRIMARY KEY (id), + CONSTRAINT environment_name_unique UNIQUE (name), + CONSTRAINT environment_code_unique UNIQUE (code) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_environment_worker_group_relation" ( + id serial NOT NULL, + environment_code bigint NOT NULL, + worker_group varchar(255) NOT NULL, + operator int DEFAULT NULL, + create_time timestamp DEFAULT NULL, + update_time timestamp DEFAULT NULL, + PRIMARY KEY (id) , + CONSTRAINT environment_worker_group_unique UNIQUE (environment_code,worker_group) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_process_definition_log" ( + id int NOT NULL , + code bigint NOT NULL, + name varchar(255) DEFAULT NULL , + version int NOT NULL , + description text , + project_code bigint DEFAULT NULL , + release_state int DEFAULT NULL , + user_id int DEFAULT NULL , + global_params text , + locations text , + warning_group_id int DEFAULT NULL , + flag int DEFAULT NULL , + timeout int DEFAULT 0 , + tenant_id int DEFAULT -1 , + execution_type int DEFAULT 0, + operator int DEFAULT NULL , + operate_time timestamp DEFAULT NULL , + create_time timestamp DEFAULT NULL , + update_time timestamp DEFAULT NULL , + PRIMARY KEY (id) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_task_definition" ( + id int NOT NULL , + code bigint NOT NULL, + name varchar(255) DEFAULT NULL , + version int NOT NULL , + description text , + project_code bigint DEFAULT NULL , + user_id int DEFAULT NULL , + task_type varchar(50) DEFAULT NULL , + task_params text , + flag int DEFAULT NULL , + task_priority int DEFAULT NULL , + worker_group varchar(255) DEFAULT NULL , + environment_code bigint DEFAULT -1, + fail_retry_times int DEFAULT NULL , + fail_retry_interval int DEFAULT NULL , + timeout_flag int DEFAULT NULL , + timeout_notify_strategy int DEFAULT NULL , + timeout int DEFAULT 0 , + delay_time int DEFAULT 0 , + resource_ids text , + 
create_time timestamp DEFAULT NULL , + update_time timestamp DEFAULT NULL , + PRIMARY KEY (id) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_task_definition_log" ( + id int NOT NULL , + code bigint NOT NULL, + name varchar(255) DEFAULT NULL , + version int NOT NULL , + description text , + project_code bigint DEFAULT NULL , + user_id int DEFAULT NULL , + task_type varchar(50) DEFAULT NULL , + task_params text , + flag int DEFAULT NULL , + task_priority int DEFAULT NULL , + worker_group varchar(255) DEFAULT NULL , + environment_code bigint DEFAULT -1, + fail_retry_times int DEFAULT NULL , + fail_retry_interval int DEFAULT NULL , + timeout_flag int DEFAULT NULL , + timeout_notify_strategy int DEFAULT NULL , + timeout int DEFAULT 0 , + delay_time int DEFAULT 0 , + resource_ids text , + operator int DEFAULT NULL , + operate_time timestamp DEFAULT NULL , + create_time timestamp DEFAULT NULL , + update_time timestamp DEFAULT NULL , + PRIMARY KEY (id) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_process_task_relation" ( + id int NOT NULL , + name varchar(255) DEFAULT NULL , + project_code bigint DEFAULT NULL , + process_definition_code bigint DEFAULT NULL , + process_definition_version int DEFAULT NULL , + pre_task_code bigint DEFAULT NULL , + pre_task_version int DEFAULT 0 , + post_task_code bigint DEFAULT NULL , + post_task_version int DEFAULT 0 , + condition_type int DEFAULT NULL , + condition_params text , + create_time timestamp DEFAULT NULL , + update_time timestamp DEFAULT NULL , + PRIMARY KEY (id) + )'; + + EXECUTE 'CREATE TABLE IF NOT EXISTS '|| quote_ident(v_schema) ||'."t_ds_process_task_relation_log" ( + id int NOT NULL , + name varchar(255) DEFAULT NULL , + project_code bigint DEFAULT NULL , + process_definition_code bigint DEFAULT NULL , + process_definition_version int DEFAULT NULL , + pre_task_code bigint DEFAULT NULL , + pre_task_version int DEFAULT 0 , + post_task_code bigint 
DEFAULT NULL , + post_task_version int DEFAULT 0 , + condition_type int DEFAULT NULL , + condition_params text , + operator int DEFAULT NULL , + operate_time timestamp DEFAULT NULL , + create_time timestamp DEFAULT NULL , + update_time timestamp DEFAULT NULL , + PRIMARY KEY (id) + )'; + return 'Success!'; + exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl_post.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl_post.sql new file mode 100644 index 0000000000000000000000000000000000000000..728b6bd239a86783cbc32ebde0acb4c7dc5f83ca --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_ddl_post.sql @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +ALTER TABLE "t_ds_process_definition" DROP CONSTRAINT "t_ds_process_definition_pkey"; +ALTER TABLE "t_ds_process_definition" ADD CONSTRAINT "t_ds_process_definition_pkey" PRIMARY KEY ("id","code"); +ALTER TABLE "t_ds_process_definition" DROP CONSTRAINT "process_definition_unique"; +DROP INDEX "process_definition_index"; +ALTER TABLE "t_ds_process_definition" DROP "process_definition_json"; +ALTER TABLE "t_ds_process_definition" DROP "connects"; +ALTER TABLE "t_ds_process_definition" DROP "receivers"; +ALTER TABLE "t_ds_process_definition" DROP "receivers_cc"; +ALTER TABLE "t_ds_process_definition" DROP "modify_by"; +ALTER TABLE "t_ds_process_definition" DROP "resource_ids"; \ No newline at end of file diff --git a/sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_dml.sql similarity index 100% rename from sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_dml.sql rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.0_schema/postgresql/dolphinscheduler_dml.sql diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..bbc3d45b39e30f89f22a1b342ec859613d5c92ca --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); + +alter table t_ds_process_instance drop KEY `start_time_index`; +alter table t_ds_process_instance add KEY `start_time_index` (`start_time`,`end_time`) USING BTREE; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..e2aba5acb6913fac9689071deae322cf776f68e2 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.1_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE + v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + + EXECUTE 'DROP INDEX IF EXISTS "start_time_index"'; + EXECUTE 'CREATE INDEX IF NOT EXISTS start_time_index ON ' || quote_ident(v_schema) ||'.t_ds_process_instance USING Btree("start_time","end_time")'; + + return 'Success!'; + exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryPluginTest.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_ddl.sql similarity index 33% rename from dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryPluginTest.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_ddl.sql index a35252c230070bfc81062a812ed0b16337bb9191..89d5c53c59d9ae9691d63420197281f7f7f02cc5 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryPluginTest.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_ddl.sql @@ -13,33 +13,48 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.service.registry; - -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.register.RegistryPluginManager; - -import org.junit.Assert; -import org.junit.Test; - -import com.google.common.collect.ImmutableList; - -public class RegistryPluginTest { - - @Test - public void testLoadPlugin() throws Exception { - DolphinPluginManagerConfig registryPluginManagerConfig = new DolphinPluginManagerConfig(); - String path = DolphinPluginLoader.class.getClassLoader().getResource("").getPath(); - - String registryPluginZkPath = path + "../../../dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml"; - registryPluginManagerConfig.setPlugins(registryPluginZkPath); - RegistryPluginManager registryPluginManager = new RegistryPluginManager("zookeeper"); - - DolphinPluginLoader registryPluginLoader = new DolphinPluginLoader(registryPluginManagerConfig, ImmutableList.of(registryPluginManager)); - registryPluginLoader.loadPlugins(); - Assert.assertNotNull(registryPluginManager.getRegistry()); - - } -} +*/ + +SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); + + +-- uc_dolphin_T_t_ds_process_instance_A_restart_time +drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_instance_A_restart_time; +delimiter d// +CREATE PROCEDURE uc_dolphin_T_t_ds_process_instance_A_restart_time() + BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_process_instance' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME ='restart_time') + THEN + ALTER TABLE t_ds_process_instance ADD COLUMN `restart_time` datetime DEFAULT NULL COMMENT 'process instance restart time'; + END IF; + END; + +d// + +delimiter ; +CALL uc_dolphin_T_t_ds_process_instance_A_restart_time(); +DROP PROCEDURE uc_dolphin_T_t_ds_process_instance_A_restart_time; + + +-- 
uc_dolphin_T_t_ds_process_task_relation_A_pc_pd_index +drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_process_task_relation_A_pc_pd_index; +delimiter d// +CREATE PROCEDURE uc_dolphin_T_t_ds_process_task_relation_A_pc_pd_index() +BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS + WHERE TABLE_NAME='t_ds_process_task_relation' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND INDEX_NAME ='project_code_process_definition_code_index') + THEN +ALTER TABLE `t_ds_process_task_relation` ADD KEY `project_code_process_definition_code_index`(`project_code`,`process_definition_code`) USING BTREE; +END IF; +END; + +d// + +delimiter ; +CALL uc_dolphin_T_t_ds_process_task_relation_A_pc_pd_index(); +DROP PROCEDURE uc_dolphin_T_t_ds_process_task_relation_A_pc_pd_index; \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectListener.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_dml.sql similarity index 84% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectListener.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_dml.sql index 83385f8998ce9f71e10d15b42e7841e6aef4b341..38964cc551acb5332cd354d2404255d0278c49ff 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectListener.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/mysql/dolphinscheduler_dml.sql @@ -13,11 +13,4 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.register; - -public interface RegistryConnectListener { - - void notify(RegistryConnectState newState); -} +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..75be01f0c16397f1591ae8bfb257009f0612ad85 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE +v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + +EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT EXISTS "restart_time" timestamp DEFAULT NULL'; + +EXECUTE 'CREATE INDEX IF NOT EXISTS project_code_process_definition_code_index ON ' || quote_ident(v_schema) ||'.t_ds_process_task_relation USING Btree("project_code","process_definition_code")'; + +return 'Success!'; +exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertConstants.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_dml.sql similarity index 80% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertConstants.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_dml.sql index b1eee2a0ba0295ceb24d70d1e5366d34527cd44c..38964cc551acb5332cd354d2404255d0278c49ff 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertConstants.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.2_schema/postgresql/dolphinscheduler_dml.sql @@ -13,12 +13,4 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.alert; - -public class AlertConstants { - - /** the field name of alert show type **/ - public static final String SHOW_TYPE = "show_type"; -} +*/ \ No newline at end of file diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertPlugin.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_ddl.sql similarity index 62% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertPlugin.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_ddl.sql index e71be3e2bde105640cf1eba02c2829ac3841af29..a95858d936c60cd960d18bccf8cd450b6bf80398 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuAlertPlugin.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_ddl.sql @@ -13,18 +13,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ +*/ -package org.apache.dolphinscheduler.plugin.alert.feishu; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class FeiShuAlertPlugin implements DolphinSchedulerPlugin { - @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new FeiShuAlertChannelFactory()); - } -} +ALTER TABLE `t_ds_task_instance` MODIFY COLUMN `task_params` longtext COMMENT 'job custom parameters' AFTER `app_link`; +ALTER TABLE `t_ds_process_task_relation` ADD KEY `idx_code` (`project_code`, `process_definition_code`) USING BTREE; +ALTER TABLE `t_ds_process_task_relation_log` ADD KEY `idx_process_code_version` (`process_definition_code`,`process_definition_version`) USING BTREE; +ALTER TABLE `t_ds_task_definition_log` ADD INDEX `idx_code_version` (`code`,`version`) USING BTREE; \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..a828a1aa4d530997422e5f9f9d8eed411ec51099 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE +v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + +EXECUTE 'DROP INDEX IF EXISTS "idx_code_relation"'; +EXECUTE 'DROP INDEX IF EXISTS "idx_process_code_version_relation_log"'; +EXECUTE 'DROP INDEX IF EXISTS "idx_code_version_task_log"'; +EXECUTE 'CREATE INDEX IF NOT EXISTS idx_code_relation ON ' || quote_ident(v_schema) ||'.t_ds_process_task_relation USING Btree("project_code","process_definition_code")'; +EXECUTE 'CREATE INDEX IF NOT EXISTS idx_process_code_version_relation_log ON ' || quote_ident(v_schema) ||'.t_ds_process_task_relation_log USING Btree("process_definition_code","process_definition_version")'; +EXECUTE 'CREATE INDEX IF NOT EXISTS idx_code_version_task_log ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("code","version")'; + +EXECUTE 'ALTER TABLE t_ds_resources alter COLUMN is_directory TYPE bool using (is_directory::bool)'; +EXECUTE 'ALTER TABLE t_ds_resources alter COLUMN is_directory SET DEFAULT FALSE'; + +return 'Success!'; +exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.3_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..996e009dc2aaddafbe1d3a9ca1ede21a9631804d --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +ALTER TABLE `t_ds_task_instance` ADD INDEX `idx_code_version` (`task_code`, `task_definition_version`) USING BTREE; +ALTER TABLE `t_ds_task_definition_log` ADD INDEX `idx_project_code` (`project_code`) USING BTREE; \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_ddl.sql similarity index 47% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_ddl.sql index b994afb5f56b88cf6ff825ddab17ee54c8735c91..6d5c3c0f7e5d0868e8acc71b3fd973d7f55dd443 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_ddl.sql @@ -13,48 +13,35 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.enums; - -import static java.util.stream.Collectors.toMap; - -import java.util.Arrays; -import java.util.Map; - -import com.baomidou.mybatisplus.annotation.EnumValue; -import com.google.common.base.Functions; - -public enum DbType { - MYSQL(0), - POSTGRESQL(1), - HIVE(2), - SPARK(3), - CLICKHOUSE(4), - ORACLE(5), - SQLSERVER(6), - DB2(7), - PRESTO(8), - H2(9); - - DbType(int code) { - this.code = code; - } - - @EnumValue - private final int code; - - public int getCode() { - return code; - } - - private static final Map DB_TYPE_MAP = - Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, Functions.identity())); - - public static DbType of(int type) { - if (DB_TYPE_MAP.containsKey(type)) { - return DB_TYPE_MAP.get(type); - } - return null; - } -} +*/ + +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE +v_schema varchar; 
+BEGIN + ---get schema name + v_schema =current_schema(); + +EXECUTE 'DROP INDEX IF EXISTS "idx_task_definition_log_project_code"'; +EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_definition_log_project_code ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("project_code")'; + +EXECUTE 'DROP INDEX IF EXISTS "idx_task_instance_code_version"'; +EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_instance_code_version ON' || quote_ident(v_schema) ||'.t_ds_task_instance USING Btree("task_code","task_definition_version")'; + +return 'Success!'; +exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.4_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..38964cc551acb5332cd354d2404255d0278c49ff --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.5_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..45f8acd4da5f8a0d642a12a6c0cd18cdc282711a --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); + +-- uc_dolphin_T_t_ds_resources_R_full_name +drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_resources_R_full_name; +delimiter d// +CREATE PROCEDURE uc_dolphin_T_t_ds_resources_R_full_name() +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_resources' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME ='full_name') + THEN +ALTER TABLE t_ds_resources MODIFY COLUMN `full_name` varchar(128); +END IF; +END; + +d// + +delimiter ; +CALL uc_dolphin_T_t_ds_resources_R_full_name; +DROP PROCEDURE uc_dolphin_T_t_ds_resources_R_full_name; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..14a20fcd8e7b5e63a7b25f311a9e5454aa41b3f5 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE + v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + + --- alter column + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_resources ALTER COLUMN full_name Type varchar(128)'; + + return 'Success!'; + exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ConnectStateListener.java b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_dml.sql similarity index 84% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ConnectStateListener.java rename to dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_dml.sql index 6675ef60f128c52ac63fb17e0016c747c170b73b..5f26e3515d6775a3fb948aa5db70954eb83b4a1e 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ConnectStateListener.java +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.6_schema/postgresql/dolphinscheduler_dml.sql @@ -13,11 +13,5 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */ +*/ -package org.apache.dolphinscheduler.spi.register; - -public interface ConnectStateListener { - - void notify(RegistryConnectState state); -} diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.7_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.8_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..cf451ddd75e667c80d6d3c294848e3834137366b --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +drop PROCEDURE if EXISTS t_ds_process_definition_add_column; +delimiter d// +CREATE PROCEDURE t_ds_process_definition_add_column() +BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_process_definition' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME='execution_type') + THEN +ALTER TABLE t_ds_process_definition ADD COLUMN `execution_type` tinyint(4) DEFAULT '0' COMMENT 'execution_type 0:parallel,1:serial wait,2:serial discard,3:serial priority'; +END IF; +END; + d// + delimiter ; +CALL t_ds_process_definition_add_column; +DROP PROCEDURE t_ds_process_definition_add_column; + + +-- t_ds_process_definition_log_add_column +drop PROCEDURE if EXISTS t_ds_process_definition_log_add_column; +delimiter d// +CREATE PROCEDURE t_ds_process_definition_log_add_column() +BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_process_definition_log' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME='execution_type') + THEN +ALTER TABLE t_ds_process_definition_log ADD COLUMN `execution_type` tinyint(4) DEFAULT '0' COMMENT 'execution_type 0:parallel,1:serial wait,2:serial discard,3:serial priority'; +END IF; +END; + d// + delimiter ; +CALL t_ds_process_definition_log_add_column; +DROP PROCEDURE t_ds_process_definition_log_add_column; + + +-- t_ds_process_instance_add_column +drop PROCEDURE if EXISTS t_ds_process_instance_add_column; +delimiter d// +CREATE PROCEDURE t_ds_process_instance_add_column() +BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_process_instance' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME='next_process_instance_id') + THEN +ALTER TABLE t_ds_process_instance ADD COLUMN `next_process_instance_id` int(11) DEFAULT '0' COMMENT 'serial queue next processInstanceId'; +END IF; +END; + d// + delimiter ; +CALL t_ds_process_instance_add_column; +DROP PROCEDURE t_ds_process_instance_add_column; diff --git 
a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000000000000000000000000000000000..82bd54e9a02535e015b5e6945371740357f7680a --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_update_metadata( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE +v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + + --- add missing columns, https://github.com/apache/dolphinscheduler/pull/13901 + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_definition ADD COLUMN IF NOT EXISTS "execution_type" int DEFAULT 0'; + EXECUTE 'ALTER TABLE ' || quote_ident(v_schema) ||'.t_ds_process_instance ADD COLUMN IF NOT EXISTS "next_process_instance_id" int DEFAULT 0'; + +return 'Success!'; +exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_update_metadata(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000000000000000000000000000000000..4a14f326b985fdbdba5cb08b9d7822d5bc8d4225 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.0.9_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java index 7b9e8c6f35cb6aa8f44b3c9dfa9ae0b17243b4d9..0a2ca669cb5494a69abcb98ed0db46dc93a69bd9 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java @@ -18,20 +18,31 @@ package org.apache.dolphinscheduler.dao; import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.ProfileType; import org.apache.dolphinscheduler.dao.entity.Alert; - -import java.util.List; - import org.junit.Assert; import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.junit4.SpringRunner; import org.springframework.transaction.annotation.Transactional; +import java.util.List; + +@ActiveProfiles(ProfileType.H2) +@RunWith(SpringRunner.class) +@SpringBootTest 
+@SpringBootApplication @Transactional public class AlertDaoTest { + @Autowired + private AlertDao alertDao; @Test public void testAlertDao() { - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); Alert alert = new Alert(); alert.setTitle("Mysql Exception"); alert.setContent("[\"alarm time:2018-02-05\", \"service name:MYSQL_ALTER\", \"alarm name:MYSQL_ALTER_DUMP\", " @@ -40,23 +51,22 @@ public class AlertDaoTest { alert.setAlertStatus(AlertStatus.WAIT_EXECUTION); alertDao.addAlert(alert); - List alerts = alertDao.listWaitExecutionAlert(); + List alerts = alertDao.listPendingAlerts(); Assert.assertNotNull(alerts); Assert.assertNotEquals(0, alerts.size()); } @Test public void testSendServerStopedAlert() { - AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); int alertGroupId = 1; String host = "127.0.0.998165432"; String serverType = "Master"; alertDao.sendServerStopedAlert(alertGroupId, host, serverType); alertDao.sendServerStopedAlert(alertGroupId, host, serverType); - long count = alertDao.listWaitExecutionAlert() - .stream() - .filter(alert -> alert.getContent().contains(host)) - .count(); + long count = alertDao.listPendingAlerts() + .stream() + .filter(alert -> alert.getContent().contains(host)) + .count(); Assert.assertEquals(1L, count); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~HEAD b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java similarity index 59% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~HEAD rename to dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java index a96cec9158d302f6eeab96d34d167c69343235bf..5838ad7874ff5d5a1c17f00788ebb9de78d14781 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~HEAD +++ 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/BaseDaoTest.java @@ -15,31 +15,21 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.api.security; +package org.apache.dolphinscheduler.dao; -import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.common.enums.ProfileType; -import org.junit.Assert; -import org.junit.Test; import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.TestPropertySource; +import org.springframework.test.annotation.Rollback; +import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; @RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -@TestPropertySource(properties = { - "security.authentication.type=LDAP", -}) -public class SecurityConfigLDAPTest { - - @Autowired - private SecurityConfig securityConfig; - - @Test - public void testAuthenticator() { - Authenticator authenticator = securityConfig.authenticator(); - Assert.assertNotNull(authenticator); - } +@SpringBootTest +@ActiveProfiles(value = ProfileType.H2) +@Transactional +@Rollback +public abstract class BaseDaoTest { } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java index fb73eaf9a1b9539a77c75b5394df5cdfb457bb09..9e938bbf672e653e75272cb3a299027d57f4bfb4 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java @@ -75,7 +75,7 @@ public class TaskInstanceTest { List dependTaskList = new ArrayList<>(); 
List dependentItems = new ArrayList<>(); DependentItem dependentItem = new DependentItem(); - dependentItem.setDepTasks("A"); + dependentItem.setDepTaskCode(111L); dependentItem.setDefinitionCode(222L); dependentItem.setCycle("today"); dependentItems.add(dependentItem); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java index dc9dbeebe7c0a034b30861f38a4d3cb1315994c9..0266ac0eb2a1ae79ac36d4ca9ed0102d925328b0 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java @@ -147,7 +147,7 @@ public class CommandMapperTest { createCommand(CommandType.START_PROCESS, processDefinition.getCode()); - Command actualCommand = commandMapper.getOneToRun(); + List actualCommand = commandMapper.queryCommandPage(1,0); assertNotNull(actualCommand); } @@ -259,6 +259,8 @@ public class CommandMapperTest { command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); + command.setProcessInstanceId(0); + command.setProcessDefinitionVersion(0); commandMapper.insert(command); return command; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java index a45c520724e43a7356ef87ae0fa15ecac74ceb8d..f8651af14eb0bbbb041dedb308a77fbc92295581 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java @@ -25,12 +25,12 @@ import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.DatasourceUser; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Arrays; import java.util.Date; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineTest.java index d8636a6fbcb2c365ac57336d89e9a93dddf08682..c3b93448a179e56c3ade5667e437f94f93f600c2 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineTest.java @@ -17,28 +17,18 @@ package org.apache.dolphinscheduler.dao.mapper; +import org.apache.dolphinscheduler.dao.BaseDaoTest; import org.apache.dolphinscheduler.dao.entity.PluginDefine; - -import java.util.List; - import org.junit.Assert; import org.junit.Test; -import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.annotation.Rollback; -import org.springframework.test.context.junit4.SpringRunner; -import org.springframework.transaction.annotation.Transactional; -@RunWith(SpringRunner.class) -@SpringBootTest -@Transactional -@Rollback(true) +import java.util.List; -public class PluginDefineTest { +public class PluginDefineTest extends BaseDaoTest { @Autowired - PluginDefineMapper pluginDefineMapper; + private PluginDefineMapper pluginDefineMapper; @Test public void 
testQueryAllPluginDefineList() { @@ -58,10 +48,10 @@ public class PluginDefineTest { @Test public void testQueryByNameAndType() { PluginDefine pluginDefine = createPluginDefine(); - List pluginDefines = pluginDefineMapper.queryByNameAndType(pluginDefine.getPluginName(), pluginDefine.getPluginType()); - Assert.assertTrue(pluginDefines.size() > 0); - Assert.assertEquals(pluginDefines.get(0).getPluginType(), pluginDefine.getPluginType()); - Assert.assertEquals(pluginDefines.get(0).getPluginName(), pluginDefine.getPluginName()); + PluginDefine pluginDefineSaved = pluginDefineMapper.queryByNameAndType(pluginDefine.getPluginName(), pluginDefine.getPluginType()); + Assert.assertNotNull(pluginDefineSaved); + Assert.assertEquals(pluginDefineSaved.getPluginType(), pluginDefine.getPluginType()); + Assert.assertEquals(pluginDefineSaved.getPluginName(), pluginDefine.getPluginName()); } /** diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapperTest.java index f0981b0c8dde2c30e871cb9b3fcf155220eb76d0..5ce86b13d1234c32f8f2bd0a0a68ab556d61fdeb 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionLogMapperTest.java @@ -159,7 +159,7 @@ public class ProcessDefinitionLogMapperTest { public void testQueryProcessDefinitionVersionsPaging() { insertOne(); Page page = new Page(1, 3); - IPage processDefinitionLogs = processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, 1L); + IPage processDefinitionLogs = processDefinitionLogMapper.queryProcessDefinitionVersionsPaging(page, 1L,1L); Assert.assertNotEquals(processDefinitionLogs.getTotal(), 0); } diff --git 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java index 1eb9cf7b96ce51387e291bed2690968eb017ea87..b8ee5d5f6c0f0af9cbde1d82038f49f41c481612 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -353,7 +353,6 @@ public class ProcessDefinitionMapperTest { @Test public void listResourcesTest() { ProcessDefinition processDefinition = insertOne(); - processDefinition.setResourceIds("3,5"); processDefinition.setReleaseState(ReleaseState.ONLINE); List> maps = processDefinitionMapper.listResources(); Assert.assertNotNull(maps); @@ -362,7 +361,6 @@ public class ProcessDefinitionMapperTest { @Test public void listResourcesByUserTest() { ProcessDefinition processDefinition = insertOne(); - processDefinition.setResourceIds("3,5"); processDefinition.setReleaseState(ReleaseState.ONLINE); List> maps = processDefinitionMapper.listResourcesByUser(processDefinition.getUserId()); Assert.assertNotNull(maps); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java index a492beba927cc155589829f834f3b92a91b7fbba..613bb8bcbc79cdf4fd21d158d27dc6ad813de47b 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java @@ -393,7 +393,7 @@ public class ProcessInstanceMapperTest { ProcessInstance processInstance3 = insertOne(startTime3, endTime3); Date start = new Date(2020, 1, 1, 1, 1, 1); Date end = new Date(2021, 1, 1, 
1, 1, 1); - List processInstances = processInstanceMapper.queryTopNProcessInstance(2, start, end, ExecutionStatus.SUCCESS); + List processInstances = processInstanceMapper.queryTopNProcessInstance(2, start, end, ExecutionStatus.SUCCESS,0L); Assert.assertEquals(2, processInstances.size()); Assert.assertTrue(isSortedByDuration(processInstances)); for (ProcessInstance processInstance : processInstances) { diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java index d32095d2a9531b340adb091315b25fb3e2708f8f..dad9638b8bc73d9862cf2b8c4d6c65da6e9a6463 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java @@ -23,10 +23,10 @@ import static org.hamcrest.Matchers.greaterThan; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; +import org.apache.commons.collections.CollectionUtils; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.ResourcesUser; import org.apache.dolphinscheduler.dao.entity.Tenant; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java index 46f0b950cac5ac3a13ef76b7682b01e34e2be870..855a2a3d0dee57a538d02fdfbebb968fa76392d0 100644 --- 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java @@ -209,12 +209,12 @@ public class ScheduleMapperTest { * test query by process definition id */ @Test - public void queryByProcessDefinitionId() { + public void queryByProcessDefinitionCode() { Schedule schedule = insertOne(); schedule.setProcessDefinitionCode(12345); scheduleMapper.updateById(schedule); - List schedules = scheduleMapper.queryByProcessDefinitionCode(schedule.getProcessDefinitionCode()); - Assert.assertNotEquals(schedules.size(), 0); + Schedule schedules = scheduleMapper.queryByProcessDefinitionCode(schedule.getProcessDefinitionCode()); + Assert.assertNotNull(schedules); } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java index c53460f00273d231374854167488c778cffa493f..2e5fac822796334f96ccac2fc00ca0a67f6774c9 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -275,6 +275,27 @@ public class TaskInstanceMapperTest { Assert.assertNotEquals(taskInstance, null); } + /** + * test query by task instance id and code + */ + @Test + public void testQueryByInstanceIdAndCode() { + // insert ProcessInstance + ProcessInstance processInstance = insertProcessInstance(); + + // insert taskInstance + TaskInstance task = insertTaskInstance(processInstance.getId()); + task.setHost("111.111.11.11"); + taskInstanceMapper.updateById(task); + + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndCode( + task.getProcessInstanceId(), + task.getTaskCode() + ); + taskInstanceMapper.deleteById(task.getId()); + 
Assert.assertNotEquals(taskInstance, null); + } + /** * test count task instance */ @@ -291,7 +312,6 @@ public class TaskInstanceMapperTest { definition.setCreateTime(new Date()); definition.setUpdateTime(new Date()); processDefinitionMapper.insert(definition); - //task.setProcessDefinitionId(definition.getId()); taskInstanceMapper.updateById(task); int countTask = taskInstanceMapper.countTask( @@ -327,7 +347,6 @@ public class TaskInstanceMapperTest { definition.setCreateTime(new Date()); definition.setUpdateTime(new Date()); processDefinitionMapper.insert(definition); - //task.setProcessDefinitionId(definition.getId()); taskInstanceMapper.updateById(task); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDaoTest.java index 43aba5a495360a324667cde6d45bfc07258800a9..9d757b208feaca4450f9bd26bf716cecac8c6d0c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDaoTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDaoTest.java @@ -17,17 +17,18 @@ package org.apache.dolphinscheduler.dao.upgrade; -import static org.apache.dolphinscheduler.dao.upgrade.UpgradeDao.getDataSource; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ActiveProfiles; +import javax.sql.DataSource; import java.util.HashMap; import java.util.Map; -import javax.sql.DataSource; - -import org.junit.Test; - +@ActiveProfiles("h2") public class ProcessDefinitionDaoTest { - final DataSource dataSource = getDataSource(); + @Autowired + private DataSource dataSource; final ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao(); @Test diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SchemaUtilsTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtilsTest.java similarity index 44% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SchemaUtilsTest.java rename to dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtilsTest.java index 7885806b966d4f23921fe67b49c3ce97119afabb..b505709c2596349a93decabd935328abbe9c30f8 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/SchemaUtilsTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtilsTest.java @@ -14,50 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.utils; + +package org.apache.dolphinscheduler.dao.upgrade; import org.junit.Assert; import org.junit.Test; -import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.Arrays; -import java.util.List; -@RunWith(PowerMockRunner.class) -@PrepareForTest({ LoggerFactory.class, FileUtils.class }) public class SchemaUtilsTest { - @Test - public void testReplaceBlank() { - Assert.assertEquals("abc", SchemaUtils.replaceBlank(" abc")); - Assert.assertEquals("abc", SchemaUtils.replaceBlank("abc ")); - Assert.assertEquals("abc", SchemaUtils.replaceBlank("a b c")); - Assert.assertEquals("abc", SchemaUtils.replaceBlank("a b c")); - Assert.assertEquals("", SchemaUtils.replaceBlank(" ")); - Assert.assertEquals("", SchemaUtils.replaceBlank(null)); - Assert.assertEquals("我怕的你", SchemaUtils.replaceBlank("我怕的 你")); - } - - @Test - public void testGetSoftVersion() { - // file not found - try { - SchemaUtils.getSoftVersion(); - } catch (RuntimeException e) { - 
Assert.assertEquals("Failed to get the product version description file. The file could not be found", - e.getMessage()); - } - - // file exists, fmt is invalid - FileUtils.writeContent2File("32432423", "sql/soft_version"); - Assert.assertEquals("32432423", SchemaUtils.getSoftVersion()); - } - @Test public void testIsAGreatVersion() { // param is null @@ -82,35 +46,15 @@ public class SchemaUtilsTest { Assert.assertTrue(SchemaUtils.isAGreatVersion("10.1.1", "1.01.100")); try { SchemaUtils.isAGreatVersion("10.1.1", ".1"); - } catch (Exception e) { - Assert.assertNotNull(e); + Assert.fail("Should fail"); + } catch (Exception ignored) { + // This is expected } try { SchemaUtils.isAGreatVersion("a.1.1", "b.1"); - } catch (Exception e) { - Assert.assertNotNull(e); + Assert.fail("Should fail"); + } catch (Exception ignored) { + // This is expected } } - - @Test - public void testGetAllSchemaList() { - //normal - PowerMockito.mockStatic(FileUtils.class); - File[] files = new File[4]; - files[0] = new File("sql/upgrade/1.2.0_schema"); - files[1] = new File("sql/upgrade/1.0.1_schema"); - files[2] = new File("sql/upgrade/1.0.2_schema"); - files[3] = new File("sql/upgrade/1.1.0_schema"); - PowerMockito.when(FileUtils.getAllDir("sql/upgrade")).thenReturn(files); - List real = SchemaUtils.getAllSchemaList(); - List expect = Arrays.asList("1.0.1_schema", "1.0.2_schema", - "1.1.0_schema", "1.2.0_schema"); - Assert.assertTrue(CollectionUtils.isEqualCollection(real, expect)); - - //normal - files = new File[0]; - PowerMockito.when(FileUtils.getAllDir("sql/upgrade")).thenReturn(files); - real = SchemaUtils.getAllSchemaList(); - Assert.assertNull(real); - } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDaoTest.java index 7eed8672485e66c5077a9278cb4c683cf8ec92d5..ca6ed2f547eb3e0c1bdcdad5a3c31a19c6f546b4 100644 --- 
a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDaoTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDaoTest.java @@ -16,19 +16,24 @@ */ package org.apache.dolphinscheduler.dao.upgrade; -import static org.apache.dolphinscheduler.dao.upgrade.UpgradeDao.getDataSource; - -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.Assert.assertThat; - -import java.util.Map; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; import javax.sql.DataSource; +import java.util.Map; -import org.junit.Test; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.junit.Assert.assertThat; public class WorkerGroupDaoTest { - protected final DataSource dataSource = getDataSource(); + @Autowired + protected DataSource dataSource; + + @BeforeClass + public static void setupClass() { + System.setProperty("spring.profiles.active", "h2"); + } @Test public void testQueryQueryAllOldWorkerGroup() throws Exception { diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java index 18c17fe00be4a3bc8699acd079a0afb783f52a97..fd34c2589cd75f80c5bda9806b254f3ef992520c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java @@ -291,12 +291,14 @@ public class DagHelperTest { TaskNode node1 = new TaskNode(); node1.setId("1"); node1.setName("1"); + node1.setCode(1); node1.setType(TaskType.SHELL.getDesc()); taskNodeList.add(node1); TaskNode node2 = new TaskNode(); node2.setId("2"); node2.setName("2"); + node2.setCode(2); node2.setType(TaskType.SHELL.getDesc()); List dep2 = new ArrayList<>(); dep2.add("1"); @@ -306,12 
+308,14 @@ public class DagHelperTest { TaskNode node4 = new TaskNode(); node4.setId("4"); node4.setName("4"); + node4.setCode(4); node4.setType(TaskType.SHELL.getDesc()); taskNodeList.add(node4); TaskNode node3 = new TaskNode(); node3.setId("3"); node3.setName("3"); + node3.setCode(3); node3.setType(TaskType.SHELL.getDesc()); List dep3 = new ArrayList<>(); dep3.add("2"); @@ -322,6 +326,7 @@ public class DagHelperTest { TaskNode node5 = new TaskNode(); node5.setId("5"); node5.setName("5"); + node5.setCode(5); node5.setType(TaskType.SHELL.getDesc()); List dep5 = new ArrayList<>(); dep5.add("3"); @@ -332,6 +337,7 @@ public class DagHelperTest { TaskNode node6 = new TaskNode(); node6.setId("6"); node6.setName("6"); + node6.setCode(6); node6.setType(TaskType.SHELL.getDesc()); List dep6 = new ArrayList<>(); dep6.add("3"); @@ -341,6 +347,7 @@ public class DagHelperTest { TaskNode node7 = new TaskNode(); node7.setId("7"); node7.setName("7"); + node7.setCode(7); node7.setType(TaskType.SHELL.getDesc()); List dep7 = new ArrayList<>(); dep7.add("5"); @@ -350,6 +357,7 @@ public class DagHelperTest { TaskNode node8 = new TaskNode(); node8.setId("8"); node8.setName("8"); + node8.setCode(8); node8.setType(TaskType.SHELL.getDesc()); List dep8 = new ArrayList<>(); dep8.add("2"); @@ -381,12 +389,14 @@ public class DagHelperTest { TaskNode node = new TaskNode(); node.setId("0"); node.setName("0"); + node.setCode(0); node.setType("SHELL"); taskNodeList.add(node); TaskNode node1 = new TaskNode(); node1.setId("1"); node1.setName("1"); + node1.setCode(1); node1.setType("switch"); node1.setDependence(JSONUtils.toJsonString(getSwitchNode())); taskNodeList.add(node1); @@ -394,6 +404,7 @@ public class DagHelperTest { TaskNode node2 = new TaskNode(); node2.setId("2"); node2.setName("2"); + node2.setCode(2); node2.setType("SHELL"); List dep2 = new ArrayList<>(); dep2.add("1"); @@ -403,6 +414,7 @@ public class DagHelperTest { TaskNode node4 = new TaskNode(); node4.setId("4"); 
node4.setName("4"); + node4.setCode(4); node4.setType("SHELL"); List dep4 = new ArrayList<>(); dep4.add("1"); @@ -412,6 +424,7 @@ public class DagHelperTest { TaskNode node5 = new TaskNode(); node5.setId("4"); node5.setName("4"); + node5.setCode(4); node5.setType("SHELL"); List dep5 = new ArrayList<>(); dep5.add("1"); @@ -433,15 +446,15 @@ public class DagHelperTest { SwitchParameters conditionsParameters = new SwitchParameters(); SwitchResultVo switchResultVo1 = new SwitchResultVo(); switchResultVo1.setCondition(" 2 == 1"); - switchResultVo1.setNextNode("2"); + switchResultVo1.setNextNode(2L); SwitchResultVo switchResultVo2 = new SwitchResultVo(); switchResultVo2.setCondition(" 2 == 2"); - switchResultVo2.setNextNode("4"); + switchResultVo2.setNextNode(4L); List list = new ArrayList<>(); list.add(switchResultVo1); list.add(switchResultVo2); conditionsParameters.setDependTaskList(list); - conditionsParameters.setNextNode("5"); + conditionsParameters.setNextNode(5L); conditionsParameters.setRelation("AND"); // in: AND(AND(1 is SUCCESS)) diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..7118e42b93cc5a6b3efa7ca16e90f8a94fc76621 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml @@ -0,0 +1,58 @@ + + + + + dolphinscheduler-datasource-plugin + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-datasource-all + + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-clickhouse + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-db2 + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-hive + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-mysql + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-oracle + + + org.apache.dolphinscheduler + 
dolphinscheduler-datasource-postgresql + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-sqlserver + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..6e758c6dbd536148c15aa50849a40e3398546bb6 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/pom.xml @@ -0,0 +1,151 @@ + + + + + + dolphinscheduler-datasource-plugin + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-datasource-api + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + provided + + + + commons-io + commons-io + provided + + + + org.slf4j + slf4j-api + provided + + + + com.google.guava + guava + + + + commons-codec + commons-codec + + + + org.apache.commons + commons-collections4 + provided + + + + com.github.oshi + oshi-core + provided + + + + org.springframework + spring-jdbc + + + + org.apache.hadoop + hadoop-client + + + org.slf4j + slf4j-log4j12 + + + servlet-api + javax.servlet + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + + org.fusesource.leveldbjni + leveldbjni-all + + + org.apache.zookeeper + zookeeper + + + org.apache.hadoop + hadoop-mapreduce-client-shuffle + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jaxb-api + javax.xml.bind + + + log4j + log4j + + + provided + + + org.mockito + mockito-core + jar + test + + + + org.powermock + powermock-module-junit4 + test + + + + org.powermock + powermock-api-mockito2 + test + + + + com.zaxxer + HikariCP + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java new file mode 100644 index 0000000000000000000000000000000000000000..bec5c45ba43d5af5e239f0c69254117c396ff5e6 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.api.client; + +import org.apache.dolphinscheduler.plugin.datasource.api.provider.JdbcDataSourceProvider; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.concurrent.TimeUnit; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.jdbc.core.JdbcTemplate; + +import com.google.common.base.Stopwatch; + +public class CommonDataSourceClient implements DataSourceClient { + + private static final Logger logger = LoggerFactory.getLogger(CommonDataSourceClient.class); + + public static final String COMMON_USER = "root"; + public static final String COMMON_PASSWORD = "123456"; + public static final String COMMON_VALIDATION_QUERY = "select 1"; + + protected final BaseConnectionParam baseConnectionParam; + protected DataSource dataSource; + protected JdbcTemplate jdbcTemplate; + + public CommonDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + this.baseConnectionParam = baseConnectionParam; + preInit(); + checkEnv(baseConnectionParam); + initClient(baseConnectionParam, dbType); + checkClient(); + } + + protected void preInit() { + logger.info("preInit in CommonDataSourceClient"); + } + + protected void checkEnv(BaseConnectionParam baseConnectionParam) { + checkValidationQuery(baseConnectionParam); + checkUser(baseConnectionParam); + } + + protected void initClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + this.dataSource = JdbcDataSourceProvider.createJdbcDataSource(baseConnectionParam, dbType); + this.jdbcTemplate = new JdbcTemplate(dataSource); + } + + protected void checkUser(BaseConnectionParam baseConnectionParam) { + if 
(StringUtils.isBlank(baseConnectionParam.getUser())) { + setDefaultUsername(baseConnectionParam); + } + if (StringUtils.isBlank(baseConnectionParam.getPassword())) { + setDefaultPassword(baseConnectionParam); + } + } + + protected void setDefaultUsername(BaseConnectionParam baseConnectionParam) { + baseConnectionParam.setUser(COMMON_USER); + } + + protected void setDefaultPassword(BaseConnectionParam baseConnectionParam) { + baseConnectionParam.setPassword(COMMON_PASSWORD); + } + + protected void checkValidationQuery(BaseConnectionParam baseConnectionParam) { + if (StringUtils.isBlank(baseConnectionParam.getValidationQuery())) { + setDefaultValidationQuery(baseConnectionParam); + } + } + + protected void setDefaultValidationQuery(BaseConnectionParam baseConnectionParam) { + baseConnectionParam.setValidationQuery(COMMON_VALIDATION_QUERY); + } + + @Override + public void checkClient() { + //Checking data source client + Stopwatch stopwatch = Stopwatch.createStarted(); + try { + this.jdbcTemplate.execute(this.baseConnectionParam.getValidationQuery()); + } catch (Exception e) { + throw new RuntimeException("JDBC connect failed", e); + } finally { + logger.info("Time to execute check jdbc client with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); + } + } + + @Override + public Connection getConnection() { + try { + return this.dataSource.getConnection(); + } catch (SQLException e) { + logger.error("get druidDataSource Connection fail SQLException: {}", e.getMessage(), e); + return null; + } + } + + @Override + public void close() { + logger.info("do close dataSource."); + this.dataSource = null; + this.jdbcTemplate = null; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDatasourceProcessor.java similarity index 79% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDatasourceProcessor.java index a9d3bcef361b564a4e0f49d9d5e6285e6bc4f168..0826b404b6984c4cbd9d88c3996fe76ec0b272df 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/AbstractDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/AbstractDatasourceProcessor.java @@ -15,18 +15,23 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.commons.collections4.MapUtils; +import java.text.MessageFormat; import java.util.Map; import java.util.regex.Pattern; public abstract class AbstractDatasourceProcessor implements DatasourceProcessor { - private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$"); + private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\,]+$"); - private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$"); + private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]\\,]+$"); private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$"); @@ -76,4 +81,9 
@@ public abstract class AbstractDatasourceProcessor implements DatasourceProcessor } } + @Override + public String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { + BaseConnectionParam baseConnectionParam = (BaseConnectionParam) connectionParam; + return MessageFormat.format("{0}@{1}@{2}", dbType.getDescp(), baseConnectionParam.getUser(), baseConnectionParam.getJdbcUrl()); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseDataSourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseDataSourceParamDTO.java similarity index 79% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseDataSourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseDataSourceParamDTO.java index 8bc3f9437962a205e7e1f65dbcb3653e8c7f80e7..d6d6cab32f459521aed976796cbd234627123024 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseDataSourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseDataSourceParamDTO.java @@ -15,18 +15,18 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource; - -import org.apache.dolphinscheduler.common.datasource.clickhouse.ClickHouseDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.hive.HiveDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.presto.PrestoDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerDatasourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbType; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse.ClickHouseDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2.Db2DatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive.HiveDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle.OracleDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql.PostgreSqlDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto.PrestoDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark.SparkDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver.SqlServerDatasourceParamDTO; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.io.Serializable; import 
java.util.Map; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsConnectionParam.java similarity index 92% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsConnectionParam.java index 908b7c8cefd38e5472a28980532d66c8736d3664..b5fc59b511b37af83b7ca9e99fcbf5613380e77f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsConnectionParam.java @@ -15,7 +15,9 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class BaseHdfsConnectionParam extends BaseConnectionParam { protected String principal; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsDatasourceParamDTO.java similarity index 96% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsDatasourceParamDTO.java index 0cfa3cb4346443f0cf7c68535b5bfd81fac09991..aaa1892b887d1b2a8a69eb238802bc7eca11d540 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/BaseHdfsDatasourceParamDTO.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource; public abstract class BaseHdfsDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/DatasourceProcessor.java similarity index 87% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/DatasourceProcessor.java index b0243a4f7232822c7f9f4126bd0d4e3907b4761a..63c434c91630072bd9248cc805529a54f6a94bc1 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/DatasourceProcessor.java @@ -15,8 +15,9 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; import java.io.IOException; @@ -30,6 +31,12 @@ public interface DatasourceProcessor { */ void checkDatasourceParam(BaseDataSourceParamDTO datasourceParam); + /** + * get Datasource Client UniqueId + * @return UniqueId + */ + String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType); + /** * create BaseDataSourceParamDTO by connectionJson * @@ -58,6 +65,11 @@ public interface DatasourceProcessor { */ String getDatasourceDriver(); + /** + * get validation Query + */ + String getValidationQuery(); + /** * get jdbcUrl by connection param, the jdbcUrl is different with ConnectionParam.jdbcUrl, this method will inject * other to jdbcUrl diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceParamDTO.java index 0cdb20fdfed13cef9730e3bdf56711772b1bad9b..7938204aac53b0c54306391b7064d548ee0c1e54 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceParamDTO.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.clickhouse; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class ClickHouseDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessor.java similarity index 80% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessor.java index 81611a3e1242c151cf3f8bf3bbd7c2e45143727a..330967b2b8e0461f78d5e5a89aa1f79731bb38ab 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessor.java @@ -15,18 +15,18 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.clickhouse; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.StringUtils; import java.sql.Connection; import java.sql.DriverManager; @@ -64,8 +64,11 @@ public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor { clickhouseConnectionParam.setAddress(address); clickhouseConnectionParam.setJdbcUrl(jdbcUrl); clickhouseConnectionParam.setUser(clickHouseParam.getUserName()); - clickhouseConnectionParam.setPassword(CommonUtils.encodePassword(clickHouseParam.getPassword())); + clickhouseConnectionParam.setPassword(PasswordUtils.encodePassword(clickHouseParam.getPassword())); + clickhouseConnectionParam.setDriverClassName(getDatasourceDriver()); + clickhouseConnectionParam.setValidationQuery(getValidationQuery()); 
clickhouseConnectionParam.setOther(transformOther(clickHouseParam.getOther())); + clickhouseConnectionParam.setProps(clickHouseParam.getOther()); return clickhouseConnectionParam; } @@ -79,6 +82,11 @@ public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.COM_CLICKHOUSE_JDBC_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.CLICKHOUSE_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam; @@ -94,7 +102,7 @@ public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor { ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(clickhouseConnectionParam), - clickhouseConnectionParam.getUser(), CommonUtils.decodePassword(clickhouseConnectionParam.getPassword())); + clickhouseConnectionParam.getUser(), PasswordUtils.decodePassword(clickhouseConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickhouseConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickhouseConnectionParam.java similarity index 78% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickhouseConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickhouseConnectionParam.java index d2fdd0a33390adb1b4c7fb2e13fbdd4a94cdab8a..5049cf6253af6cd2898a786129a688d92fb965a3 100644 --- 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickhouseConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickhouseConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.clickhouse; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class ClickhouseConnectionParam extends BaseConnectionParam { @Override @@ -28,6 +28,9 @@ public class ClickhouseConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2ConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2ConnectionParam.java similarity index 78% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2ConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2ConnectionParam.java index bb47ec2aea4d3d837f6befb5ed464d163697add3..c77b3a78ca168c687d8ce676446712b48e20bcf7 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2ConnectionParam.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2ConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.db2; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class Db2ConnectionParam extends BaseConnectionParam { @Override @@ -28,6 +28,9 @@ public class Db2ConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceParamDTO.java index b5476a525e9c405d065e1e52a3eb4c418364f4ca..a0adfd4fd387502429724e22bab91e6495867a23 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceParamDTO.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.db2; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class Db2DatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessor.java similarity index 79% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessor.java index 4bad7f60617033557fa20b40b0c26cdf5a6d5478..d9b6f8d27732b41c80206d9a7f7d18cca7e5ec33 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessor.java @@ -15,19 +15,19 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.db2; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.StringUtils; import java.sql.Connection; import java.sql.DriverManager; @@ -65,8 +65,11 @@ public class Db2DatasourceProcessor extends AbstractDatasourceProcessor { db2ConnectionParam.setDatabase(db2Param.getDatabase()); db2ConnectionParam.setJdbcUrl(jdbcUrl); db2ConnectionParam.setUser(db2Param.getUserName()); - db2ConnectionParam.setPassword(CommonUtils.encodePassword(db2Param.getPassword())); + db2ConnectionParam.setPassword(PasswordUtils.encodePassword(db2Param.getPassword())); + db2ConnectionParam.setDriverClassName(getDatasourceDriver()); + 
db2ConnectionParam.setValidationQuery(getValidationQuery()); db2ConnectionParam.setOther(transformOther(db2Param.getOther())); + db2ConnectionParam.setProps(db2Param.getOther()); return db2ConnectionParam; } @@ -95,7 +98,7 @@ public class Db2DatasourceProcessor extends AbstractDatasourceProcessor { Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(db2ConnectionParam), - db2ConnectionParam.getUser(), CommonUtils.decodePassword(db2ConnectionParam.getPassword())); + db2ConnectionParam.getUser(), PasswordUtils.decodePassword(db2ConnectionParam.getPassword())); } @Override @@ -103,6 +106,11 @@ public class Db2DatasourceProcessor extends AbstractDatasourceProcessor { return DbType.DB2; } + @Override + public String getValidationQuery() { + return Constants.DB2_VALIDATION_QUERY; + } + private String transformOther(Map otherMap) { if (MapUtils.isEmpty(otherMap)) { return null; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveConnectionParam.java similarity index 81% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveConnectionParam.java index 006f12917b5f3b48c010a14a20558af713547f45..0b1ce523493fa9b7da8f5b6d501bc782a502d8d8 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveConnectionParam.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.hive; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseHdfsConnectionParam; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseHdfsConnectionParam; public class HiveConnectionParam extends BaseHdfsConnectionParam { @Override @@ -28,6 +28,9 @@ public class HiveConnectionParam extends BaseHdfsConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + ", principal='" + principal + '\'' + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDataSourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDataSourceParamDTO.java similarity index 90% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDataSourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDataSourceParamDTO.java index 678f866b19dd1acadd14090eecc2c751dca3d080..4aaceaae237b9fb69aaed3353a09344470169af5 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDataSourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDataSourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.hive; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseHdfsDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseHdfsDatasourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class HiveDataSourceParamDTO extends BaseHdfsDatasourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessor.java similarity index 83% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessor.java index 8113e59e39a34e829b6905b9b91f6b990a211b83..15e2ad22283ca693c32ce2ce57bf769637cc1e44 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessor.java @@ -15,20 +15,20 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.hive; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.HiveConfUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.StringUtils; import java.io.IOException; import java.sql.Connection; @@ -74,16 +74,15 @@ public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { } address.deleteCharAt(address.length() - 1); String jdbcUrl = address.toString() + "/" + hiveParam.getDatabase(); - if (CommonUtils.getKerberosStartupState()) { - jdbcUrl += ";principal=" + hiveParam.getPrincipal(); - } HiveConnectionParam hiveConnectionParam = new HiveConnectionParam(); 
hiveConnectionParam.setDatabase(hiveParam.getDatabase()); hiveConnectionParam.setAddress(address.toString()); hiveConnectionParam.setJdbcUrl(jdbcUrl); hiveConnectionParam.setUser(hiveParam.getUserName()); - hiveConnectionParam.setPassword(CommonUtils.encodePassword(hiveParam.getPassword())); + hiveConnectionParam.setPassword(PasswordUtils.encodePassword(hiveParam.getPassword())); + hiveConnectionParam.setDriverClassName(getDatasourceDriver()); + hiveConnectionParam.setValidationQuery(getValidationQuery()); if (CommonUtils.getKerberosStartupState()) { hiveConnectionParam.setPrincipal(hiveParam.getPrincipal()); @@ -92,6 +91,7 @@ public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { hiveConnectionParam.setLoginUserKeytabUsername(hiveParam.getLoginUserKeytabUsername()); } hiveConnectionParam.setOther(transformOther(hiveParam.getOther())); + hiveConnectionParam.setProps(hiveParam.getOther()); return hiveConnectionParam; } @@ -105,6 +105,11 @@ public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.HIVE_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam; @@ -123,7 +128,7 @@ public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { hiveConnectionParam.getLoginUserKeytabUsername(), hiveConnectionParam.getLoginUserKeytabPath()); Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(connectionParam), - hiveConnectionParam.getUser(), CommonUtils.decodePassword(hiveConnectionParam.getPassword())); + hiveConnectionParam.getUser(), PasswordUtils.decodePassword(hiveConnectionParam.getPassword())); } @Override @@ -152,11 +157,7 @@ public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { String[] otherArray = otherParams.split(";", -1); 
for (String conf : otherArray) { - if (HiveConfUtils.isHiveConfVar(conf)) { - hiveConfListSb.append(conf).append(";"); - } else { - sessionVarListSb.append(conf).append(";"); - } + sessionVarListSb.append(conf).append(";"); } // remove the last ";" diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlConnectionParam.java similarity index 78% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlConnectionParam.java index ea7a574db57b316477ef9d57763dc887dbad6965..c86af3b915d499e5a2c1cecf356c8f88e2034bc1 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlConnectionParam.java @@ -15,12 +15,11 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.datasource.mysql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class MysqlConnectionParam extends BaseConnectionParam { - @Override public String toString() { return "MysqlConnectionParam{" @@ -29,6 +28,9 @@ public class MysqlConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceParamDTO.java index 3bcb84bd607bb1393cff08c3528b35e04cdead88..25d5a95acdfc71c5a6b33ba679f21d56008d4205 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations 
under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.mysql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class MysqlDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessor.java similarity index 83% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessor.java index 564ed3dfc267b01efa4f4f4b59c310d246fd5248..9624cbc44ec037a4a9b0a4d8fea7528ba4f7c2f9 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessor.java @@ -15,19 +15,19 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.mysql; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.StringUtils; import java.sql.Connection; import java.sql.DriverManager; @@ -82,8 +82,11 @@ public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor { mysqlConnectionParam.setDatabase(mysqlDatasourceParam.getDatabase()); mysqlConnectionParam.setAddress(address); mysqlConnectionParam.setUser(mysqlDatasourceParam.getUserName()); - mysqlConnectionParam.setPassword(CommonUtils.encodePassword(mysqlDatasourceParam.getPassword())); + mysqlConnectionParam.setPassword(PasswordUtils.encodePassword(mysqlDatasourceParam.getPassword())); + 
mysqlConnectionParam.setDriverClassName(getDatasourceDriver()); + mysqlConnectionParam.setValidationQuery(getValidationQuery()); mysqlConnectionParam.setOther(transformOther(mysqlDatasourceParam.getOther())); + mysqlConnectionParam.setProps(mysqlDatasourceParam.getOther()); return mysqlConnectionParam; } @@ -95,7 +98,12 @@ public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor { @Override public String getDatasourceDriver() { - return Constants.COM_MYSQL_JDBC_DRIVER; + return Constants.COM_MYSQL_CJ_JDBC_DRIVER; + } + + @Override + public String getValidationQuery() { + return Constants.MYSQL_VALIDATION_QUERY; } @Override @@ -117,7 +125,7 @@ public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor { logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE); user = user.replace(AUTO_DESERIALIZE, ""); } - String password = CommonUtils.decodePassword(mysqlConnectionParam.getPassword()); + String password = PasswordUtils.decodePassword(mysqlConnectionParam.getPassword()); if (password.contains(AUTO_DESERIALIZE)) { logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE); password = password.replace(AUTO_DESERIALIZE, ""); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleConnectionParam.java similarity index 82% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleConnectionParam.java index 
45c7ae14deb430d08b9c69e96a56d2e4f4605968..9984174042662b6c5d14432b01e3a96ced4c43b6 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleConnectionParam.java @@ -15,13 +15,12 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.oracle; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbConnectType; public class OracleConnectionParam extends BaseConnectionParam { - protected DbConnectType connectType; public DbConnectType getConnectType() { @@ -40,6 +39,9 @@ public class OracleConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + ", connectType=" + connectType + '}'; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceParamDTO.java similarity index 91% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceParamDTO.java rename to 
dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceParamDTO.java index 502a893579d1abcbb1a2244eda543cd0b7833913..b027266cf3897fd416d27e53aac306798881d8d5 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.oracle; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbConnectType; import org.apache.dolphinscheduler.spi.enums.DbType; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessor.java similarity index 79% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessor.java index 4ec7e19bb646fcdd440da8251e8479bfa604296e..a3b72b32d29bb92c6856e23f1e88384d52758061 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessor.java 
+++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessor.java @@ -15,17 +15,17 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.oracle; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbConnectType; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbConnectType; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang.StringUtils; @@ -65,23 +65,28 @@ public class OracleDatasourceProcessor extends AbstractDatasourceProcessor { public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { OracleDatasourceParamDTO oracleParam = (OracleDatasourceParamDTO) datasourceParam; 
String address; + String jdbcUrl; if (DbConnectType.ORACLE_SID.equals(oracleParam.getConnectType())) { address = String.format("%s%s:%s", Constants.JDBC_ORACLE_SID, oracleParam.getHost(), oracleParam.getPort()); + jdbcUrl = address + ":" + oracleParam.getDatabase(); } else { address = String.format("%s%s:%s", Constants.JDBC_ORACLE_SERVICE_NAME, oracleParam.getHost(), oracleParam.getPort()); + jdbcUrl = address + "/" + oracleParam.getDatabase(); } - String jdbcUrl = address + "/" + oracleParam.getDatabase(); OracleConnectionParam oracleConnectionParam = new OracleConnectionParam(); oracleConnectionParam.setUser(oracleParam.getUserName()); - oracleConnectionParam.setPassword(CommonUtils.encodePassword(oracleParam.getPassword())); + oracleConnectionParam.setPassword(PasswordUtils.encodePassword(oracleParam.getPassword())); oracleConnectionParam.setAddress(address); oracleConnectionParam.setJdbcUrl(jdbcUrl); oracleConnectionParam.setDatabase(oracleParam.getDatabase()); oracleConnectionParam.setConnectType(oracleParam.getConnectType()); + oracleConnectionParam.setDriverClassName(getDatasourceDriver()); + oracleConnectionParam.setValidationQuery(getValidationQuery()); oracleConnectionParam.setOther(transformOther(oracleParam.getOther())); + oracleConnectionParam.setProps(oracleParam.getOther()); return oracleConnectionParam; } @@ -96,6 +101,11 @@ public class OracleDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.COM_ORACLE_JDBC_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.ORACLE_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam; @@ -110,7 +120,7 @@ public class OracleDatasourceProcessor extends AbstractDatasourceProcessor { OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return 
DriverManager.getConnection(getJdbcUrl(connectionParam), - oracleConnectionParam.getUser(), CommonUtils.decodePassword(oracleConnectionParam.getPassword())); + oracleConnectionParam.getUser(), PasswordUtils.decodePassword(oracleConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlConnectionParam.java similarity index 78% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlConnectionParam.java index 7090581f6501652ee2320e4ce63d583ef1cab10a..e2e3dc0a293fec98c1c588c69fe48c6c1896462f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.postgresql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class PostgreSqlConnectionParam extends BaseConnectionParam { @Override @@ -28,6 +28,9 @@ public class PostgreSqlConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceParamDTO.java index ef1ec9eda42f060e2ccdd35f73031bab7e929222..b17f8380be6730092bb9dcbecb81f5570a929649 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceParamDTO.java @@ -15,9 +15,9 @@ * 
limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.postgresql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class PostgreSqlDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessor.java similarity index 80% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessor.java index e7939630a4536c6c084744b5b3e4fd9c501f749d..749c6b5944d4be3888204e8b64d57072713ca7e2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessor.java @@ -15,16 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.postgresql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang.StringUtils; @@ -65,8 +65,11 @@ public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor { postgreSqlConnectionParam.setAddress(address); postgreSqlConnectionParam.setDatabase(postgreSqlParam.getDatabase()); postgreSqlConnectionParam.setUser(postgreSqlParam.getUserName()); - postgreSqlConnectionParam.setPassword(CommonUtils.encodePassword(postgreSqlParam.getPassword())); + postgreSqlConnectionParam.setPassword(PasswordUtils.encodePassword(postgreSqlParam.getPassword())); + postgreSqlConnectionParam.setDriverClassName(getDatasourceDriver()); + postgreSqlConnectionParam.setValidationQuery(getValidationQuery()); 
postgreSqlConnectionParam.setOther(transformOther(postgreSqlParam.getOther())); + postgreSqlConnectionParam.setProps(postgreSqlParam.getOther()); return postgreSqlConnectionParam; } @@ -81,6 +84,11 @@ public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.ORG_POSTGRESQL_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.POSTGRESQL_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam; @@ -95,7 +103,7 @@ public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor { PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(postgreSqlConnectionParam), - postgreSqlConnectionParam.getUser(), CommonUtils.decodePassword(postgreSqlConnectionParam.getPassword())); + postgreSqlConnectionParam.getUser(), PasswordUtils.decodePassword(postgreSqlConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoConnectionParam.java similarity index 78% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoConnectionParam.java index 436bc6dd1e888a62a1cd4b308d3d8dc6804293ed..fcdd17eaaad8571d4096fbf21372e0973c386cf4 100644 --- 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.presto; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class PrestoConnectionParam extends BaseConnectionParam { @Override @@ -28,6 +28,9 @@ public class PrestoConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceParamDTO.java index b592e217c2d4fffc90d29026e37c401e10f5ded3..50e65b5b5525f2a4cb4c4a26a035258769ea0d8d 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.presto; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class PrestoDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessor.java similarity index 80% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessor.java index 25dab29a689ba7c0b942445b9e1aaf5c215abf34..b9fbc33b027e993c1b56384ec5de23c550b5c7ab 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessor.java @@ -15,16 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.presto; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang.StringUtils; @@ -64,11 +64,14 @@ public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor { PrestoConnectionParam prestoConnectionParam = new PrestoConnectionParam(); prestoConnectionParam.setUser(prestoParam.getUserName()); - prestoConnectionParam.setPassword(CommonUtils.encodePassword(prestoParam.getPassword())); + prestoConnectionParam.setPassword(PasswordUtils.encodePassword(prestoParam.getPassword())); prestoConnectionParam.setOther(transformOther(prestoParam.getOther())); prestoConnectionParam.setAddress(address); prestoConnectionParam.setJdbcUrl(jdbcUrl); prestoConnectionParam.setDatabase(prestoParam.getDatabase()); + 
prestoConnectionParam.setDriverClassName(getDatasourceDriver()); + prestoConnectionParam.setValidationQuery(getValidationQuery()); + prestoConnectionParam.setProps(prestoParam.getOther()); return prestoConnectionParam; } @@ -83,6 +86,11 @@ public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.COM_PRESTO_JDBC_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.PRESTO_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam; @@ -97,7 +105,7 @@ public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor { PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(connectionParam), - prestoConnectionParam.getUser(), CommonUtils.decodePassword(prestoConnectionParam.getPassword())); + prestoConnectionParam.getUser(), PasswordUtils.decodePassword(prestoConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkConnectionParam.java similarity index 81% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkConnectionParam.java index 725d7b8e2b0aa6c0ffb255424cd5c0af31a1f80c..bd1bb9e025004794eee7d499e6f977ccc3163f6f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkConnectionParam.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.spark; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark; -import org.apache.dolphinscheduler.common.datasource.BaseHdfsConnectionParam; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseHdfsConnectionParam; public class SparkConnectionParam extends BaseHdfsConnectionParam { @Override @@ -28,6 +28,9 @@ public class SparkConnectionParam extends BaseHdfsConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + ", principal='" + principal + '\'' + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceParamDTO.java similarity index 90% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceParamDTO.java index 144fae5a1fe98791487768edb6392d30e7e73f1c..82e1f1124f9a219533085ffa018ca9963f5f878f 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource.spark; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseHdfsDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseHdfsDatasourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class SparkDatasourceParamDTO extends BaseHdfsDatasourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessor.java similarity index 82% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessor.java index 7c7d021056b9555cec7d349990b09cca824274c2..bb930d7612b4ffcfdc5bd1197bc7c44134c5fd13 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessor.java @@ -15,19 +15,20 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.spark; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.StringUtils; import java.io.IOException; import java.sql.Connection; @@ -76,17 +77,18 @@ public class SparkDatasourceProcessor extends AbstractDatasourceProcessor { address.deleteCharAt(address.length() - 1); String jdbcUrl = address + "/" + sparkDatasourceParam.getDatabase(); - if (CommonUtils.getKerberosStartupState()) { - jdbcUrl += ";principal=" + sparkDatasourceParam.getPrincipal(); - } SparkConnectionParam sparkConnectionParam = new SparkConnectionParam(); - 
sparkConnectionParam.setPassword(CommonUtils.encodePassword(sparkDatasourceParam.getPassword())); + sparkConnectionParam.setPassword(PasswordUtils.encodePassword(sparkDatasourceParam.getPassword())); sparkConnectionParam.setUser(sparkDatasourceParam.getUserName()); sparkConnectionParam.setOther(transformOther(sparkDatasourceParam.getOther())); sparkConnectionParam.setDatabase(sparkDatasourceParam.getDatabase()); sparkConnectionParam.setAddress(address.toString()); sparkConnectionParam.setJdbcUrl(jdbcUrl); + sparkConnectionParam.setDriverClassName(getDatasourceDriver()); + sparkConnectionParam.setValidationQuery(getValidationQuery()); + sparkConnectionParam.setProps(sparkDatasourceParam.getOther()); + if (CommonUtils.getKerberosStartupState()) { sparkConnectionParam.setPrincipal(sparkDatasourceParam.getPrincipal()); sparkConnectionParam.setJavaSecurityKrb5Conf(sparkDatasourceParam.getJavaSecurityKrb5Conf()); @@ -107,6 +109,11 @@ public class SparkDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.HIVE_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam; @@ -123,7 +130,7 @@ public class SparkDatasourceProcessor extends AbstractDatasourceProcessor { sparkConnectionParam.getLoginUserKeytabUsername(), sparkConnectionParam.getLoginUserKeytabPath()); Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(sparkConnectionParam), - sparkConnectionParam.getUser(), CommonUtils.decodePassword(sparkConnectionParam.getPassword())); + sparkConnectionParam.getUser(), PasswordUtils.decodePassword(sparkConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerConnectionParam.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerConnectionParam.java similarity index 78% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerConnectionParam.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerConnectionParam.java index ff5225771a0522decaa057380761296213a58e1b..d362c241aaa9dffaf75b88a25c9c6d6479e32e37 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerConnectionParam.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerConnectionParam.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.sqlserver; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; public class SqlServerConnectionParam extends BaseConnectionParam { @Override @@ -28,6 +28,9 @@ public class SqlServerConnectionParam extends BaseConnectionParam { + ", address='" + address + '\'' + ", database='" + database + '\'' + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + ", other='" + other + '\'' + '}'; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceParamDTO.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceParamDTO.java similarity index 89% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceParamDTO.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceParamDTO.java index 101ab2aa9f82480e40add9f5a546d7885ab0b1ea..b90bb8820b4834ca2535f65558e6c86782df6a40 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceParamDTO.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceParamDTO.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.datasource.sqlserver; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; import org.apache.dolphinscheduler.spi.enums.DbType; public class SqlServerDatasourceParamDTO extends BaseDataSourceParamDTO { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessor.java similarity index 80% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessor.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessor.java index 32fca091f19bcdeebb26a693f277ad1b4af29454..c4c2d9fec1db134cc3f4f69dab0d5cec43296470 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessor.java @@ -15,16 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.sqlserver; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.ConnectionParam; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang.StringUtils; @@ -64,7 +64,10 @@ public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor { sqlServerConnectionParam.setJdbcUrl(jdbcUrl); sqlServerConnectionParam.setOther(transformOther(sqlServerParam.getOther())); sqlServerConnectionParam.setUser(sqlServerParam.getUserName()); - sqlServerConnectionParam.setPassword(CommonUtils.encodePassword(sqlServerParam.getPassword())); + sqlServerConnectionParam.setPassword(PasswordUtils.encodePassword(sqlServerParam.getPassword())); + sqlServerConnectionParam.setDriverClassName(getDatasourceDriver()); + sqlServerConnectionParam.setValidationQuery(getValidationQuery()); + 
sqlServerConnectionParam.setProps(sqlServerParam.getOther()); return sqlServerConnectionParam; } @@ -78,6 +81,11 @@ public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor { return Constants.COM_SQLSERVER_JDBC_DRIVER; } + @Override + public String getValidationQuery() { + return Constants.SQLSERVER_VALIDATION_QUERY; + } + @Override public String getJdbcUrl(ConnectionParam connectionParam) { SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam; @@ -93,7 +101,7 @@ public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor { SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam; Class.forName(getDatasourceDriver()); return DriverManager.getConnection(getJdbcUrl(connectionParam), sqlServerConnectionParam.getUser(), - CommonUtils.decodePassword(sqlServerConnectionParam.getPassword())); + PasswordUtils.decodePassword(sqlServerConnectionParam.getPassword())); } @Override diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java new file mode 100644 index 0000000000000000000000000000000000000000..c34283322e0e4c1b736d99aae540ff66b57b87b8 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.api.plugin; + +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import java.sql.Connection; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DataSourceClientProvider { + private static final Logger logger = LoggerFactory.getLogger(DataSourceClientProvider.class); + + private static final Map uniqueId2dataSourceClientMap = new ConcurrentHashMap<>(); + + private DataSourcePluginManager dataSourcePluginManager; + + private DataSourceClientProvider() { + initDataSourcePlugin(); + } + + private static class DataSourceClientProviderHolder { + private static final DataSourceClientProvider INSTANCE = new DataSourceClientProvider(); + } + + public static DataSourceClientProvider getInstance() { + return DataSourceClientProviderHolder.INSTANCE; + } + + public Connection getConnection(DbType dbType, ConnectionParam connectionParam) { + BaseConnectionParam baseConnectionParam = 
(BaseConnectionParam) connectionParam; + String datasourceUniqueId = DatasourceUtil.getDatasourceUniqueId(baseConnectionParam, dbType); + logger.info("getConnection datasourceUniqueId {}", datasourceUniqueId); + + DataSourceClient dataSourceClient = uniqueId2dataSourceClientMap.computeIfAbsent(datasourceUniqueId, $ -> { + Map dataSourceChannelMap = dataSourcePluginManager.getDataSourceChannelMap(); + DataSourceChannel dataSourceChannel = dataSourceChannelMap.get(dbType.getDescp()); + if (null == dataSourceChannel) { + throw new RuntimeException(String.format("datasource plugin '%s' is not found", dbType.getDescp())); + } + return dataSourceChannel.createDataSourceClient(baseConnectionParam, dbType); + }); + return dataSourceClient.getConnection(); + } + + private void initDataSourcePlugin() { + dataSourcePluginManager = new DataSourcePluginManager(); + dataSourcePluginManager.installPlugin(); + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/vo/AlertGroupVo.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java similarity index 33% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/vo/AlertGroupVo.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java index db9c9f50bc066cd05879490b71069722693ec5cc..27a2ea8e40c0f9f301d5efc4aa4599e53d274791 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/vo/AlertGroupVo.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java @@ -15,73 +15,52 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.vo; +package org.apache.dolphinscheduler.plugin.datasource.api.plugin; -import java.util.Date; +import static java.lang.String.format; -/** - * AlertGroupVo - */ -public class AlertGroupVo { +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; - /** - * primary key - */ - private int id; - /** - * group_name - */ - private String groupName; - /** - * description - */ - private String description; - /** - * create_time - */ - private Date createTime; - /** - * update_time - */ - private Date updateTime; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.ServiceLoader; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; - public int getId() { - return id; - } +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; - public void setId(int id) { - this.id = id; - } +public class DataSourcePluginManager { + private static final Logger logger = LoggerFactory.getLogger(DataSourcePluginManager.class); - public String getGroupName() { - return groupName; - } + private final Map datasourceClientMap = new ConcurrentHashMap<>(); - public void setGroupName(String groupName) { - this.groupName = groupName; + public Map getDataSourceChannelMap() { + return Collections.unmodifiableMap(datasourceClientMap); } - public String getDescription() { - return description; - } + public void installPlugin() { + final Set names = new HashSet<>(); - public void setDescription(String description) { - this.description = description; - } + ServiceLoader.load(DataSourceChannelFactory.class).forEach(factory -> { + final String name = factory.getName(); - public Date getCreateTime() { - return createTime; - } + logger.info("Registering datasource plugin: {}", name); - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } + if (!names.add(name)) { + throw new 
IllegalStateException(format("Duplicate datasource plugins named '%s'", name)); + } + + loadDatasourceClient(factory); - public Date getUpdateTime() { - return updateTime; + logger.info("Registered datasource plugin: {}", name); + }); } - public void setUpdateTime(Date updateTime) { - this.updateTime = updateTime; + private void loadDatasourceClient(DataSourceChannelFactory datasourceChannelFactory) { + DataSourceChannel datasourceChannel = datasourceChannelFactory.create(); + datasourceClientMap.put(datasourceChannelFactory.getName(), datasourceChannel); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProvider.java new file mode 100644 index 0000000000000000000000000000000000000000..ac93ee1fad37bb5725efa58136d65f8e98c37e3b --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProvider.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.api.provider; + +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.sql.Driver; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.zaxxer.hikari.HikariDataSource; + +/** + * Jdbc Data Source Provider + */ +public class JdbcDataSourceProvider { + + private static final Logger logger = LoggerFactory.getLogger(JdbcDataSourceProvider.class); + + public static HikariDataSource createJdbcDataSource(BaseConnectionParam properties, DbType dbType) { + logger.info("Creating HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); + HikariDataSource dataSource = new HikariDataSource(); + + //TODO Support multiple versions of data sources + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + loaderJdbcDriver(classLoader, properties, dbType); + + dataSource.setDriverClassName(properties.getDriverClassName()); + dataSource.setJdbcUrl(DatasourceUtil.getJdbcUrl(dbType, properties)); + dataSource.setUsername(properties.getUser()); + dataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword())); + + dataSource.setMinimumIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5)); + dataSource.setMaximumPoolSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); + dataSource.setConnectionTestQuery(properties.getValidationQuery()); + + if (properties.getProps() != 
null) { + properties.getProps().forEach(dataSource::addDataSourceProperty); + } + + logger.info("Creating HikariDataSource pool success."); + return dataSource; + } + + /** + * @return One Session Jdbc DataSource + */ + public static HikariDataSource createOneSessionJdbcDataSource(BaseConnectionParam properties, DbType dbType) { + logger.info("Creating OneSession HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); + + HikariDataSource dataSource = new HikariDataSource(); + + dataSource.setDriverClassName(properties.getDriverClassName()); + dataSource.setJdbcUrl(DatasourceUtil.getJdbcUrl(dbType, properties)); + dataSource.setUsername(properties.getUser()); + dataSource.setPassword(PasswordUtils.decodePassword(properties.getPassword())); + + Boolean isOneSession = PropertyUtils.getBoolean(Constants.SUPPORT_HIVE_ONE_SESSION, false); + dataSource.setMinimumIdle(isOneSession ? 1 : PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE, 5)); + dataSource.setMaximumPoolSize(isOneSession ? 1 : PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); + dataSource.setConnectionTestQuery(properties.getValidationQuery()); + + if (properties.getProps() != null) { + properties.getProps().forEach(dataSource::addDataSourceProperty); + } + + logger.info("Creating OneSession HikariDataSource pool success."); + return dataSource; + } + + protected static void loaderJdbcDriver(ClassLoader classLoader, BaseConnectionParam properties, DbType dbType) { + String drv = StringUtils.isBlank(properties.getDriverClassName()) ? DatasourceUtil.getDatasourceProcessor(dbType).getDatasourceDriver() : properties.getDriverClassName(); + try { + final Class clazz = Class.forName(drv, true, classLoader); + final Driver driver = (Driver) clazz.newInstance(); + if (!driver.acceptsURL(properties.getJdbcUrl())) { + logger.warn("Jdbc driver loading error. 
Driver {} cannot accept url.", drv); + throw new RuntimeException("Jdbc driver loading error."); + } + //Compatible historical version data source connection information + if (dbType.equals(DbType.MYSQL) && driver.getMajorVersion() <= 8) { + properties.setDriverClassName(Constants.COM_MYSQL_JDBC_DRIVER); + } else { + properties.setDriverClassName(drv); + } + } catch (final Exception e) { + logger.warn("The specified driver not suitable."); + } + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/CommonUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java similarity index 78% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/CommonUtils.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java index 0ce90adc2715a233e023d19d56ac0cfa471a9842..349638c2aa10086027f37f5404cec77f36d1110b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/CommonUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.util; +package org.apache.dolphinscheduler.plugin.datasource.api.utils; import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION; import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE; @@ -27,6 +27,7 @@ import static org.apache.dolphinscheduler.spi.task.TaskConstants.LOGIN_USER_KEY_ import static org.apache.dolphinscheduler.spi.task.TaskConstants.RESOURCE_STORAGE_TYPE; import org.apache.dolphinscheduler.spi.enums.ResUploadType; +import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.apache.dolphinscheduler.spi.utils.PropertyUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; @@ -78,7 +79,9 @@ public class CommonUtils { * @throws IOException errors */ public static void loadKerberosConf(String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) throws IOException { - loadKerberosConf(javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath, new Configuration()); + Configuration configuration = new Configuration(); + configuration.setClassLoader(configuration.getClass().getClassLoader()); + loadKerberosConf(javaSecurityKrb5Conf, loginUserKeytabUsername, loginUserKeytabPath, configuration); } /** @@ -103,4 +106,37 @@ public class CommonUtils { return false; } + /** + * hdfs udf dir + * + * @param tenantCode tenant code + * @return get udf dir on hdfs + */ + public static String getHdfsUdfDir(String tenantCode) { + return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); + } + + /** + * @param tenantCode tenant code + * @return file directory of tenants on hdfs + */ + public static String getHdfsTenantDir(String tenantCode) { + return String.format("%s/%s", getHdfsDataBasePath(), tenantCode); + } + + /** + * get data hdfs path + * + * @return data hdfs path + */ + public static String getHdfsDataBasePath() { + String resourceUploadPath = 
PropertyUtils.getString(TaskConstants.RESOURCE_UPLOAD_PATH, "/dolphinscheduler"); + if ("/".equals(resourceUploadPath)) { + // if basepath is configured to /, the generated url may be //default/resources (with extra leading /) + return ""; + } else { + return resourceUploadPath; + } + } + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtil.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtil.java similarity index 73% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtil.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtil.java index 45dcece4acd2aab653ca05a6719c320abdd035d5..19dd7a755521df8a3b91f5fb2f1af1aeff17dfc6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtil.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtil.java @@ -15,20 +15,21 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource; - -import org.apache.dolphinscheduler.common.datasource.clickhouse.ClickHouseDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.db2.Db2DatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.hive.HiveDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.oracle.OracleDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.presto.PrestoDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.spark.SparkDatasourceProcessor; -import org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerDatasourceProcessor; -import org.apache.dolphinscheduler.common.enums.DbType; - -import java.sql.Connection; +package org.apache.dolphinscheduler.plugin.datasource.api.utils; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.DatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse.ClickHouseDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2.Db2DatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive.HiveDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle.OracleDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql.PostgreSqlDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto.PrestoDatasourceProcessor; +import 
org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark.SparkDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver.SqlServerDatasourceProcessor; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -77,14 +78,6 @@ public class DatasourceUtil { return getDatasourceProcessor(dbType).createConnectionParams(connectionJson); } - public static Connection getConnection(DbType dbType, ConnectionParam connectionParam) { - try { - return getDatasourceProcessor(dbType).getConnection(connectionParam); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - public static String getJdbcUrl(DbType dbType, ConnectionParam baseConnectionParam) { return getDatasourceProcessor(dbType).getJdbcUrl(baseConnectionParam); } @@ -118,4 +111,10 @@ public class DatasourceUtil { } } + /** + * get datasource UniqueId + */ + public static String getDatasourceUniqueId(ConnectionParam connectionParam, DbType dbType) { + return getDatasourceProcessor(dbType).getDatasourceUniqueId(connectionParam, dbType); + } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/PasswordUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java similarity index 98% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/PasswordUtils.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java index d71b7d04ee0f4e8fdbdc109bda203d302cb0b505..9bf2f96735cf171b2e291b420f86a0e403615a4c 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/PasswordUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.utils; import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_ENABLE; import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_SALT; diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClientTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClientTest.java new file mode 100644 index 0000000000000000000000000000000000000000..60a7e5959c4819e2a17c63a3098542548bef897d --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClientTest.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.api.client; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlConnectionParam; +import org.apache.dolphinscheduler.plugin.datasource.api.provider.JdbcDataSourceProvider; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import java.sql.Connection; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; +import org.springframework.jdbc.core.JdbcTemplate; + +import com.zaxxer.hikari.HikariDataSource; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest(value = {HikariDataSource.class, CommonDataSourceClient.class, JdbcDataSourceProvider.class, JdbcTemplate.class, Connection.class}) +public class CommonDataSourceClientTest { + + @Mock + private CommonDataSourceClient commonDataSourceClient; + + @Test + public void testPreInit() { + PowerMockito.doNothing().when(commonDataSourceClient).preInit(); + commonDataSourceClient.preInit(); + Mockito.verify(commonDataSourceClient).preInit(); + } + + @Test + public void testCheckEnv() { + 
BaseConnectionParam baseConnectionParam = new MysqlConnectionParam(); + PowerMockito.doNothing().when(commonDataSourceClient).checkEnv(Mockito.any(BaseConnectionParam.class)); + commonDataSourceClient.checkEnv(baseConnectionParam); + Mockito.verify(commonDataSourceClient).checkEnv(Mockito.any(BaseConnectionParam.class)); + + PowerMockito.doNothing().when(commonDataSourceClient).checkValidationQuery(Mockito.any(BaseConnectionParam.class)); + commonDataSourceClient.checkValidationQuery(baseConnectionParam); + Mockito.verify(commonDataSourceClient).checkValidationQuery(Mockito.any(BaseConnectionParam.class)); + + PowerMockito.doNothing().when(commonDataSourceClient).checkUser(Mockito.any(BaseConnectionParam.class)); + commonDataSourceClient.checkUser(baseConnectionParam); + Mockito.verify(commonDataSourceClient).checkUser(Mockito.any(BaseConnectionParam.class)); + + PowerMockito.doNothing().when(commonDataSourceClient).setDefaultUsername(Mockito.any(BaseConnectionParam.class)); + commonDataSourceClient.setDefaultUsername(baseConnectionParam); + Mockito.verify(commonDataSourceClient).setDefaultUsername(Mockito.any(BaseConnectionParam.class)); + + PowerMockito.doNothing().when(commonDataSourceClient).setDefaultPassword(Mockito.any(BaseConnectionParam.class)); + commonDataSourceClient.setDefaultPassword(baseConnectionParam); + Mockito.verify(commonDataSourceClient).setDefaultPassword(Mockito.any(BaseConnectionParam.class)); + + } + + @Test + public void testInitClient() { + BaseConnectionParam baseConnectionParam = new MysqlConnectionParam(); + PowerMockito.doNothing().when(commonDataSourceClient).initClient(Mockito.any(BaseConnectionParam.class), Mockito.any()); + commonDataSourceClient.initClient(baseConnectionParam, DbType.MYSQL); + Mockito.verify(commonDataSourceClient).initClient(Mockito.any(BaseConnectionParam.class), Mockito.any()); + } + + @Test + public void testCheckClient() { + PowerMockito.doNothing().when(this.commonDataSourceClient).checkClient(); + 
this.commonDataSourceClient.checkClient(); + Mockito.verify(this.commonDataSourceClient).checkClient(); + } + + @Test + public void testGetConnection() { + Connection connection = PowerMockito.mock(Connection.class); + PowerMockito.when(commonDataSourceClient.getConnection()).thenReturn(connection); + Assert.assertNotNull(commonDataSourceClient.getConnection()); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java similarity index 74% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java index 75c1f3c17dd5832228afa1429a715f313a2b64c8..106aa300a151f41c003edb79c624b25a41457a43 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/clickhouse/ClickHouseDatasourceProcessorTest.java @@ -15,33 +15,46 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.clickhouse; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class ClickHouseDatasourceProcessorTest { private ClickHouseDatasourceProcessor clickHouseDatasourceProcessor = new ClickHouseDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); ClickHouseDatasourceParamDTO clickhouseConnectionParam = new ClickHouseDatasourceParamDTO(); clickhouseConnectionParam.setUserName("user"); clickhouseConnectionParam.setPassword("password"); clickhouseConnectionParam.setHost("localhost"); clickhouseConnectionParam.setPort(8123); clickhouseConnectionParam.setDatabase("default"); + clickhouseConnectionParam.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + 
PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); ClickhouseConnectionParam connectionParams = (ClickhouseConnectionParam) clickHouseDatasourceProcessor .createConnectionParams(clickhouseConnectionParam); Assert.assertNotNull(connectionParams); @@ -80,4 +93,9 @@ public class ClickHouseDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.CLICKHOUSE, clickHouseDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.CLICKHOUSE_VALIDATION_QUERY, clickHouseDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessorTest.java similarity index 72% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessorTest.java index 21b1bc4663d526485bb5970f199c3ea5f357eb10..7b405aae9347e3c874f1bcd1a1ced18caf756211 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/db2/Db2DatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/db2/Db2DatasourceProcessorTest.java @@ -15,33 +15,46 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.db2; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class Db2DatasourceProcessorTest { private Db2DatasourceProcessor db2DatasourceProcessor = new Db2DatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); Db2DatasourceParamDTO db2DatasourceParamDTO = new Db2DatasourceParamDTO(); db2DatasourceParamDTO.setUserName("root"); db2DatasourceParamDTO.setPassword("123456"); db2DatasourceParamDTO.setHost("localhost"); db2DatasourceParamDTO.setPort(5142); db2DatasourceParamDTO.setDatabase("default"); + db2DatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); 
Db2ConnectionParam connectionParams = (Db2ConnectionParam) db2DatasourceProcessor .createConnectionParams(db2DatasourceParamDTO); @@ -80,4 +93,8 @@ public class Db2DatasourceProcessorTest { Assert.assertEquals(DbType.DB2, db2DatasourceProcessor.getDbType()); } + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.DB2_VALIDATION_QUERY, db2DatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessorTest.java similarity index 68% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessorTest.java index 230b0855e5b40c358163fce5bf93077dcca2639f..84e8fd8898345a2747ac998a5130cbf04b4eb1d5 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/hive/HiveDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/hive/HiveDatasourceProcessorTest.java @@ -15,36 +15,50 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.hive; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class HiveDatasourceProcessorTest { private HiveDatasourceProcessor hiveDatasourceProcessor = new HiveDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO(); hiveDataSourceParamDTO.setHost("localhost1,localhost2"); hiveDataSourceParamDTO.setPort(5142); hiveDataSourceParamDTO.setUserName("default"); hiveDataSourceParamDTO.setDatabase("default"); + hiveDataSourceParamDTO.setOther(props); + 
PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); + PowerMockito.mockStatic(CommonUtils.class); + PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(false); HiveConnectionParam connectionParams = (HiveConnectionParam) hiveDatasourceProcessor .createConnectionParams(hiveDataSourceParamDTO); - System.out.println(JSONUtils.toJsonString(connectionParams)); Assert.assertNotNull(connectionParams); Assert.assertEquals("jdbc:hive2://localhost1:5142,localhost2:5142", connectionParams.getAddress()); } @@ -76,4 +90,9 @@ public class HiveDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.HIVE, hiveDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.HIVE_VALIDATION_QUERY, hiveDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessorTest.java similarity index 63% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessorTest.java index 89bf4b15f19e0d0b0123c48460498fd978da7b54..f54b28d44dd588b32e1770f04d17574527105ee1 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/mysql/MysqlDatasourceProcessorTest.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/mysql/MysqlDatasourceProcessorTest.java @@ -15,38 +15,49 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource.mysql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class MysqlDatasourceProcessorTest { private MysqlDatasourceProcessor mysqlDatasourceProcessor = new MysqlDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO(); 
mysqlDatasourceParamDTO.setUserName("root"); mysqlDatasourceParamDTO.setPassword("123456"); mysqlDatasourceParamDTO.setHost("localhost"); mysqlDatasourceParamDTO.setPort(3306); mysqlDatasourceParamDTO.setDatabase("default"); - + mysqlDatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); MysqlConnectionParam connectionParams = (MysqlConnectionParam) mysqlDatasourceProcessor .createConnectionParams(mysqlDatasourceParamDTO); - System.out.println(JSONUtils.toJsonString(connectionParams)); Assert.assertEquals("jdbc:mysql://localhost:3306", connectionParams.getAddress()); Assert.assertEquals("jdbc:mysql://localhost:3306/default", connectionParams.getJdbcUrl()); } @@ -63,7 +74,7 @@ public class MysqlDatasourceProcessorTest { @Test public void testGetDatasourceDriver() { - Assert.assertEquals(Constants.COM_MYSQL_JDBC_DRIVER, mysqlDatasourceProcessor.getDatasourceDriver()); + Assert.assertEquals(Constants.COM_MYSQL_CJ_JDBC_DRIVER, mysqlDatasourceProcessor.getDatasourceDriver()); } @Test @@ -78,4 +89,17 @@ public class MysqlDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.MYSQL, mysqlDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.MYSQL_VALIDATION_QUERY, mysqlDatasourceProcessor.getValidationQuery()); + } + + @Test + public void testGetDatasourceUniqueId() { + MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam(); + mysqlConnectionParam.setJdbcUrl("jdbc:mysql://localhost:3306/default"); + mysqlConnectionParam.setUser("root"); + Assert.assertEquals("mysql@root@jdbc:mysql://localhost:3306/default", mysqlDatasourceProcessor.getDatasourceUniqueId(mysqlConnectionParam, DbType.MYSQL)); + } } \ No newline at end of file diff --git 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessorTest.java similarity index 68% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessorTest.java index 4f0fce2d9b00b24f090cd95b5a86d8d1bf628e32..36b5e35dfb91307a080692ef0b2b384bb8151cb2 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/oracle/OracleDatasourceProcessorTest.java @@ -15,28 +15,38 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.oracle; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbConnectType; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbConnectType; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class OracleDatasourceProcessorTest { private OracleDatasourceProcessor oracleDatasourceProcessor = new OracleDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); OracleDatasourceParamDTO oracleDatasourceParamDTO = new OracleDatasourceParamDTO(); oracleDatasourceParamDTO.setConnectType(DbConnectType.ORACLE_SID); oracleDatasourceParamDTO.setHost("localhost"); @@ -44,18 +54,20 @@ public class OracleDatasourceProcessorTest { oracleDatasourceParamDTO.setUserName("root"); 
oracleDatasourceParamDTO.setPassword("123456"); oracleDatasourceParamDTO.setDatabase("default"); - + oracleDatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); OracleConnectionParam connectionParams = (OracleConnectionParam) oracleDatasourceProcessor .createConnectionParams(oracleDatasourceParamDTO); Assert.assertNotNull(connectionParams); Assert.assertEquals("jdbc:oracle:thin:@localhost:3308", connectionParams.getAddress()); - Assert.assertEquals("jdbc:oracle:thin:@localhost:3308/default", connectionParams.getJdbcUrl()); + Assert.assertEquals("jdbc:oracle:thin:@localhost:3308:default", connectionParams.getJdbcUrl()); } @Test public void testCreateConnectionParams2() { String connectionJson = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:oracle:thin:@localhost:3308\"" - + ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:oracle:thin:@localhost:3308/default\",\"connectType\":\"ORACLE_SID\"}"; + + ",\"database\":\"default\",\"jdbcUrl\":\"jdbc:oracle:thin:@localhost:3308:default\",\"connectType\":\"ORACLE_SID\"}"; OracleConnectionParam connectionParams = (OracleConnectionParam) oracleDatasourceProcessor .createConnectionParams(connectionJson); Assert.assertNotNull(connectionParams); @@ -70,9 +82,9 @@ public class OracleDatasourceProcessorTest { @Test public void testGetJdbcUrl() { OracleConnectionParam oracleConnectionParam = new OracleConnectionParam(); - oracleConnectionParam.setJdbcUrl("jdbc:oracle:thin:@localhost:3308/default"); + oracleConnectionParam.setJdbcUrl("jdbc:oracle:thin:@localhost:3308:default"); oracleConnectionParam.setOther("other=other"); - Assert.assertEquals("jdbc:oracle:thin:@localhost:3308/default?other=other", + Assert.assertEquals("jdbc:oracle:thin:@localhost:3308:default?other=other", oracleDatasourceProcessor.getJdbcUrl(oracleConnectionParam)); } @@ -80,4 +92,9 @@ public class OracleDatasourceProcessorTest { 
public void getDbType() { Assert.assertEquals(DbType.ORACLE, oracleDatasourceProcessor.getDbType()); } -} \ No newline at end of file + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.ORACLE_VALIDATION_QUERY, oracleDatasourceProcessor.getValidationQuery()); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java similarity index 73% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java index 178c30ce7b86bcaa662a31af7be9e7843862652f..0555cbfa137ea85d01304bc84a8a9982173b0cd4 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/postgresql/PostgreSqlDatasourceProcessorTest.java @@ -15,34 +15,46 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.postgresql; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class PostgreSqlDatasourceProcessorTest { private PostgreSqlDatasourceProcessor postgreSqlDatasourceProcessor = new PostgreSqlDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); PostgreSqlDatasourceParamDTO postgreSqlDatasourceParamDTO = new PostgreSqlDatasourceParamDTO(); postgreSqlDatasourceParamDTO.setUserName("root"); postgreSqlDatasourceParamDTO.setPassword("123456"); postgreSqlDatasourceParamDTO.setHost("localhost"); postgreSqlDatasourceParamDTO.setPort(3308); postgreSqlDatasourceParamDTO.setDatabase("default"); - + postgreSqlDatasourceParamDTO.setOther(props); + 
PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) postgreSqlDatasourceProcessor .createConnectionParams(postgreSqlDatasourceParamDTO); Assert.assertEquals("jdbc:postgresql://localhost:3308", connectionParams.getAddress()); @@ -80,4 +92,9 @@ public class PostgreSqlDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.POSTGRESQL, postgreSqlDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.POSTGRESQL_VALIDATION_QUERY, postgreSqlDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessorTest.java similarity index 72% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessorTest.java index a2bfe997bd2132a329cee04ab9178fb54a8bd1ed..5145cd2e213b46e3b8b2bccfdfcfe2ffba757937 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/presto/PrestoDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/presto/PrestoDatasourceProcessorTest.java @@ -15,34 +15,46 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.presto; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.presto; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class PrestoDatasourceProcessorTest { private PrestoDatasourceProcessor prestoDatasourceProcessor = new PrestoDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); PrestoDatasourceParamDTO prestoDatasourceParamDTO = new PrestoDatasourceParamDTO(); prestoDatasourceParamDTO.setHost("localhost"); prestoDatasourceParamDTO.setPort(1234); prestoDatasourceParamDTO.setDatabase("default"); prestoDatasourceParamDTO.setUserName("root"); prestoDatasourceParamDTO.setPassword("123456"); - + prestoDatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + 
PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); PrestoConnectionParam connectionParams = (PrestoConnectionParam) prestoDatasourceProcessor .createConnectionParams(prestoDatasourceParamDTO); Assert.assertEquals("jdbc:presto://localhost:1234", connectionParams.getAddress()); @@ -78,4 +90,9 @@ public class PrestoDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.PRESTO, prestoDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.PRESTO_VALIDATION_QUERY, prestoDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessorTest.java similarity index 70% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessorTest.java index 706c0aef0fd15d430c1bbc163169bbdb54bb0109..7e55e26848bcf2503a2606ab38b9df130374f503 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/spark/SparkDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/spark/SparkDatasourceProcessorTest.java @@ -15,34 +15,48 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.spark; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.spark; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class SparkDatasourceProcessorTest { private SparkDatasourceProcessor sparkDatasourceProcessor = new SparkDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); SparkDatasourceParamDTO sparkDatasourceParamDTO = new SparkDatasourceParamDTO(); sparkDatasourceParamDTO.setUserName("root"); sparkDatasourceParamDTO.setPassword("12345"); sparkDatasourceParamDTO.setHost("localhost1,localhost2"); sparkDatasourceParamDTO.setPort(1234); sparkDatasourceParamDTO.setDatabase("default"); - + sparkDatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + 
PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); + PowerMockito.mockStatic(CommonUtils.class); + PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(false); SparkConnectionParam connectionParams = (SparkConnectionParam) sparkDatasourceProcessor .createConnectionParams(sparkDatasourceParamDTO); Assert.assertEquals("jdbc:hive2://localhost1:1234,localhost2:1234", connectionParams.getAddress()); @@ -77,4 +91,9 @@ public class SparkDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.SPARK, sparkDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.HIVE_VALIDATION_QUERY, sparkDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessorTest.java similarity index 72% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessorTest.java index 39a23f6e25aa9f5cb30dab5f7891d6199d8de4fe..de5956035cbb46882a7a959c8269ef0b99902d93 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/sqlserver/SqlServerDatasourceProcessorTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/datasource/sqlserver/SqlServerDatasourceProcessorTest.java @@ -15,35 +15,47 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.sqlserver; +package org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.sql.DriverManager; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class}) +@PrepareForTest({Class.class, DriverManager.class, DatasourceUtil.class, CommonUtils.class, DataSourceClientProvider.class, PasswordUtils.class}) public class SqlServerDatasourceProcessorTest { private SqlServerDatasourceProcessor sqlServerDatasourceProcessor = new SqlServerDatasourceProcessor(); @Test public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); SqlServerDatasourceParamDTO sqlServerDatasourceParamDTO = new SqlServerDatasourceParamDTO(); sqlServerDatasourceParamDTO.setUserName("root"); sqlServerDatasourceParamDTO.setPassword("123456"); sqlServerDatasourceParamDTO.setDatabase("default"); sqlServerDatasourceParamDTO.setHost("localhost"); 
sqlServerDatasourceParamDTO.setPort(1234); - + sqlServerDatasourceParamDTO.setOther(props); + PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); SqlServerConnectionParam connectionParams = (SqlServerConnectionParam) sqlServerDatasourceProcessor .createConnectionParams(sqlServerDatasourceParamDTO); Assert.assertEquals("jdbc:sqlserver://localhost:1234", connectionParams.getAddress()); @@ -78,4 +90,9 @@ public class SqlServerDatasourceProcessorTest { public void testGetDbType() { Assert.assertEquals(DbType.SQLSERVER, sqlServerDatasourceProcessor.getDbType()); } + + @Test + public void testGetValidationQuery() { + Assert.assertEquals(Constants.SQLSERVER_VALIDATION_QUERY, sqlServerDatasourceProcessor.getValidationQuery()); + } } \ No newline at end of file diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProviderTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProviderTest.java new file mode 100644 index 0000000000000000000000000000000000000000..9167dd2e7bcf80bb666250b70b432d6959ddbd4a --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JdbcDataSourceProviderTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.api.provider; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import com.zaxxer.hikari.HikariDataSource; + +@RunWith(PowerMockRunner.class) +@PrepareForTest(value = {HikariDataSource.class, JdbcDataSourceProvider.class}) +public class JdbcDataSourceProviderTest { + + @Test + public void testCreateJdbcDataSource() { + PowerMockito.mockStatic(JdbcDataSourceProvider.class); + HikariDataSource dataSource = PowerMockito.mock(HikariDataSource.class); + PowerMockito.when(JdbcDataSourceProvider.createJdbcDataSource(Mockito.any(), Mockito.any())).thenReturn(dataSource); + Assert.assertNotNull(JdbcDataSourceProvider.createJdbcDataSource(new MysqlConnectionParam(), DbType.MYSQL)); + } + + @Test + public void testCreateOneSessionJdbcDataSource() { + PowerMockito.mockStatic(JdbcDataSourceProvider.class); + HikariDataSource dataSource = PowerMockito.mock(HikariDataSource.class); + PowerMockito.when(JdbcDataSourceProvider.createOneSessionJdbcDataSource(Mockito.any(), Mockito.any())).thenReturn(dataSource); + Assert.assertNotNull(JdbcDataSourceProvider.createOneSessionJdbcDataSource(new MysqlConnectionParam(), 
DbType.MYSQL)); + } + +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtilsTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtilsTest.java new file mode 100644 index 0000000000000000000000000000000000000000..61f78ba634210314bc220d2ed41c491afd522c7d --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtilsTest.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.api.utils; + +import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATASOURCE_ENCRYPTION_ENABLE; + +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.spi.utils.PropertyUtils") +@PrepareForTest(value = {PropertyUtils.class, UserGroupInformation.class, CommonUtils.class, PasswordUtils.class}) +public class CommonUtilsTest { + + @Test + public void testGetKerberosStartupState() { + PowerMockito.mockStatic(CommonUtils.class); + PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(false); + boolean kerberosStartupState = CommonUtils.getKerberosStartupState(); + Assert.assertFalse(kerberosStartupState); + + PowerMockito.mockStatic(PropertyUtils.class); + PowerMockito.when(PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE)).thenReturn("HDFS"); + PowerMockito.when(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, true)).thenReturn(Boolean.TRUE); + kerberosStartupState = CommonUtils.getKerberosStartupState(); + Assert.assertFalse(kerberosStartupState); + } + + @Test + public void testLoadKerberosConf() { + try { + PowerMockito.mockStatic(PropertyUtils.class); + PowerMockito.when(PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE)).thenReturn("HDFS"); + 
PowerMockito.when(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)).thenReturn(Boolean.TRUE); + PowerMockito.when(PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)).thenReturn("/opt/krb5.conf"); + PowerMockito.when(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME)).thenReturn("hdfs-mycluster@ESZ.COM"); + PowerMockito.when(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)).thenReturn("/opt/hdfs.headless.keytab"); + + PowerMockito.mockStatic(UserGroupInformation.class); + boolean result = CommonUtils.loadKerberosConf(new Configuration()); + Assert.assertTrue(result); + + CommonUtils.loadKerberosConf(null, null, null); + + } catch (Exception e) { + Assert.fail("load Kerberos Conf failed"); + } + } + + @Test + public void encodePassword() { + PowerMockito.mockStatic(PropertyUtils.class); + PowerMockito.when(PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false)).thenReturn(Boolean.TRUE); + + Assert.assertEquals("", PasswordUtils.encodePassword("")); + Assert.assertEquals("bnVsbE1USXpORFUy", PasswordUtils.encodePassword("123456")); + Assert.assertEquals("bnVsbElWRkJXbGhUVjBBPQ==", PasswordUtils.encodePassword("!QAZXSW@")); + Assert.assertEquals("bnVsbE5XUm1aMlZ5S0VBPQ==", PasswordUtils.encodePassword("5dfger(@")); + + PowerMockito.when(PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false)).thenReturn(Boolean.FALSE); + + Assert.assertEquals("", PasswordUtils.encodePassword("")); + Assert.assertEquals("123456", PasswordUtils.encodePassword("123456")); + Assert.assertEquals("!QAZXSW@", PasswordUtils.encodePassword("!QAZXSW@")); + Assert.assertEquals("5dfger(@", PasswordUtils.encodePassword("5dfger(@")); + + } + + @Test + public void decodePassword() { + PowerMockito.mockStatic(PropertyUtils.class); + PowerMockito.when(PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false)).thenReturn(Boolean.TRUE); + + PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true"); + + 
PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.decodePassword("bnVsbE1USXpORFUy")).thenReturn("123456"); + PowerMockito.when(PasswordUtils.decodePassword("bnVsbElWRkJXbGhUVjBBPQ==")).thenReturn("!QAZXSW@"); + PowerMockito.when(PasswordUtils.decodePassword("bnVsbE5XUm1aMlZ5S0VBPQ==")).thenReturn("5dfger(@"); + + Assert.assertEquals(null, PasswordUtils.decodePassword("")); + Assert.assertEquals("123456", PasswordUtils.decodePassword("bnVsbE1USXpORFUy")); + Assert.assertEquals("!QAZXSW@", PasswordUtils.decodePassword("bnVsbElWRkJXbGhUVjBBPQ==")); + Assert.assertEquals("5dfger(@", PasswordUtils.decodePassword("bnVsbE5XUm1aMlZ5S0VBPQ==")); + + PowerMockito.when(PropertyUtils.getBoolean(DATASOURCE_ENCRYPTION_ENABLE, false)).thenReturn(Boolean.FALSE); + + PowerMockito.when(PasswordUtils.decodePassword("123456")).thenReturn("123456"); + PowerMockito.when(PasswordUtils.decodePassword("!QAZXSW@")).thenReturn("!QAZXSW@"); + PowerMockito.when(PasswordUtils.decodePassword("5dfger(@")).thenReturn("5dfger(@"); + + Assert.assertEquals(null, PasswordUtils.decodePassword("")); + Assert.assertEquals("123456", PasswordUtils.decodePassword("123456")); + Assert.assertEquals("!QAZXSW@", PasswordUtils.decodePassword("!QAZXSW@")); + Assert.assertEquals("5dfger(@", PasswordUtils.decodePassword("5dfger(@")); + } + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtilTest.java similarity index 76% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtilTest.java index 
1b8b59cd99a41e8e7fab6c1597bf4bedc2fcde29..0078fb03d54d426565579007a527a8f30b0435c9 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/datasource/DatasourceUtilTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/test/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DatasourceUtilTest.java @@ -15,19 +15,20 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.plugin.datasource.api.utils; -import java.util.HashMap; -import java.util.Map; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceParamDTO; -import org.apache.dolphinscheduler.common.datasource.mysql.MysqlDatasourceProcessor; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlConnectionParam; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlDatasourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; import java.sql.Connection; import java.sql.DriverManager; -import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; import org.junit.Assert; import org.junit.Test; @@ -38,7 +39,7 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@PrepareForTest({Class.class, DriverManager.class, MysqlDatasourceProcessor.class}) 
+@PrepareForTest({Class.class, DriverManager.class, MysqlDatasourceProcessor.class, DataSourceClientProvider.class, PasswordUtils.class, CommonUtils.class}) public class DatasourceUtilTest { @Test @@ -63,6 +64,10 @@ public class DatasourceUtilTest { mysqlDatasourceParamDTO.setUserName("root"); mysqlDatasourceParamDTO.setPort(3306); mysqlDatasourceParamDTO.setPassword("123456"); + PowerMockito.mockStatic(PasswordUtils.class); + PowerMockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("123456"); + PowerMockito.mockStatic(CommonUtils.class); + PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(false); ConnectionParam connectionParam = DatasourceUtil.buildConnectionParams(mysqlDatasourceParamDTO); Assert.assertNotNull(connectionParam); } @@ -80,18 +85,21 @@ public class DatasourceUtilTest { } @Test - public void testGetConnection() throws ClassNotFoundException, SQLException { - PowerMockito.mockStatic(Class.class); - PowerMockito.when(Class.forName(Mockito.any())).thenReturn(null); - PowerMockito.mockStatic(DriverManager.class); - PowerMockito.when(DriverManager.getConnection(Mockito.any(), Mockito.any(), Mockito.any())).thenReturn(null); + public void testGetConnection() { + + PowerMockito.mockStatic(DataSourceClientProvider.class); + DataSourceClientProvider clientProvider = PowerMockito.mock(DataSourceClientProvider.class); + PowerMockito.when(DataSourceClientProvider.getInstance()).thenReturn(clientProvider); + + Connection connection = PowerMockito.mock(Connection.class); + PowerMockito.when(clientProvider.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection); MysqlConnectionParam connectionParam = new MysqlConnectionParam(); connectionParam.setUser("root"); connectionParam.setPassword("123456"); - Connection connection = DatasourceUtil.getConnection(DbType.MYSQL, connectionParam); + connection = DataSourceClientProvider.getInstance().getConnection(DbType.MYSQL, connectionParam); - 
Assert.assertNull(connection); + Assert.assertNotNull(connection); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/pom.xml similarity index 46% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/pom.xml index 9c2347ec25760fd8b0f0149be2eee3c8fc27ad00..7df716442fa3320ee6f685ee6be43d2bb4dcf6b6 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/pom.xml @@ -15,73 +15,54 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + - dolphinscheduler-alert-plugin + dolphinscheduler-datasource-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - org.apache.dolphinscheduler - dolphinscheduler-alert-email - - dolphinscheduler-plugin + dolphinscheduler-datasource-clickhouse + ${project.artifactId} + jar - - org.apache.commons - commons-collections4 - - - - org.apache.poi - poi - - - org.apache.poi - poi-ooxml - - - - com.google.guava - guava - - - - ch.qos.logback - logback-classic - - - - org.slf4j - slf4j-api - - org.apache.commons - commons-email - - - - com.fasterxml.jackson.core - jackson-annotations + org.apache.dolphinscheduler + dolphinscheduler-spi provided - junit - junit - test + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} - org.mockito - mockito-core - jar - test + ru.yandex.clickhouse + clickhouse-jdbc + + + jaxb-api + javax.xml.bind + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-core + + + ${clickhouse.jdbc.version} @@ -94,24 +75,13 @@ org.powermock powermock-api-mockito2 test - - - org.mockito - mockito-core - - - 
org.jacoco - org.jacoco.agent - runtime + org.mockito + mockito-core + jar test - - - dolphinscheduler-alert-email-${project.version} - - diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..ee2d231dbbcccffd62ba46df382674843309d6d1 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.clickhouse; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class ClickhouseDataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new ClickhouseDataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..c81d810efbb139b9142661a0bbe23aa49484ee13 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.clickhouse; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class ClickhouseDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "clickhouse"; + } + + @Override + public DataSourceChannel create() { + return new ClickhouseDataSourceChannel(); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceClient.java similarity index 62% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceClient.java index 973f1617a6dd7088619367ed19f7fca2d4d4d28f..dc6b1e05040a0cdb11a3e6902987e138adcaed1d 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPlugin.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/main/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceClient.java @@ -15,20 +15,16 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.http; +package org.apache.dolphinscheduler.plugin.datasource.clickhouse; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; +public class ClickhouseDataSourceClient extends CommonDataSourceClient { -/** - * http alertPlugins - */ -public class HttpAlertPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new HttpAlertChannelFactory()); + public ClickhouseDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); } + } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPluginTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactoryTest.java similarity index 65% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPluginTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactoryTest.java index 7dac686e882942606714e331b463345aeb777d42..5bfb884d973f7704a297836f207d99f55015c662 100644 
--- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/src/test/java/org/apache/dolphinscheduler/plugin/alert/http/HttpAlertPluginTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelFactoryTest.java @@ -15,24 +15,19 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.http; +package org.apache.dolphinscheduler.plugin.datasource.clickhouse; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; import org.junit.Assert; import org.junit.Test; -/** - * HttpAlertPlugin UT - */ -public class HttpAlertPluginTest { +public class ClickhouseDataSourceChannelFactoryTest { @Test - public void getAlertChannelFactorysTest() { - - HttpAlertPlugin httpAlertPlugin = new HttpAlertPlugin(); - Iterable alertChannelFactorys = httpAlertPlugin.getAlertChannelFactorys(); - Assert.assertNotNull(alertChannelFactorys); - + public void testCreate() { + ClickhouseDataSourceChannelFactory sourceChannelFactory = new ClickhouseDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..699c86f31a99569ff66136554c107c7fd94c8e1d --- /dev/null +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-clickhouse/src/test/java/org/apache/dolphinscheduler/plugin/datasource/clickhouse/ClickhouseDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.clickhouse; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.clickhouse.ClickhouseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({ClickhouseDataSourceClient.class, ClickhouseDataSourceChannel.class}) +public class ClickhouseDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + ClickhouseDataSourceChannel sourceChannel = PowerMockito.mock(ClickhouseDataSourceChannel.class); + ClickhouseDataSourceClient dataSourceClient = PowerMockito.mock(ClickhouseDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + Assert.assertNotNull(sourceChannel.createDataSourceClient(new ClickhouseConnectionParam(), DbType.CLICKHOUSE)); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..a6b41e828ba1ac8e86cce2de179190084c928021 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/pom.xml @@ -0,0 +1,63 @@ + + + + + dolphinscheduler-datasource-plugin + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-datasource-db2 + ${project.artifactId} + jar + + + + + org.apache.dolphinscheduler + 
dolphinscheduler-spi + provided + + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + + + + org.powermock + powermock-module-junit4 + test + + + + org.powermock + powermock-api-mockito2 + test + + + + org.mockito + mockito-core + jar + test + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..fac46b0802720a3ff9afd3da1dceed3aaf91754e --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.db2; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class DB2DataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new DB2DataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserService.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java similarity index 62% rename from dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserService.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java index ad09a346458113ed14bbab27d0f95b32626bd824..cda8a2e59259cf5cf170b56c77be126ebccb37a2 100644 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserService.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactory.java @@ -15,23 +15,23 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.microbench.common; +package org.apache.dolphinscheduler.plugin.datasource.db2; -import org.apache.dolphinscheduler.rpc.base.RpcService; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; -/** - * UserService - */ -@RpcService("IUserService") -public class UserService implements IUserService { +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class DB2DataSourceChannelFactory implements DataSourceChannelFactory { @Override - public Boolean say(String s) { - return true; + public String getName() { + return "db2"; } @Override - public Integer hi(int num) { - return ++num; + public DataSourceChannel create() { + return new DB2DataSourceChannel(); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceClient.java similarity index 63% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceClient.java index b731d2ab245bf55c46d503e5d2a0e344f31d0df3..5db789f2231b33ed351751cd621fb9104b7c78a2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskPlugin.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/main/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceClient.java @@ -15,17 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.task.pigeon; +package org.apache.dolphinscheduler.plugin.datasource.db2; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; +public class DB2DataSourceClient extends CommonDataSourceClient { -public class PigeonTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new PigeonTaskChannelFactory()); + public DB2DataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); } + } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDaoTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactoryTest.java similarity index 65% rename from dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDaoTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactoryTest.java index 4561cabaee03ebdc15ab50655190745eda9f0098..89be3f70a643fcf6c9b384379a9fc6698244abc9 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDaoTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelFactoryTest.java @@ -14,16 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.upgrade; +package org.apache.dolphinscheduler.plugin.datasource.db2; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.Assert; import org.junit.Test; -public class UpgradeDaoTest { - PostgresqlUpgradeDao postgresqlUpgradeDao = PostgresqlUpgradeDao.getInstance(); +public class DB2DataSourceChannelFactoryTest { @Test - public void testQueryQueryAllOldWorkerGroup() throws Exception{ - //postgresqlUpgradeDao.updateProcessDefinitionJsonWorkerGroup(); + public void testCreate() { + DB2DataSourceChannelFactory sourceChannelFactory = new DB2DataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); } - } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~dev b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelTest.java similarity index 42% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~dev rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelTest.java index a96cec9158d302f6eeab96d34d167c69343235bf..b398b899dbc4bf73a5737aa5787078e333132edd 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java~dev +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-db2/src/test/java/org/apache/dolphinscheduler/plugin/datasource/db2/DB2DataSourceChannelTest.java @@ -15,31 +15,30 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.security; +package org.apache.dolphinscheduler.plugin.datasource.db2; -import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.db2.Db2ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringRunner; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; -@RunWith(SpringRunner.class) -@SpringBootTest(classes = ApiApplicationServer.class) -@TestPropertySource(properties = { - "security.authentication.type=LDAP", -}) -public class SecurityConfigLDAPTest { - - @Autowired - private SecurityConfig securityConfig; +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({DB2DataSourceClient.class, DB2DataSourceChannel.class}) +public class DB2DataSourceChannelTest { @Test - public void testAuthenticator() { - Authenticator authenticator = securityConfig.authenticator(); - Assert.assertNotNull(authenticator); + public void testCreateDataSourceClient() { + DB2DataSourceChannel sourceChannel = PowerMockito.mock(DB2DataSourceChannel.class); + DB2DataSourceClient dataSourceClient = PowerMockito.mock(DB2DataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + 
Assert.assertNotNull(sourceChannel.createDataSourceClient(new Db2ConnectionParam(), DbType.DB2)); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..775e507bc7f845c43c1a668e76a107604ef27b1b --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/pom.xml @@ -0,0 +1,343 @@ + + + + + dolphinscheduler-datasource-plugin + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-datasource-hive + ${project.artifactId} + jar + + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + provided + + + + org.apache.hadoop + hadoop-client + + + org.slf4j + slf4j-log4j12 + + + servlet-api + javax.servlet + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + + org.fusesource.leveldbjni + leveldbjni-all + + + org.apache.zookeeper + zookeeper + + + org.apache.hadoop + hadoop-mapreduce-client-shuffle + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jaxb-api + javax.xml.bind + + + log4j + log4j + + + + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + + + + + org.apache.hive + hive-jdbc + ${hive.jdbc.version} + + + slf4j-log4j12 + org.slf4j + + + org.eclipse.jetty.aggregate + jetty-all + + + + org.apache.ant + ant + + + io.dropwizard.metrics + metrics-json + + + io.dropwizard.metrics + metrics-jvm + + + com.github.joshelser + dropwizard-metrics-hadoop-metrics2-reporter + + + + io.netty + netty-all + + + com.google.code.gson + gson + + + com.google.code.findbugs + jsr305 + + + io.dropwizard.metrics + metrics-core + + + javax.servlet + servlet-api + + + org.apache.avro + avro + + + org.apache.commons + commons-compress + + + org.apache.curator + curator-client + + + org.apache.hadoop + hadoop-auth + + + org.apache.hadoop + 
hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-yarn-api + + + + org.apache.zookeeper + zookeeper + + + org.codehaus.jackson + jackson-jaxrs + + + org.codehaus.jackson + jackson-xc + + + com.google.protobuf + protobuf-java + + + org.json + json + + + log4j-slf4j-impl + org.apache.logging.log4j + + + javax.servlet + org.eclipse.jetty.orbit + + + servlet-api-2.5 + org.mortbay.jetty + + + jasper-runtime + tomcat + + + slider-core + org.apache.slider + + + hbase-server + org.apache.hbase + + + jersey-client + com.sun.jersey + + + jersey-core + com.sun.jersey + + + jersey-json + com.sun.jersey + + + jersey-server + com.sun.jersey + + + jersey-guice + com.sun.jersey.contribs + + + hbase-common + org.apache.hbase + + + hbase-hadoop2-compat + org.apache.hbase + + + hbase-client + org.apache.hbase + + + hbase-hadoop-compat + org.apache.hbase + + + tephra-hbase-compat-1.0 + co.cask.tephra + + + jaxb-api + javax.xml.bind + + + hive-llap-client + org.apache.hive + + + hive-llap-common + org.apache.hive + + + hive-llap-server + org.apache.hive + + + tephra-core + co.cask.tephra + + + ant + ant + + + stringtemplate + org.antlr + + + antlr-runtime + org.antlr + + + hive-shims + org.apache.hive + + + jsp-api + javax.servlet + + + log4j-api + org.apache.logging.log4j + + + log4j-core + org.apache.logging.log4j + + + log4j-web + org.apache.logging.log4j + + + jasper-compiler + tomcat + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + + + + org.powermock + powermock-module-junit4 + test + + + + org.powermock + powermock-api-mockito2 + test + + + + org.mockito + mockito-core + jar + test + + + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannel.java 
similarity index 59% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannel.java index 63fe7b792e1b0bcbc022d7f3e01f3f2e8c55910b..fbacbfbf01bab37e0f8139ec9c8c9baf140d193f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskPlugin.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannel.java @@ -15,17 +15,17 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.python; +package org.apache.dolphinscheduler.plugin.datasource.hive; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; - -public class PythonTaskPlugin implements DolphinSchedulerPlugin { +public class HiveDataSourceChannel implements DataSourceChannel { @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new PythonTaskChannelFactory()); + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new HiveDataSourceClient(baseConnectionParam, dbType); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..12556b66be9cf625b230510f8a192d514ba46136 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.hive; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class HiveDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "hive"; + } + + @Override + public DataSourceChannel create() { + return new HiveDataSourceChannel(); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java new file mode 100644 index 0000000000000000000000000000000000000000..e13a2d3e5af7192d88847b501cdd69e6088fd533 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.hive; + +import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAVA_SECURITY_KRB5_CONF; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAVA_SECURITY_KRB5_CONF_PATH; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE; + +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.plugin.datasource.api.provider.JdbcDataSourceProvider; +import org.apache.dolphinscheduler.plugin.datasource.utils.CommonUtil; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.zaxxer.hikari.HikariDataSource; + +import sun.security.krb5.Config; + +public class HiveDataSourceClient extends CommonDataSourceClient { + + private static final Logger logger = LoggerFactory.getLogger(HiveDataSourceClient.class); + + private ScheduledExecutorService kerberosRenewalService; + + private Configuration hadoopConf; + protected HikariDataSource oneSessionDataSource; + private UserGroupInformation ugi; + + public HiveDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); + } + + @Override + protected void preInit() { + logger.info("PreInit in {}", 
getClass().getName()); + this.kerberosRenewalService = Executors.newSingleThreadScheduledExecutor(); + } + + @Override + protected void initClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + logger.info("Create Configuration for hive configuration."); + this.hadoopConf = createHadoopConf(); + logger.info("Create Configuration success."); + + logger.info("Create UserGroupInformation."); + this.ugi = createUserGroupInformation(baseConnectionParam.getUser()); + logger.info("Create ugi success."); + + super.initClient(baseConnectionParam, dbType); + this.oneSessionDataSource = JdbcDataSourceProvider.createOneSessionJdbcDataSource(baseConnectionParam, dbType); + logger.info("Init {} success.", getClass().getName()); + } + + @Override + protected void checkEnv(BaseConnectionParam baseConnectionParam) { + super.checkEnv(baseConnectionParam); + checkKerberosEnv(); + } + + private void checkKerberosEnv() { + String krb5File = PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH); + Boolean kerberosStartupState = PropertyUtils.getBoolean(HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); + if (kerberosStartupState && StringUtils.isNotBlank(krb5File)) { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5File); + try { + Config.refresh(); + Class kerberosName = Class.forName("org.apache.hadoop.security.authentication.util.KerberosName"); + Field field = kerberosName.getDeclaredField("defaultRealm"); + field.setAccessible(true); + field.set(null, Config.getInstance().getDefaultRealm()); + } catch (Exception e) { + throw new RuntimeException("Update Kerberos environment failed.", e); + } + } + } + + private UserGroupInformation createUserGroupInformation(String username) { + String krb5File = PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH); + String keytab = PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH); + String principal = PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME); + + try { + UserGroupInformation ugi = 
CommonUtil.createUGI(getHadoopConf(), principal, keytab, krb5File, username); + try { + Field isKeytabField = ugi.getClass().getDeclaredField("isKeytab"); + isKeytabField.setAccessible(true); + isKeytabField.set(ugi, true); + } catch (NoSuchFieldException | IllegalAccessException e) { + logger.warn(e.getMessage()); + } + + kerberosRenewalService.scheduleWithFixedDelay(() -> { + try { + ugi.checkTGTAndReloginFromKeytab(); + } catch (IOException e) { + logger.error("Check TGT and Renewal from Keytab error", e); + } + }, 5, 5, TimeUnit.MINUTES); + return ugi; + } catch (IOException e) { + throw new RuntimeException("createUserGroupInformation fail. ", e); + } + } + + protected Configuration createHadoopConf() { + Configuration hadoopConf = new Configuration(); + hadoopConf.setBoolean("ipc.client.fallback-to-simple-auth-allowed", true); + return hadoopConf; + } + + protected Configuration getHadoopConf() { + return this.hadoopConf; + } + + @Override + public Connection getConnection() { + try { + return oneSessionDataSource.getConnection(); + } catch (SQLException e) { + logger.error("get oneSessionDataSource Connection fail SQLException: {}", e.getMessage(), e); + return null; + } + } + + @Override + public void close() { + super.close(); + + logger.info("close HiveDataSourceClient."); + kerberosRenewalService.shutdown(); + this.ugi = null; + + this.oneSessionDataSource.close(); + this.oneSessionDataSource = null; + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/AbstractDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/utils/CommonUtil.java similarity index 30% rename from dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/AbstractDatasourceProcessor.java rename to 
dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/utils/CommonUtil.java index 19d88e1588008da1b8a242af41f0a96f974b71c6..faf8f4aafa76ada4f06641b05649bbfb906e7a83 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/AbstractDatasourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/utils/CommonUtil.java @@ -15,65 +15,51 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.task.datasource; +package org.apache.dolphinscheduler.plugin.datasource.utils; -import org.apache.commons.collections.MapUtils; +import static org.apache.dolphinscheduler.spi.utils.Constants.JAVA_SECURITY_KRB5_CONF; -import java.util.Map; -import java.util.regex.Pattern; +import org.apache.dolphinscheduler.spi.enums.ResUploadType; +import org.apache.dolphinscheduler.spi.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; -public abstract class AbstractDatasourceProcessor implements DatasourceProcessor { +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; - private static final Pattern IPV4_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$"); +import java.io.IOException; +import java.util.Objects; - private static final Pattern IPV6_PATTERN = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.\\:\\[\\]]+$"); +public class CommonUtil { - private static final Pattern DATABASE_PATTER = Pattern.compile("^[a-zA-Z0-9\\_\\-\\.]+$"); - - private static final Pattern PARAMS_PATTER = Pattern.compile("^[a-zA-Z0-9\\-\\_\\/]+$"); - - @Override - public void checkDatasourceParam(BaseDataSourceParamDTO baseDataSourceParamDTO) { - checkHost(baseDataSourceParamDTO.getHost()); - 
checkDatasourcePatter(baseDataSourceParamDTO.getDatabase()); - checkOther(baseDataSourceParamDTO.getOther()); + private CommonUtil() { } - /** - * Check the host is valid - * - * @param host datasource host - */ - protected void checkHost(String host) { - if (!IPV4_PATTERN.matcher(host).matches() || !IPV6_PATTERN.matcher(host).matches()) { - throw new IllegalArgumentException("datasource host illegal"); - } + public static boolean getKerberosStartupState() { + String resUploadStartupType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE); + ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); + Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); + return resUploadType == ResUploadType.HDFS && kerberosStartupState; } - /** - * check database name is valid - * - * @param database database name - */ - protected void checkDatasourcePatter(String database) { - if (!DATABASE_PATTER.matcher(database).matches()) { - throw new IllegalArgumentException("datasource name illegal"); + public static synchronized UserGroupInformation createUGI(Configuration configuration, String principal, String keyTab, String krb5File, String username) + throws IOException { + if (getKerberosStartupState()) { + Objects.requireNonNull(keyTab); + if (StringUtils.isNotBlank(krb5File)) { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5File); + } + return loginKerberos(configuration, principal, keyTab); } + return UserGroupInformation.createRemoteUser(username); } - /** - * check other is valid - * - * @param other other - */ - protected void checkOther(Map other) { - if (MapUtils.isEmpty(other)) { - return; - } - boolean paramsCheck = other.entrySet().stream().allMatch(p -> PARAMS_PATTER.matcher(p.getValue()).matches()); - if (!paramsCheck) { - throw new IllegalArgumentException("datasource other params illegal"); - } + public static synchronized UserGroupInformation loginKerberos(final 
Configuration config, final String principal, final String keyTab) + throws IOException { + config.set(Constants.HADOOP_SECURITY_AUTHENTICATION, Constants.KERBEROS); + UserGroupInformation.setConfiguration(config); + UserGroupInformation.loginUserFromKeytab(principal.trim(), keyTab.trim()); + return UserGroupInformation.getCurrentUser(); } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactoryTest.java similarity index 65% rename from dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactoryTest.java index 1d419a83d8a5351a7c256fb899cae6f82b39a25c..6f74a3dd03d9f65bc225b37dc1a1126e52ed5241 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelFactoryTest.java @@ -14,24 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; -import org.junit.Assert; -import org.junit.Test; +package org.apache.dolphinscheduler.plugin.datasource.hive; -import java.sql.Connection; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.junit.Assert; +import org.junit.Test; -public class ConnectionFactoryTest { +public class HiveDataSourceChannelFactoryTest { - /** - * test connection - * @throws Exception if error throws Exception - */ @Test - public void testConnection()throws Exception{ - Connection connection = ConnectionFactory.getInstance().getDataSource().getConnection(); - Assert.assertTrue(connection != null); + public void testCreate() { + HiveDataSourceChannelFactory sourceChannelFactory = new HiveDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..847042a4a0ce1a227bafec4ef4f2797e3fea4356 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/test/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.hive; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.hive.HiveConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({HiveDataSourceChannel.class, HiveDataSourceClient.class}) +public class HiveDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + HiveDataSourceChannel sourceChannel = PowerMockito.mock(HiveDataSourceChannel.class); + HiveDataSourceClient dataSourceClient = PowerMockito.mock(HiveDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + Assert.assertNotNull(sourceChannel.createDataSourceClient(new HiveConnectionParam(), DbType.HIVE)); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/pom.xml similarity index 54% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/pom.xml index 0ab74785ef5562f05371336121759a427939059e..d4ebe2e0eefdd5a85bcf94f0e82a7991e4c86997 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/pom.xml @@ -15,46 +15,46 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + - dolphinscheduler-alert-plugin + dolphinscheduler-datasource-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - org.apache.dolphinscheduler - dolphinscheduler-alert-script - dolphinscheduler-plugin + dolphinscheduler-datasource-mysql + ${project.artifactId} + jar - com.google.guava - guava + org.apache.dolphinscheduler + dolphinscheduler-spi + provided - ch.qos.logback - logback-classic + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} - org.slf4j - slf4j-api + mysql + mysql-connector-java - com.fasterxml.jackson.core - jackson-annotations - provided + org.powermock + powermock-module-junit4 + test - junit - junit + org.powermock + powermock-api-mockito2 test @@ -64,17 +64,5 @@ jar test - - - org.jacoco - org.jacoco.agent - runtime - test - - - - dolphinscheduler-alert-script-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannel.java similarity index 59% rename from 
dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannel.java index 56c3c01f20ef055d5a9caebb0eccfd908bcbdb98..f14c9703f898fae5c39177ca1cdd6677f522bb7e 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatAlertPlugin.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannel.java @@ -15,20 +15,17 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.wechat; +package org.apache.dolphinscheduler.plugin.datasource.mysql; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; - -/** - * WeChatAlertPlugin - */ -public class WeChatAlertPlugin implements DolphinSchedulerPlugin { +public class MysqlDataSourceChannel implements DataSourceChannel { @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new WeChatAlertChannelFactory()); + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new MysqlDataSourceClient(baseConnectionParam, dbType); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactory.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..963aa59e05c14a69a1a76f89f34bc62c95bd6bf7 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.mysql; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class MysqlDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "mysql"; + } + + @Override + public DataSourceChannel create() { + return new MysqlDataSourceChannel(); + } +} diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceClient.java similarity index 63% rename from dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceClient.java index 077f7ac26c3add8ca7b2ac793647b6eafcc94d27..7fa77751f2dae07d9368bd6af9ef50debf0477b2 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceClient.java @@ -15,24 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.alert.utils; +package org.apache.dolphinscheduler.plugin.datasource.mysql; -/** - * constants - */ -public class Constants { - private Constants() { - throw new IllegalStateException("Constants class"); - } +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; - /** - * alert properties path - */ - public static final String ALERT_PROPERTIES_PATH = "/alert.properties"; +public class MysqlDataSourceClient extends CommonDataSourceClient { - /** default alert plugin dir **/ - public static final String ALERT_PLUGIN_PATH = "lib/plugin/alert"; - - public static final int ALERT_SCAN_INTERVAL = 5000; + public MysqlDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); + } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactoryTest.java new file mode 100644 index 0000000000000000000000000000000000000000..dd0603f8f7948c23e588df594f6c5f4dad951b60 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelFactoryTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.mysql; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.Assert; +import org.junit.Test; + +public class MysqlDataSourceChannelFactoryTest { + + @Test + public void testCreate() { + MysqlDataSourceChannelFactory sourceChannelFactory = new MysqlDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..1564594e7186179d04f9f3079c3e64aaba901274 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/mysql/MysqlDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.mysql; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.mysql.MysqlConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({MysqlDataSourceClient.class, MysqlDataSourceChannel.class}) +public class MysqlDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + MysqlDataSourceChannel sourceChannel = PowerMockito.mock(MysqlDataSourceChannel.class); + MysqlDataSourceClient dataSourceClient = PowerMockito.mock(MysqlDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + Assert.assertNotNull(sourceChannel.createDataSourceClient(new MysqlConnectionParam(), DbType.MYSQL)); + } +} diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..8ed25ef1e33233b2ec65a5f73010ef0d8e90937b --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/pom.xml @@ -0,0 +1,63 @@ + + + + + dolphinscheduler-datasource-plugin + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-datasource-oracle + ${project.artifactId} + jar + + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + provided + + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + + + + org.powermock + powermock-module-junit4 + test + + + + org.powermock + powermock-api-mockito2 + test + + + + org.mockito + mockito-core + jar + test + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..65584b0f9ca82ff3e9f8c9765382fdf5e7ca4c25 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.oracle; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class OracleDataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new OracleDataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..8fcc47766b812850e092bbd288f0c6c1951bca0e --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.oracle; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class OracleDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "oracle"; + } + + @Override + public DataSourceChannel create() { + return new OracleDataSourceChannel(); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceClient.java similarity index 62% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceClient.java index 59c45ae4ac96eaa803e4ce5cbd3303448722a0f3..6131dad924a4555ad5c4b81fad719e68ae3d52a8 100644 --- 
a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackAlertPlugin.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/main/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceClient.java @@ -15,20 +15,16 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.slack; +package org.apache.dolphinscheduler.plugin.datasource.oracle; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; +public class OracleDataSourceClient extends CommonDataSourceClient { -/** - * Slack alert plugin - */ -public class SlackAlertPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new SlackAlertChannelFactory()); + public OracleDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); } + } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactoryTest.java new file mode 100644 index 0000000000000000000000000000000000000000..0dc4af463581f745042fa6b8edee1c385bd440dd --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelFactoryTest.java @@ -0,0 +1,33 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.oracle; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.Assert; +import org.junit.Test; + +public class OracleDataSourceChannelFactoryTest { + + @Test + public void testCreate() { + OracleDataSourceChannelFactory sourceChannelFactory = new OracleDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..751a6cfddfa82a5c5e0e8c9d34b1cbb4cb39fe9b --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-oracle/src/test/java/org/apache/dolphinscheduler/plugin/datasource/oracle/OracleDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed 
to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.oracle; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.oracle.OracleConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({OracleDataSourceClient.class, OracleDataSourceChannel.class}) +public class OracleDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + OracleDataSourceChannel sourceChannel = PowerMockito.mock(OracleDataSourceChannel.class); + OracleDataSourceClient dataSourceClient = PowerMockito.mock(OracleDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), 
Mockito.any())).thenReturn(dataSourceClient); + Assert.assertNotNull(sourceChannel.createDataSourceClient(new OracleConnectionParam(), DbType.ORACLE)); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/pom.xml similarity index 59% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/pom.xml index 0c0eaac7b95a714c97630d94f625e0105628b35e..820cbbf7f2c172a5fa67c0e744bdb998803fec46 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/pom.xml @@ -15,64 +15,58 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + - dolphinscheduler-alert-plugin + dolphinscheduler-datasource-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - dolphinscheduler-alert-http - dolphinscheduler-plugin + dolphinscheduler-datasource-postgresql + ${project.artifactId} + jar + - com.google.guava - guava + org.apache.dolphinscheduler + dolphinscheduler-spi + provided - ch.qos.logback - logback-classic + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} - org.apache.httpcomponents - httpclient + com.google.guava + guava - com.fasterxml.jackson.core - jackson-databind - provided + org.postgresql + postgresql - junit - junit + org.powermock + powermock-module-junit4 test - org.mockito - mockito-core - jar + org.powermock + powermock-api-mockito2 test - org.jacoco - org.jacoco.agent - runtime + org.mockito + mockito-core test - - - dolphinscheduler-alert-http-${project.version} - - - \ No newline at end of file + diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..014911fe7158cfcbe40c6f7559055af117b0d866 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.postgresql; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class PostgresqlDataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new PostgresqlDataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..eda1a336cc8d6a74cff1035ec241cbce41381a45 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.postgresql; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class PostgresqlDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "postgresql"; + } + + @Override + public DataSourceChannel create() { + return new PostgresqlDataSourceChannel(); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceClient.java similarity index 62% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceClient.java index 175b5181898ce5e50d8d97ec302353e2af3f3362..944ab23c8118cf85434b46ca31df8e2ecc6e9eb8 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertPlugin.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceClient.java @@ -15,19 +15,16 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.email; +package org.apache.dolphinscheduler.plugin.datasource.postgresql; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; +public class PostgresqlDataSourceClient extends CommonDataSourceClient { -/** - * email alert plugin - */ -public class EmailAlertPlugin implements DolphinSchedulerPlugin { - @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new EmailAlertChannelFactory()); + public PostgresqlDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); } + } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactoryTest.java new file mode 100644 index 0000000000000000000000000000000000000000..7b459267781c0c76bd20f83876095ee6c49dcf9d --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelFactoryTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.postgresql; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.Assert; +import org.junit.Test; + +public class PostgresqlDataSourceChannelFactoryTest { + + @Test + public void testCreate() { + PostgresqlDataSourceChannelFactory sourceChannelFactory = new PostgresqlDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..7824fb67e2d5450ded2e411a528a7ff617b08e8f --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-postgresql/src/test/java/org/apache/dolphinscheduler/plugin/datasource/postgresql/PostgresqlDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.postgresql; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.postgresql.PostgreSqlConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({PostgresqlDataSourceClient.class, PostgresqlDataSourceChannel.class}) +public class PostgresqlDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + PostgresqlDataSourceChannel sourceChannel = PowerMockito.mock(PostgresqlDataSourceChannel.class); + PostgresqlDataSourceClient dataSourceClient = PowerMockito.mock(PostgresqlDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), 
Mockito.any())).thenReturn(dataSourceClient); + Assert.assertNotNull(sourceChannel.createDataSourceClient(new PostgreSqlConnectionParam(), DbType.POSTGRESQL)); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/pom.xml similarity index 51% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/pom.xml index 3f13c8e3422de9e405f631b4f6a43fbd226ee4e9..2b2d3c10aa3b2b3654d4ef48fede1008d95015c5 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-feishu/pom.xml +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/pom.xml @@ -15,70 +15,59 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + + - dolphinscheduler-alert-plugin + dolphinscheduler-datasource-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - org.apache.dolphinscheduler - dolphinscheduler-alert-feishu - dolphinscheduler-plugin + dolphinscheduler-datasource-sqlserver + ${project.artifactId} + jar - - - org.apache.httpcomponents - httpclient - - com.google.guava - guava + org.apache.dolphinscheduler + dolphinscheduler-spi + provided - - ch.qos.logback - logback-classic + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} - org.slf4j - slf4j-api + com.microsoft.sqlserver + mssql-jdbc + + + azure-keyvault + com.microsoft.azure + + + ${mssql.jdbc.version} - com.fasterxml.jackson.core - jackson-annotations - provided + org.powermock + powermock-module-junit4 + test - junit - junit + org.powermock + powermock-api-mockito2 test org.mockito mockito-core - jar - test - - - - org.jacoco - org.jacoco.agent - runtime test - - - dolphinscheduler-alert-feishu-${project.version} - - - \ No newline at end of file + diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..03d5bc37a930cffab56ca8ac80b4559245dbca81 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.sqlserver; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class SqlServerDataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new SqlserverDataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannelFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..cab272bf7f5607e648aeae5b125f5c9058a63c05 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlServerDataSourceChannelFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.sqlserver; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class SqlServerDataSourceChannelFactory implements DataSourceChannelFactory { + @Override + public String getName() { + return "sqlserver"; + } + + @Override + public DataSourceChannel create() { + return new SqlServerDataSourceChannel(); + } +} diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertPlugin.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceClient.java similarity index 62% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertPlugin.java rename to dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceClient.java index f2ba0e87d0cdf221c167d8a8f7b81ab2436bf563..bd97ca483bb649dba2d53cfef8f0d38b9d6b3d3d 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptAlertPlugin.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/main/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceClient.java @@ -15,21 +15,16 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.plugin.alert.script; +package org.apache.dolphinscheduler.plugin.datasource.sqlserver; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; -import com.google.common.collect.ImmutableList; +public class SqlserverDataSourceClient extends CommonDataSourceClient { -/** - * ScriptAlertPlugin - */ -public class ScriptAlertPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new ScriptAlertChannelFactory()); + public SqlserverDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelFactoryTest.java new file mode 100644 index 0000000000000000000000000000000000000000..bfa4df68e9f2eae6a1d58ce72355c2551a211ff0 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelFactoryTest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.sqlserver; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.Assert; +import org.junit.Test; + +public class SqlserverDataSourceChannelFactoryTest { + + @Test + public void testCreate() { + SqlServerDataSourceChannelFactory sourceChannelFactory = new SqlServerDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assert.assertNotNull(dataSourceChannel); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelTest.java new file mode 100644 index 0000000000000000000000000000000000000000..4fa2d262fe39ad4a11d9cb8252d066531dfb18c0 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-sqlserver/src/test/java/org/apache/dolphinscheduler/plugin/datasource/sqlserver/SqlserverDataSourceChannelTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.sqlserver; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver.SqlServerConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@SuppressStaticInitializationFor("org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient") +@PrepareForTest({SqlserverDataSourceClient.class, SqlServerDataSourceChannel.class}) +public class SqlserverDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + SqlServerDataSourceChannel sourceChannel = PowerMockito.mock(SqlServerDataSourceChannel.class); + SqlserverDataSourceClient dataSourceClient = PowerMockito.mock(SqlserverDataSourceClient.class); + PowerMockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + 
Assert.assertNotNull(sourceChannel.createDataSourceClient(new SqlServerConnectionParam(), DbType.SQLSERVER)); + } +} diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml b/dolphinscheduler-datasource-plugin/pom.xml similarity index 38% rename from ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml rename to dolphinscheduler-datasource-plugin/pom.xml index e89962d9003d2edb9fc91c6eb98fad34361ec34b..69d8c4c42108cd86fb60c9b31338ef4c7545084c 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml +++ b/dolphinscheduler-datasource-plugin/pom.xml @@ -1,3 +1,4 @@ + - - - zookeeper.dolphinscheduler.root - /dolphinscheduler - - dolphinscheduler root directory - - - - - zookeeper.session.timeout - 300 - - int - - - - - - - zookeeper.connection.timeout - 300 - - int - - - - - - - zookeeper.retry.base.sleep - 100 - - int - - - - - - - zookeeper.retry.max.sleep - 30000 - - int - - - - - - - zookeeper.retry.maxtime - 5 - - int - - - - - - \ No newline at end of file + + + dolphinscheduler + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + dolphinscheduler-datasource-plugin + ${project.artifactId} + pom + + + dolphinscheduler-datasource-sqlserver + dolphinscheduler-datasource-clickhouse + dolphinscheduler-datasource-db2 + dolphinscheduler-datasource-hive + dolphinscheduler-datasource-mysql + dolphinscheduler-datasource-oracle + dolphinscheduler-datasource-postgresql + dolphinscheduler-datasource-api + dolphinscheduler-datasource-all + + diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index 1c8213275bf8d8815cc6dc04cea44796ba312dca..45b2680d1e8aa16a4874aee3a2bf26e4d29fb918 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -49,7 +49,12 @@ org.apache.dolphinscheduler - dolphinscheduler-alert + 
dolphinscheduler-alert-server + + + + org.apache.dolphinscheduler + dolphinscheduler-python @@ -90,296 +95,44 @@ - - - - - org.apache.maven.plugins - maven-source-plugin - - attach-sources - verify + python + package - jar-no-fork + single + + + python + false + + src/main/assembly/dolphinscheduler-python-api.xml + + + - - - - - rpmbuild - - org.apache.maven.plugins - maven-dependency-plugin - - ${project.build.directory}/lib - false - false - true - provided - - - - copy-dependencies - package - - copy-dependencies - - - - - - - org.codehaus.mojo - rpm-maven-plugin - true + maven-source-plugin - package + attach-sources + verify - attached-rpm + jar-no-fork - - - apache-dolphinscheduler - 1 - apache dolphinscheduler rpm - apache - dolphinscheduler - - /opt/soft - false - - - __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') - - - - /opt/soft/${project.build.finalName}/conf - 755 - root - root - - - - ${basedir}/../dolphinscheduler-alert/src/main/resources - - - **/*.* - - - - - - ${basedir}/../dolphinscheduler-common/src/main/resources - - - **/*.* - - - - - - ${basedir}/../dolphinscheduler-dao/src/main/resources - - - **/*.* - - - - - - ${basedir}/../dolphinscheduler-api/src/main/resources - - - **/*.* - - - - - - ${basedir}/../dolphinscheduler-server/src/main/resources - - - config/*.* - **/*.xml - - - - - - ${basedir}/../dolphinscheduler-service/src/main/resources - - - *.* - - - - - - ${basedir}/../script - - - env/*.* - - - - - - - - /opt/soft/${project.build.finalName}/lib - 755 - root - root - - - - - ${basedir}/../dolphinscheduler-dist/target/lib - - - *.* - - - servlet-api-*.jar - slf4j-log4j12-${slf4j.log4j12.version}.jar - - - - - - /opt/soft/${project.build.finalName}/bin - 755 - root - root - - - - - ${basedir}/../script - - - start-all.sh - stop-all.sh - dolphinscheduler-daemon.sh - status-all.sh - - - - - - /opt/soft/${project.build.finalName} - 755 - root - root - - - - 
${basedir}/../ - - - *.sh - *.py - DISCLAIMER - - - - - - ${basedir}/release-docs - - - **/* - - - - - - - /opt/soft/${project.build.finalName}/ui - 755 - root - root - - - - ${basedir}/../dolphinscheduler-ui/dist - - - **/*.* - - - - - - /opt/soft/${project.build.finalName}/sql - 755 - root - root - - - - ${basedir}/../sql - - - **/*.* - - - - - ${basedir}/../sql - - - soft_version - - - - - - - /opt/soft/${project.build.finalName}/script - 755 - root - root - - - - ${basedir}/../script - - - *.sh - - - - - - - - - - - - - - - - - - - + + apache-dolphinscheduler-${project.version} + diff --git a/dolphinscheduler-dist/release-docs/LICENSE b/dolphinscheduler-dist/release-docs/LICENSE index 63ab3f63cc8fabac6f646647c7f59862c22cf67d..56a80dd749ec4f7c1038133202a2c8150ee19ec7 100644 --- a/dolphinscheduler-dist/release-docs/LICENSE +++ b/dolphinscheduler-dist/release-docs/LICENSE @@ -217,23 +217,23 @@ The text of each license is also included at licenses/LICENSE-[project].txt. apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0 apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0 - apache-el 8.5.54: https://mvnrepository.com/artifact/org.mortbay.jasper/apache-el/8.5.54, Apache 2.0 + tomcat-embed-el 9.0.54: https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-el/9.0.54, Apache 2.0 api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0 api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0 - async-http-client 1.6.5: https://mvnrepository.com/artifact/com.ning/async-http-client, Apache 2.0 audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0 avro 1.7.4: https://github.com/apache/avro, Apache 2.0 aws-sdk-java 
1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0 bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0 byte-buddy 1.9.16: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.16, Apache 2.0 - classmate 1.4.0: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.4.0, Apache 2.0 + caffeine 2.9.2: https://mvnrepository.com/artifact/com.github.ben-manes.caffeine/caffeine/2.9.2, Apache 2.0 + classmate 1.5.1: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.5.1, Apache 2.0 clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0 commons-beanutils 1.9.4 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.9.4, Apache 2.0 commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0 commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0 commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0 commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0 - commons-compress 1.19: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.19, Apache 2.0 + commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0 commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0 commons-daemon 1.0.13 https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0 commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0 @@ -242,10 +242,11 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0 commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0 commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0 - commons-math3 3.6.1: https://mvnrepository.com/artifact/org.apache.commons/commons-math3/3.6.1, Apache 2.0 + commons-math3 3.1.1: https://mvnrepository.com/artifact/org.apache.commons/commons-math3/3.1.1, Apache 2.0 commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0 commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0 - cron-utils 5.0.5: https://mvnrepository.com/artifact/com.cronutils/cron-utils/5.0.5, Apache 2.0 + cron-utils 9.1.3: https://mvnrepository.com/artifact/com.cronutils/cron-utils/9.1.3, Apache 2.0 + commons-lang3 3.12.0: https://mvnrepository.com/artifact/org.apache.commons/commons-lang3/3.12.0, Apache 2.0 curator-client 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0 curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0 curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0 @@ -256,8 +257,9 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0 druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0 error_prone_annotations 2.1.3 https://mvnrepository.com/artifact/com.google.errorprone/error_prone_annotations/2.1.3, Apache 2.0 - gson 2.8.6: https://github.com/google/gson, Apache 2.0 + gson 2.8.8: https://github.com/google/gson, Apache 2.0 guava 24.1-jre: https://mvnrepository.com/artifact/com.google.guava/guava/24.1-jre, Apache 2.0 + guava-retrying 2.0.0: https://mvnrepository.com/artifact/com.github.rholder/guava-retrying/2.0.0, Apache 2.0 guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0 guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0 hadoop-annotations 2.7.3:https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0 @@ -275,8 +277,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0 hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0 hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0 - hibernate-validator 6.0.21.Final: https://github.com/hibernate/hibernate-validator, Apache 2.0 - HikariCP 3.2.0: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/3.2.0, Apache 2.0 + HikariCP 4.0.3: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/4.0.3, Apache 2.0 hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0 hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0 hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0 @@ -293,58 +294,46 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
jackson-core 2.10.5: https://github.com/FasterXML/jackson-core, Apache 2.0 jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0 jackson-databind 2.10.5: https://github.com/FasterXML/jackson-databind, Apache 2.0 - jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0 - jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0 + jackson-datatype-jdk8 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.12.5, Apache 2.0 + jackson-datatype-jsr310 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.12.5, Apache 2.0 jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1 jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0 - jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0 + jackson-module-parameter-names 2.12.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.12.5, Apache 2.0 jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1 javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0 javax.jdo-3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0 java-xmlbuilder 0.4 : https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0 - jboss-logging 3.3.3.Final: https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.3.3.Final, Apache 2.0 jdo-api 3.0.1: 
https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0 jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0 jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0 jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0 - jetty-continuation 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-http 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-io 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-security 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-server 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-servlet 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-servlets 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.33.v20201020, Apache 2.0 and EPL 1.0 + jetty-continuation 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-http 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-io 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-security 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-server 9.4.44.v20210927: 
https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-servlet 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-servlets 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.44.v20210927, Apache 2.0 and EPL 1.0 jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0 - jetty-util 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-webapp 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jetty-xml 9.4.33.v20201020: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.33.v20201020, Apache 2.0 and EPL 1.0 - jna 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna/4.5.2, Apache 2.0 and LGPL 2.1 - jna-platform 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/4.5.2, Apache 2.0 and LGPL 2.1 - joda-time 2.10.8: https://github.com/JodaOrg/joda-time, Apache 2.0 + jetty-util 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-util-ajax 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util-ajax/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-webapp 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jetty-xml 9.4.44.v20210927: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.44.v20210927, Apache 2.0 and EPL 1.0 + jna 5.10.0: https://mvnrepository.com/artifact/net.java.dev.jna/jna/5.10.0, Apache 2.0 and LGPL 2.1 + jna-platform 5.10.0: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/5.10.0, Apache 2.0 and LGPL 2.1 + joda-time 2.5: 
https://github.com/JodaOrg/joda-time, Apache 2.0 jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0 jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1 jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 - jsr305 1.3.9: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0 j2objc-annotations 1.1 https://mvnrepository.com/artifact/com.google.j2objc/j2objc-annotations/1.1, Apache 2.0 libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0 libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0 log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0 log4j-core-2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0 log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0 - log4j-1.2-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.11.2, Apache 2.0 + log4j-1.2-api 2.14.1: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.14.1, Apache 2.0 lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0 mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0 - maven-aether-provider 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-aether-provider/3.0.4, Apache 2.0 - maven-artifact 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-artifact/3.0.4, Apache 2.0 - maven-compat 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-compat/3.0.4, Apache 2.0 - maven-core 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-core/3.0.4, Apache 2.0 - maven-embedder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-embedder/3.0.4, Apache 2.0 - maven-model 3.0.4: 
https://mvnrepository.com/artifact/org.apache.maven/maven-model/3.0.4, Apache 2.0 - maven-model-builder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-model-builder/3.0.4, Apache 2.0 - maven-plugin-api 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-plugin-api/3.0.4, Apache 2.0 - maven-repository-metadata 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-repository-metadata/3.0.4, Apache 2.0 - maven-settings 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-settings/3.0.4, Apache 2.0 - maven-settings-builder 3.0.4: https://mvnrepository.com/artifact/org.apache.maven/maven-settings-builder/3.0.4, Apache 2.0 mybatis 3.5.2 https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0 mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0 mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0 @@ -353,40 +342,39 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0 mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0 netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0 - netty-all 4.1.53.Final: https://github.com/netty/netty/blob/netty-4.1.53.Final/LICENSE.txt, Apache 2.0 + netty 4.1.53.Final: https://github.com/netty/netty/blob/netty-4.1.53.Final/LICENSE.txt, Apache 2.0 opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0 parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0 poi 4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi/4.1.2, Apache 2.0 poi-ooxml 4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml/4.1.2, Apache 2.0 poi-ooxml-schemas-4.1.2: https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml-schemas/4.1.2, Apache 2.0 - plexus-cipher 1.7.0: https://mvnrepository.com/artifact/org.sonatype.plexus/plexus-cipher/1.7.0, Apache 2.0 - plexus-classworlds 2.4: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-classworlds/2.4, Apache 2.0 - plexus-component-annotations 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-component-annotations/1.5.5, Apache 2.0 - plexus-container-default 1.5.5: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-container-default/1.5.5, Apache 2.0 - plexus-interpolation 1.14: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-interpolation/1.14, Apache 2.0 - plexus-sec-dispatcher 1.3: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-sec-dispatcher/1.3, Apache 2.0 - plexus-utils 2.0.6: https://mvnrepository.com/artifact/org.codehaus.plexus/plexus-utils/2.0.6, Apache 2.0 quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0 quartz-jobs 2.3.0: 
https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0 - resolver 1.5: https://mvnrepository.com/artifact/io.airlift.resolver/resolver/1.5 Apache 2.0 - snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0 + snakeyaml 1.28: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.28, Apache 2.0 snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0 snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0 SparseBitSet 1.2: https://mvnrepository.com/artifact/com.zaxxer/SparseBitSet, Apache 2.0 - spring-aop 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.19.RELEASE, Apache 2.0 - spring-beans 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.19.RELEASE, Apache 2.0 - spring-boot 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.18.RELEASE, Apache 2.0 - spring-boot-autoconfigure 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-aop 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-jdbc 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-jetty 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-json 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-logging 2.1.18.RELEASE: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.18.RELEASE, Apache 2.0 - spring-boot-starter-web 2.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.18.RELEASE, Apache 2.0 - spring-context 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.19.RELEASE, Apache 2.0 - spring-core 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core, Apache 2.0 - spring-expression 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression, Apache 2.0 + spring-aop 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.3.12, Apache 2.0 + spring-beans 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.3.12, Apache 2.0 + spring-boot 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.5.6, Apache 2.0 + spring-boot-actuator 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-actuator/2.5.6, Apache 2.0 + spring-boot-actuator-autoconfigure 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-actuator-autoconfigure/2.5.6, Apache 2.0 + spring-boot-configuration-processor 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-configuration-processor/2.5.6, Apache 2.0 + spring-boot-autoconfigure 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.5.6, Apache 2.0 + spring-boot-starter 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.5.6, Apache 2.0 + spring-boot-starter-actuator 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-actuator/2.5.6, Apache 2.0 + spring-boot-starter-aop 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.5.6, Apache 2.0 + spring-boot-starter-jdbc 2.5.6: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.5.6, Apache 2.0 + spring-boot-starter-jetty 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.5.6, Apache 2.0 + spring-boot-starter-json 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.5.6, Apache 2.0 + spring-boot-starter-logging 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.5.6, Apache 2.0 + spring-boot-starter-quartz 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-quartz/2.5.6, Apache 2.0 + spring-boot-starter-web 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.5.6, Apache 2.0 + spring-boot-starter-cache 2.5.6: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache/2.5.6, Apache 2.0 + spring-context 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-context/5.3.12, Apache 2.0 + spring-context-support 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-context-support/5.3.12, Apache 2.0 + spring-core 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-core, Apache 2.0 + spring-expression 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-expression, Apache 2.0 springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0 springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0 springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0 @@ -394,29 +382,33 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0 springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0 springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0 - spring-jcl 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.19.RELEASE, Apache 2.0 - spring-jdbc 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.19.RELEASE, Apache 2.0 + spring-jcl 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.3.12, Apache 2.0 + spring-jdbc 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.3.12, Apache 2.0 spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0 spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0 - spring-tx 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.19.RELEASE, Apache 2.0 - spring-web 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.19.RELEASE, Apache 2.0 - spring-webmvc 5.1.19.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.19.RELEASE, Apache 2.0 + spring-tx 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.3.12, Apache 2.0 + spring-web 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-web/5.3.12, Apache 2.0 + spring-webmvc 5.3.12: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.3.12, Apache 2.0 swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0 swagger-bootstrap-ui 1.9.3: 
https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0 swagger-models 1.5.24: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.24, Apache 2.0 tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0 - validation-api 2.0.1.Final: https://mvnrepository.com/artifact/javax.validation/validation-api/2.0.1.Final, Apache 2.0 - wagon-provider-api 2.2: https://mvnrepository.com/artifact/org.apache.maven.wagon/wagon-provider-api/2.2, Apache 2.0 - xbean-reflect 3.4: https://mvnrepository.com/artifact/org.apache.xbean/xbean-reflect/3.4, Apache 2.0 + tomcat-embed-el 9.0.54: https://mvnrepository.com/artifact/org.apache.tomcat.embed/tomcat-embed-el/9.0.54, Apache 2.0 xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0 xmlbeans 3.1.0: https://mvnrepository.com/artifact/org.apache.xmlbeans/xmlbeans/3.1.0, Apache 2.0 - xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C + xml-apis 1.3.04: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.3.04, Apache 2.0 and W3C zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0 presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1 protostuff-core 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-runtime 1.7.2: https://github.com/protostuff/protostuff/protostuff-core Apache-2.0 protostuff-api 1.7.2: https://github.com/protostuff/protostuff/protostuff-api Apache-2.0 protostuff-collectionschema 1.7.2: https://github.com/protostuff/protostuff/protostuff-collectionschema Apache-2.0 + prometheus client_java(simpleclient) 0.12.0: https://github.com/prometheus/client_java, Apache 2.0 + snowflake snowflake-2010: https://github.com/twitter-archive/snowflake/tree/snowflake-2010, Apache 2.0 + hibernate-validator 6.2.2.Final 
https://mvnrepository.com/artifact/org.hibernate.validator/hibernate-validator/6.2.2.Final, Apache 2.0 + jakarta.validation-api 2.0.2 https://mvnrepository.com/artifact/jakarta.validation/jakarta.validation-api/2.0.2, Apache 2.0 + jboss-logging:jar 3.4.2.Final https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.4.2.Final, Apache 2.0 + ======================================================================== BSD licenses ======================================================================== @@ -435,8 +427,8 @@ The text of each license is also included at licenses/LICENSE-[project].txt. paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD - hamcrest-core 1.3: https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core/1.3, BSD 2-Clause - + py4j 0.10.9: https://mvnrepository.com/artifact/net.sf.py4j/py4j/0.10.9, BSD 2-clause + LatencyUtils 2.0.3: https://github.com/LatencyUtils/LatencyUtils, BSD-2-Clause ======================================================================== CDDL licenses @@ -457,9 +449,9 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1 jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1 jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1 - jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0 transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0 + javax.el 3.0.0: https://mvnrepository.com/artifact/org.glassfish/javax.el/3.0.0, CDDL and GPL 2.0 ======================================================================== EPL licenses @@ -467,17 +459,10 @@ EPL licenses The following components are provided under the EPL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. - aether-api 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-api/1.13.1, EPL 1.0 - aether-connector-asynchttpclient 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-asynchttpclient/1.13.1, EPL 1.0 - aether-connector-file 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-connector-file/1.13.1, EPL 1.0 - aether-impl 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-impl/1.13.1, EPL 1.0 - aether-spi 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-spi/1.13.1, EPL 1.0 - aether-util 1.13.1: https://mvnrepository.com/artifact/org.sonatype.aether/aether-util/1.13.1, EPL 1.0 - aspectjweaver 1.9.6:https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.6, EPL 1.0 + aspectjweaver 1.9.7:https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.7, EPL 1.0 logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1 logback-core 1.2.3: 
https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1 - oshi-core 3.9.1: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/3.9.1, EPL 1.0 - junit 4.12: https://mvnrepository.com/artifact/junit/junit/4.12, EPL 1.0 + oshi-core 6.1.1: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/6.1.1, EPL 1.0 h2-1.4.200 https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0 ======================================================================== @@ -487,11 +472,12 @@ MIT licenses The following components are provided under a MIT 2.0 license. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. - jul-to-slf4j 1.7.30: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.30, MIT + jul-to-slf4j 1.7.32: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.32, MIT mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT animal-sniffer-annotations 1.14 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.14, MIT checker-compat-qual 2.0.0 https://mvnrepository.com/artifact/org.checkerframework/checker-compat-qual/2.0.0, MIT + GPLv2 + checker-qual 3.10.0 https://mvnrepository.com/artifact/org.checkerframework/checker-qual/3.10.0, MIT + GPLv2 Java-WebSocket 1.5.1: https://github.com/TooTallNate/Java-WebSocket MIT ======================================================================== @@ -515,6 +501,11 @@ WTFPL License ======================================== reflections 0.9.12: https://github.com/ronmamo/reflections WTFPL +======================================== +CC0-1.0 licenses +======================================== + +HdrHistogram 2.1.12: https://github.com/HdrHistogram/HdrHistogram , CC0-1.0 and BSD 2-Clause 
======================================================================== UI related licenses diff --git a/dolphinscheduler-dist/release-docs/NOTICE b/dolphinscheduler-dist/release-docs/NOTICE index 11f5a94872043e3b8661f5844f600dcadbe8fa3c..3406a3e37d67e4f3b97296e9a7310365baba2526 100644 --- a/dolphinscheduler-dist/release-docs/NOTICE +++ b/dolphinscheduler-dist/release-docs/NOTICE @@ -1,5 +1,5 @@ Apache DolphinScheduler -Copyright 2019-2021 The Apache Software Foundation +Copyright 2019-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). @@ -31,23 +31,6 @@ Jetty may be distributed under either license. ------ -plexus-cipher - -The code in this component contains a class - Base64 taken from http://juliusdavies.ca/svn/not-yet-commons-ssl/tags/commons-ssl-0.3.10/src/java/org/apache/commons/ssl/Base64.java -which is Apache license: http://www.apache.org/licenses/LICENSE-2.0 - -The PBE key processing routine PBECipher.createCipher() is adopted from http://juliusdavies.ca/svn/not-yet-commons-ssl/tags/commons-ssl-0.3.10/src/java/org/apache/commons/ssl/OpenSSL.java - which is also Apache APL-2.0 license: http://www.apache.org/licenses/LICENSE-2.0 - ------- -plexus-utils - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). - ------- - - Eclipse The following artifacts are EPL. 
@@ -129,11 +112,6 @@ org.mortbay.jasper:apache-jsp org.apache.tomcat:tomcat-util-scan org.apache.tomcat:tomcat-util -org.mortbay.jasper:apache-el - org.apache.tomcat:tomcat-jasper-el - org.apache.tomcat:tomcat-el-api - - ------ Mortbay @@ -364,14 +342,6 @@ can be obtained at: * HOMEPAGE: * http://logging.apache.org/log4j/ -This product optionally depends on 'Aalto XML', an ultra-high performance -non-blocking XML processor, which can be obtained at: - - * LICENSE: - * license/LICENSE.aalto-xml.txt (Apache License 2.0) - * HOMEPAGE: - * http://wiki.fasterxml.com/AaltoHome - This product contains a modified version of 'HPACK', a Java implementation of the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: @@ -461,12 +431,6 @@ Copyright 2009-2014 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -This product includes software components originally -developed for Airlift (https://github.com/airlift/airlift), -licensed under the Apache 2.0 license. The licensing terms -for Airlift code can be found at: -https://github.com/airlift/airlift/blob/master/LICENSE - ======================================================================== @@ -674,6 +638,14 @@ http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html ======================================================================== +Alibaba Druid NOTICE + +======================================================================== + +Alibaba Druid +Copyright 1999-2021 Alibaba Group Holding Ltd. 
+ +======================================================================== AWS SDK for Java NOTICE @@ -1687,14 +1659,6 @@ can be obtained at: * HOMEPAGE: * http://logging.apache.org/log4j/ -This product optionally depends on 'Aalto XML', an ultra-high performance -non-blocking XML processor, which can be obtained at: - - * LICENSE: - * license/LICENSE.aalto-xml.txt (Apache License 2.0) - * HOMEPAGE: - * http://wiki.fasterxml.com/AaltoHome - This product contains a modified version of 'HPACK', a Java implementation of the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: @@ -2046,6 +2010,3 @@ Xmlbeans NOTICE - W3C XML Schema documents Copyright 2001-2003 (c) World Wide Web Consortium (Massachusetts Institute of Technology, European Research Consortium for Informatics and Mathematics, Keio University) - - - resolver.jar from Apache Xml Commons project, - Copyright (c) 2001-2003 Apache Software Foundation diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-LatencyUtils.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-LatencyUtils.txt new file mode 100644 index 0000000000000000000000000000000000000000..3405c711aefc596bd94b5319cba7420a6f0354c0 --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-LatencyUtils.txt @@ -0,0 +1,38 @@ + * This code was Written by Gil Tene of Azul Systems, and released to the + * public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ + + For users of this code who wish to consume it under the "BSD" license + rather than under the public domain or CC0 contribution text mentioned + above, the code found under this directory is *also* provided under the + following license (commonly referred to as the BSD 2-Clause License). This + license does not detract from the above stated release of the code into + the public domain, and simply represents an additional license granted by + the Author. 
+ + ----------------------------------------------------------------------------- + ** Beginning of "BSD 2-Clause License" text. ** + + Copyright (c) 2012, 2013, 2014 Gil Tene + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-api.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-api.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). 
ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. 
- -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. - -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. 
REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. 
Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. 
NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. 
- -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. 
Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-asynchttpclient.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-asynchttpclient.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-asynchttpclient.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. 
Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. 
- -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium 
customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. 
- -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. 
GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. 
The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-file.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-file.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-connector-file.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. 
DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. 
This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. - -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. 
REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. 
Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. 
NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. 
- -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. 
Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-impl.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-impl.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-impl.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. 
Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. 
- -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium 
customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. 
- -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. 
GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. 
The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-spi.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-spi.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-spi.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. 
A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. 
- -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium 
customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. 
- -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. 
GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. 
The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-util.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-util.txt deleted file mode 100644 index 3fa00836fa4104691d85335a7a6a222206d3e22c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-aether-util.txt +++ /dev/null @@ -1,86 +0,0 @@ -Eclipse Public License - v 1.0 -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - -a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and - -b) in the case of each subsequent Contributor: - -i) changes to the Program, and - -ii) additions to the Program; - -where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. 
A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. - -2. GRANT OF RIGHTS - -a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. - -b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. 
- -c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - -d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - -3. REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: - -a) it complies with the terms and conditions of this Agreement; and - -b) its license agreement: - -i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - -ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - -iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and - -iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium 
customarily used for software exchange. - -When the Program is made available in source code form: - -a) it must be made available under this Agreement; and - -b) a copy of this Agreement must be included with each copy of the Program. - -Contributors may not remove or alter any copyright notices contained within the Program. - -Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. 
- -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. 
GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. 
The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-async-http-client.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-async-http-client.txt deleted file mode 100644 index 84fcbc259f9318d92deef697887c44ec954c1714..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-async-http-client.txt +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2014-2016 AsyncHttpClient Project - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
\ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-apache-el.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-caffeine.txt similarity index 99% rename from dolphinscheduler-dist/release-docs/licenses/LICENSE-apache-el.txt rename to dolphinscheduler-dist/release-docs/licenses/LICENSE-caffeine.txt index d645695673349e3947e8e5ae42332d0ac3164cd7..0b42f8f262b23178c7392857a4d8120c6e0dbb4e 100644 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-apache-el.txt +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-caffeine.txt @@ -1,4 +1,4 @@ - + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. + limitations under the License. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-checker-qual.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-checker-qual.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b59b5c9820a92823f97d331bbc591091dc8aa8b --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-checker-qual.txt @@ -0,0 +1,22 @@ +Checker Framework qualifiers +Copyright 2004-present by the Checker Framework developers + +MIT License: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all 
copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-hamcrest-core.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-hamcrest-core.txt deleted file mode 100644 index 60125b680e573a686436a19229f4b1a5616b5964..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-hamcrest-core.txt +++ /dev/null @@ -1,27 +0,0 @@ -BSD License - -Copyright (c) 2000-2006, www.hamcrest.org -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of -conditions and the following disclaimer. Redistributions in binary form must reproduce -the above copyright notice, this list of conditions and the following disclaimer in -the documentation and/or other materials provided with the distribution. - -Neither the name of Hamcrest nor the names of its contributors may be used to endorse -or promote products derived from this software without specific prior written -permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT -SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY -WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-hibernate-validator.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-hibernate-validator.txt deleted file mode 100644 index 6b0b1270ff0ca8f03867efcd09ba6ddb6392b1e1..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-hibernate-validator.txt +++ /dev/null @@ -1,203 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.activation-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.activation-api.txt deleted file mode 100644 index 2c3350791e6d2785748f7d3aebee92034abbb991..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.activation-api.txt +++ /dev/null @@ -1,758 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1 - -1. Definitions. - - 1.1. "Contributor" means each individual or entity that creates or - contributes to the creation of Modifications. - - 1.2. "Contributor Version" means the combination of the Original - Software, prior Modifications used by a Contributor (if any), and - the Modifications made by that particular Contributor. - - 1.3. "Covered Software" means (a) the Original Software, or (b) - Modifications, or (c) the combination of files containing Original - Software with files containing Modifications, in each case including - portions thereof. - - 1.4. "Executable" means the Covered Software in any form other than - Source Code. - - 1.5. "Initial Developer" means the individual or entity that first - makes Original Software available under this License. - - 1.6. "Larger Work" means a work which combines Covered Software or - portions thereof with code not governed by the terms of this License. - - 1.7. 
"License" means this document. - - 1.8. "Licensable" means having the right to grant, to the maximum - extent possible, whether at the time of the initial grant or - subsequently acquired, any and all of the rights conveyed herein. - - 1.9. "Modifications" means the Source Code and Executable form of - any of the following: - - A. Any file that results from an addition to, deletion from or - modification of the contents of a file containing Original Software - or previous Modifications; - - B. Any new file that contains any part of the Original Software or - previous Modification; or - - C. Any new file that is contributed or otherwise made available - under the terms of this License. - - 1.10. "Original Software" means the Source Code and Executable form - of computer software code that is originally released under this - License. - - 1.11. "Patent Claims" means any patent claim(s), now owned or - hereafter acquired, including without limitation, method, process, - and apparatus claims, in any patent Licensable by grantor. - - 1.12. "Source Code" means (a) the common form of computer software - code in which modifications are made and (b) associated - documentation included in or with such code. - - 1.13. "You" (or "Your") means an individual or a legal entity - exercising rights under, and complying with all of the terms of, - this License. For legal entities, "You" includes any entity which - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants. - - 2.1. The Initial Developer Grant. 
- - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, the Initial Developer - hereby grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Initial Developer, to use, reproduce, - modify, display, perform, sublicense and distribute the Original - Software (or portions thereof), with or without Modifications, - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using or selling of - Original Software, to make, have made, use, practice, sell, and - offer for sale, and/or otherwise dispose of the Original Software - (or portions thereof). - - (c) The licenses granted in Sections 2.1(a) and (b) are effective on - the date Initial Developer first distributes or otherwise makes the - Original Software available to a third party under the terms of this - License. - - (d) Notwithstanding Section 2.1(b) above, no patent license is - granted: (1) for code that You delete from the Original Software, or - (2) for infringements caused by: (i) the modification of the - Original Software, or (ii) the combination of the Original Software - with other software or devices. - - 2.2. Contributor Grant. 
- - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, each Contributor hereby - grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Contributor to use, reproduce, modify, - display, perform, sublicense and distribute the Modifications - created by such Contributor (or portions thereof), either on an - unmodified basis, with other Modifications, as Covered Software - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using, or selling - of Modifications made by that Contributor either alone and/or in - combination with its Contributor Version (or portions of such - combination), to make, use, sell, offer for sale, have made, and/or - otherwise dispose of: (1) Modifications made by that Contributor (or - portions thereof); and (2) the combination of Modifications made by - that Contributor with its Contributor Version (or portions of such - combination). - - (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective - on the date Contributor first distributes or otherwise makes the - Modifications available to a third party. - - (d) Notwithstanding Section 2.2(b) above, no patent license is - granted: (1) for any code that Contributor has deleted from the - Contributor Version; (2) for infringements caused by: (i) third - party modifications of Contributor Version, or (ii) the combination - of Modifications made by that Contributor with other software - (except as part of the Contributor Version) or other devices; or (3) - under Patent Claims infringed by Covered Software in the absence of - Modifications made by that Contributor. - -3. Distribution Obligations. - - 3.1. Availability of Source Code. 
- - Any Covered Software that You distribute or otherwise make available - in Executable form must also be made available in Source Code form - and that Source Code form must be distributed only under the terms - of this License. You must include a copy of this License with every - copy of the Source Code form of the Covered Software You distribute - or otherwise make available. You must inform recipients of any such - Covered Software in Executable form as to how they can obtain such - Covered Software in Source Code form in a reasonable manner on or - through a medium customarily used for software exchange. - - 3.2. Modifications. - - The Modifications that You create or to which You contribute are - governed by the terms of this License. You represent that You - believe Your Modifications are Your original creation(s) and/or You - have sufficient rights to grant the rights conveyed by this License. - - 3.3. Required Notices. - - You must include a notice in each of Your Modifications that - identifies You as the Contributor of the Modification. You may not - remove or alter any copyright, patent or trademark notices contained - within the Covered Software, or any notices of licensing or any - descriptive text giving attribution to any Contributor or the - Initial Developer. - - 3.4. Application of Additional Terms. - - You may not offer or impose any terms on any Covered Software in - Source Code form that alters or restricts the applicable version of - this License or the recipients' rights hereunder. You may choose to - offer, and to charge a fee for, warranty, support, indemnity or - liability obligations to one or more recipients of Covered Software. - However, you may do so only on Your own behalf, and not on behalf of - the Initial Developer or any Contributor. 
You must make it - absolutely clear that any such warranty, support, indemnity or - liability obligation is offered by You alone, and You hereby agree - to indemnify the Initial Developer and every Contributor for any - liability incurred by the Initial Developer or such Contributor as a - result of warranty, support, indemnity or liability terms You offer. - - 3.5. Distribution of Executable Versions. - - You may distribute the Executable form of the Covered Software under - the terms of this License or under the terms of a license of Your - choice, which may contain terms different from this License, - provided that You are in compliance with the terms of this License - and that the license for the Executable form does not attempt to - limit or alter the recipient's rights in the Source Code form from - the rights set forth in this License. If You distribute the Covered - Software in Executable form under a different license, You must make - it absolutely clear that any terms which differ from this License - are offered by You alone, not by the Initial Developer or - Contributor. You hereby agree to indemnify the Initial Developer and - every Contributor for any liability incurred by the Initial - Developer or such Contributor as a result of any such terms You offer. - - 3.6. Larger Works. - - You may create a Larger Work by combining Covered Software with - other code not governed by the terms of this License and distribute - the Larger Work as a single product. In such a case, You must make - sure the requirements of this License are fulfilled for the Covered - Software. - -4. Versions of the License. - - 4.1. New Versions. - - Oracle is the initial license steward and may publish revised and/or - new versions of this License from time to time. Each version will be - given a distinguishing version number. Except as provided in Section - 4.3, no one other than the license steward has the right to modify - this License. - - 4.2. Effect of New Versions. 
- - You may always continue to use, distribute or otherwise make the - Covered Software available under the terms of the version of the - License under which You originally received the Covered Software. If - the Initial Developer includes a notice in the Original Software - prohibiting it from being distributed or otherwise made available - under any subsequent version of the License, You must distribute and - make the Covered Software available under the terms of the version - of the License under which You originally received the Covered - Software. Otherwise, You may also choose to use, distribute or - otherwise make the Covered Software available under the terms of any - subsequent version of the License published by the license steward. - - 4.3. Modified Versions. - - When You are an Initial Developer and You want to create a new - license for Your Original Software, You may create and use a - modified version of this License if You: (a) rename the license and - remove any references to the name of the license steward (except to - note that the license differs from this License); and (b) otherwise - make it clear that the license contains terms which differ from this - License. - -5. DISCLAIMER OF WARRANTY. - - COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, - WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, - INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE - IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR - NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF - THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE - DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY - OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, - REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN - ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS - AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - - 6.1. 
This License and the rights granted hereunder will terminate - automatically if You fail to comply with terms herein and fail to - cure such breach within 30 days of becoming aware of the breach. - Provisions which, by their nature, must remain in effect beyond the - termination of this License shall survive. - - 6.2. If You assert a patent infringement claim (excluding - declaratory judgment actions) against Initial Developer or a - Contributor (the Initial Developer or Contributor against whom You - assert such claim is referred to as "Participant") alleging that the - Participant Software (meaning the Contributor Version where the - Participant is a Contributor or the Original Software where the - Participant is the Initial Developer) directly or indirectly - infringes any patent, then any and all rights granted directly or - indirectly to You by such Participant, the Initial Developer (if the - Initial Developer is not the Participant) and all Contributors under - Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice - from Participant terminate prospectively and automatically at the - expiration of such 60 day notice period, unless if within such 60 - day period You withdraw Your claim with respect to the Participant - Software against such Participant either unilaterally or pursuant to - a written agreement with Participant. - - 6.3. If You assert a patent infringement claim against Participant - alleging that the Participant Software directly or indirectly - infringes any patent where such claim is resolved (such as by - license or settlement) prior to the initiation of patent - infringement litigation, then the reasonable value of the licenses - granted by such Participant under Sections 2.1 or 2.2 shall be taken - into account in determining the amount or value of any payment or - license. - - 6.4. 
In the event of termination under Sections 6.1 or 6.2 above, - all end user licenses that have been validly granted by You or any - distributor hereunder prior to termination (excluding licenses - granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. - - UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT - (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE - INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF - COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE - TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR - CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT - LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER - FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR - LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE - POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT - APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH - PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH - LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR - LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION - AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - - The Covered Software is a "commercial item," as that term is defined - in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer - software" (as that term is defined at 48 C.F.R. ? 252.227-7014(a)(1)) and "commercial computer software documentation" - as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent - with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 - (June 1995), all U.S. Government End Users acquire Covered Software - with only those rights set forth herein. This U.S. Government Rights - clause is in lieu of, and supersedes, any other FAR, DFAR, or other - clause or provision that addresses Government rights in computer - software under this License. 
- -9. MISCELLANEOUS. - - This License represents the complete agreement concerning subject - matter hereof. If any provision of this License is held to be - unenforceable, such provision shall be reformed only to the extent - necessary to make it enforceable. This License shall be governed by - the law of the jurisdiction specified in a notice contained within - the Original Software (except to the extent applicable law, if any, - provides otherwise), excluding such jurisdiction's conflict-of-law - provisions. Any litigation relating to this License shall be subject - to the jurisdiction of the courts located in the jurisdiction and - venue specified in a notice contained within the Original Software, - with the losing party responsible for costs, including, without - limitation, court costs and reasonable attorneys' fees and expenses. - The application of the United Nations Convention on Contracts for - the International Sale of Goods is expressly excluded. Any law or - regulation which provides that the language of a contract shall be - construed against the drafter shall not apply to this License. You - agree that You alone are responsible for compliance with the United - States export administration regulations (and the export control - laws and regulation of any other countries) when You use, distribute - or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - - As between Initial Developer and the Contributors, each party is - responsible for claims and damages arising, directly or indirectly, - out of its utilization of rights under this License and You agree to - work with Initial Developer and Contributors to distribute such - responsibility on an equitable basis. Nothing herein is intended or - shall be deemed to constitute any admission of liability. 
- ------------------------------------------------------------------------- - -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION -LICENSE (CDDL) - -The code released under the CDDL shall be governed by the laws of the -State of California (excluding conflict-of-law provisions). Any -litigation relating to this License shall be subject to the jurisdiction -of the Federal Courts of the Northern District of California and the -state courts of the State of California, with venue lying in Santa Clara -County, California. - - - - The GNU General Public License (GPL) Version 2, June 1991 - -Copyright (C) 1989, 1991 Free Software Foundation, Inc. -51 Franklin Street, Fifth Floor -Boston, MA 02110-1335 -USA - -Everyone is permitted to copy and distribute verbatim copies -of this license document, but changing it is not allowed. - -Preamble - -The licenses for most software are designed to take away your freedom to -share and change it. By contrast, the GNU General Public License is -intended to guarantee your freedom to share and change free software--to -make sure the software is free for all its users. This General Public -License applies to most of the Free Software Foundation's software and -to any other program whose authors commit to using it. (Some other Free -Software Foundation software is covered by the GNU Library General -Public License instead.) You can apply it to your programs, too. - -When we speak of free software, we are referring to freedom, not price. -Our General Public Licenses are designed to make sure that you have the -freedom to distribute copies of free software (and charge for this -service if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs; and that you know you can do these things. - -To protect your rights, we need to make restrictions that forbid anyone -to deny you these rights or to ask you to surrender the rights. 
These -restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - -For example, if you distribute copies of such a program, whether gratis -or for a fee, you must give the recipients all the rights that you have. -You must make sure that they, too, receive or can get the source code. -And you must show them these terms so they know their rights. - -We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - -Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - -Finally, any free program is threatened constantly by software patents. -We wish to avoid the danger that redistributors of a free program will -individually obtain patent licenses, in effect making the program -proprietary. To prevent this, we have made it clear that any patent must -be licensed for everyone's free use or not licensed at all. - -The precise terms and conditions for copying, distribution and -modification follow. - -TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - -0. This License applies to any program or other work which contains a -notice placed by the copyright holder saying it may be distributed under -the terms of this General Public License. The "Program", below, refers -to any such program or work, and a "work based on the Program" means -either the Program or any derivative work under copyright law: that is -to say, a work containing the Program or a portion of it, either -verbatim or with modifications and/or translated into another language. 
-(Hereinafter, translation is included without limitation in the term -"modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of running -the Program is not restricted, and the output from the Program is -covered only if its contents constitute a work based on the Program -(independent of having been made by running the Program). Whether that -is true depends on what the Program does. - -1. You may copy and distribute verbatim copies of the Program's source -code as you receive it, in any medium, provided that you conspicuously -and appropriately publish on each copy an appropriate copyright notice -and disclaimer of warranty; keep intact all the notices that refer to -this License and to the absence of any warranty; and give any other -recipients of the Program a copy of this License along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - -2. You may modify your copy or copies of the Program or any portion of -it, thus forming a work based on the Program, and copy and distribute -such modifications or work under the terms of Section 1 above, provided -that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any part - thereof, to be licensed as a whole at no charge to all third parties - under the terms of this License. 
- - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a notice - that there is no warranty (or else, saying that you provide a - warranty) and that users may redistribute the program under these - conditions, and telling the user how to view a copy of this License. - (Exception: if the Program itself is interactive but does not - normally print such an announcement, your work based on the Program - is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, and -can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based on -the Program, the distribution of the whole must be on the terms of this -License, whose permissions for other licensees extend to the entire -whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of a -storage or distribution medium does not bring the other work under the -scope of this License. - -3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections 1 - and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your cost - of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer to - distribute corresponding source code. (This alternative is allowed - only for noncommercial distribution and only if you received the - program in object code or executable form with such an offer, in - accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source code -means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to control -compilation and installation of the executable. However, as a special -exception, the source code distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies the -executable. 
- -If distribution of executable or object code is made by offering access -to copy from a designated place, then offering equivalent access to copy -the source code from the same place counts as distribution of the source -code, even though third parties are not compelled to copy the source -along with the object code. - -4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt otherwise -to copy, modify, sublicense or distribute the Program is void, and will -automatically terminate your rights under this License. However, parties -who have received copies, or rights, from you under this License will -not have their licenses terminated so long as such parties remain in -full compliance. - -5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and all -its terms and conditions for copying, distributing or modifying the -Program or works based on it. - -6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further restrictions -on the recipients' exercise of the rights granted herein. You are not -responsible for enforcing compliance by third parties to this License. - -7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot distribute -so as to satisfy simultaneously your obligations under this License and -any other pertinent obligations, then as a consequence you may not -distribute the Program at all. For example, if a patent license would -not permit royalty-free redistribution of the Program by all those who -receive copies directly or indirectly through you, then the only way you -could satisfy both it and this License would be to refrain entirely from -distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is implemented -by public license practices. Many people have made generous -contributions to the wide range of software distributed through that -system in reliance on consistent application of that system; it is up to -the author/donor to decide if he or she is willing to distribute -software through any other system and a licensee cannot impose that choice. - -This section is intended to make thoroughly clear what is believed to be -a consequence of the rest of this License. - -8. 
If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License may -add an explicit geographical distribution limitation excluding those -countries, so that distribution is permitted only in or among countries -not thus excluded. In such case, this License incorporates the -limitation as if written in the body of this License. - -9. The Free Software Foundation may publish revised and/or new -versions of the General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Program does not specify a version -number of this License, you may choose any version ever published by the -Free Software Foundation. - -10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the -author to ask for permission. For software which is copyrighted by the -Free Software Foundation, write to the Free Software Foundation; we -sometimes make exceptions for this. Our decision will be guided by the -two goals of preserving the free status of all derivatives of our free -software and of promoting the sharing and reuse of software generally. - -NO WARRANTY - -11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
-EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, -EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE -ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH -YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL -NECESSARY SERVICING, REPAIR OR CORRECTION. - -12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN -WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY -AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR -DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL -DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM -(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED -INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF -THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR -OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -END OF TERMS AND CONDITIONS - -How to Apply These Terms to Your New Programs - -If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - -To do so, attach the following notices to the program. It is safest to -attach them to the start of each source file to most effectively convey -the exclusion of warranty; and each file should have at least the -"copyright" line and a pointer to where the full notice is found. - - One line to give the program's name and a brief idea of what it does. 
- Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type - `show w'. This is free software, and you are welcome to redistribute - it under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the -appropriate parts of the General Public License. Of course, the commands -you use may be called something other than `show w' and `show c'; they -could even be mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - program `Gnomovision' (which makes passes at compilers) written by - James Hacker. - - signature of Ty Coon, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program -into proprietary programs. 
If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications -with the library. If this is what you want to do, use the GNU Library -General Public License instead of this License. - -# - -Certain source files distributed by Oracle America, Inc. and/or its -affiliates are subject to the following clarification and special -exception to the GPLv2, based on the GNU Project exception for its -Classpath libraries, known as the GNU Classpath Exception, but only -where Oracle has expressly included in the particular source file's -header the words "Oracle designates this particular file as subject to -the "Classpath" exception as provided by Oracle in the LICENSE file -that accompanied this code." - -You should also note that Oracle includes multiple, independent -programs in this software package. Some of those programs are provided -under licenses deemed incompatible with the GPLv2 by the Free Software -Foundation and others. For example, the package includes programs -licensed under the Apache License, Version 2.0. Such programs are -licensed to you under their original licenses. - -Oracle facilitates your further distribution of this package by adding -the Classpath Exception to the necessary parts of its GPLv2 code, which -permits you to use that code in combination with other independent -modules not licensed under the GPLv2. However, note that this would -not permit you to commingle code under an incompatible license with -Oracle's GPLv2 licensed code by, for example, cutting and pasting such -code into a file also containing Oracle's GPLv2 licensed code and then -distributing the result. 
Additionally, if you were to remove the -Classpath Exception from any of the files to which it applies and -distribute the result, you would likely be required to license some or -all of the other code in that distribution under the GPLv2 as well, and -since the GPLv2 is incompatible with the license terms of some items -included in the distribution by Oracle, removing the Classpath -Exception could therefore effectively compromise your ability to -further distribute the package. - -Proceed with caution and we recommend that you obtain the advice of a -lawyer skilled in open source matters before removing the Classpath -Exception or making modifications to this package which may -subsequently be redistributed and/or involve the use of third party -software. - -CLASSPATH EXCEPTION -Linking this library statically or dynamically with other modules is -making a combined work based on this library. Thus, the terms and -conditions of the GNU General Public License version 2 cover the whole -combination. - -As a special exception, the copyright holders of this library give you -permission to link this library with independent modules to produce an -executable, regardless of the license terms of these independent -modules, and to copy and distribute the resulting executable under -terms of your choice, provided that you also meet, for each linked -independent module, the terms and conditions of the license of that -module. An independent module is a module which is not derived from or -based on this library. If you modify this library, you may extend this -exception to your version of the library, but you are not obligated to -do so. If you do not wish to do so, delete this exception statement -from your version. 
diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.servlet-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.servlet-api.txt deleted file mode 100644 index a0ccc93564c0588fb48eefd8ed2832a5372e2c42..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-javax.servlet-api.txt +++ /dev/null @@ -1,263 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 - -1. Definitions. - - 1.1. Contributor. means each individual or entity that creates or contributes to the creation of Modifications. - - 1.2. Contributor Version. means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. - - 1.3. Covered Software. means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. - - 1.4. Executable. means the Covered Software in any form other than Source Code. - - 1.5. Initial Developer. means the individual or entity that first makes Original Software available under this License. - - 1.6. Larger Work. means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. - - 1.7. License. means this document. - - 1.8. Licensable. means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. - - 1.9. Modifications. means the Source Code and Executable form of any of the following: - - A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; - - B. Any new file that contains any part of the Original Software or previous Modification; or - - C. 
Any new file that is contributed or otherwise made available under the terms of this License. - - 1.10. Original Software. means the Source Code and Executable form of computer software code that is originally released under this License. - - 1.11. Patent Claims. means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. - - 1.12. Source Code. means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. - - 1.13. You. (or .Your.) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, .You. includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, .control. means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. - -2. License Grants. - - 2.1. The Initial Developer Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof). 
- - (c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License. - - (d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices. - - 2.2. Contributor Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). - - (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. 
- - (d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. - -3. Distribution Obligations. - - 3.1. Availability of Source Code. - Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. - - 3.2. Modifications. - The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. - - 3.3. Required Notices. - You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. - - 3.4. Application of Additional Terms. 
- You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients. rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. - - 3.5. Distribution of Executable Versions. - You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipient.s rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. - - 3.6. Larger Works. - You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. 
In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. - -4. Versions of the License. - - 4.1. New Versions. - Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. - - 4.2. Effect of New Versions. - You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. - - 4.3. Modified Versions. - When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. - -5. DISCLAIMER OF WARRANTY. - - COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN .AS IS. BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - - 6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. - - 6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as .Participant.) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. - - 6.3. 
In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. - - UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTY.S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - - The Covered Software is a .commercial item,. as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of .commercial computer software. (as that term is defined at 48 C.F.R. ? 252.227-7014(a)(1)) and .commercial computer software documentation. as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. - -9. MISCELLANEOUS. 
- - This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdiction.s conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys. fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - - As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. - - NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) - - The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). 
Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. - - -The GNU General Public License (GPL) Version 2, June 1991 - - -Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. - -Preamble - -The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. - -When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. - -To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. - -For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. 
You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. - -We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. - -Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. - -Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. - -The precise terms and conditions for copying, distribution and modification follow. - - -TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - -0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. 
The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. - -1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. - -You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. - -2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. - - c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. 
(Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. - -3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
- -If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. - -4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. - -5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. - -6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. - -7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. - -It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. - -This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. - -8. 
If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. - -9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. - -10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. - -NO WARRANTY - -11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - -12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -END OF TERMS AND CONDITIONS - - -How to Apply These Terms to Your New Programs - -If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. - -To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. - - One line to give the program's name and a brief idea of what it does. - - Copyright (C) <year> <name of author> - - This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. 
- - This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. - - signature of Ty Coon, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. 
- - -"CLASSPATH" EXCEPTION TO THE GPL VERSION 2 - -Certain source files distributed by Sun Microsystems, Inc. are subject to the following clarification and special exception to the GPL Version 2, but only where Sun has expressly included in the particular source file's header the words - -"Sun designates this particular file as subject to the "Classpath" exception as provided by Sun in the License file that accompanied this code." - -Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License Version 2 cover the whole combination. - -As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-jboss-logging.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-jboss-logging.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-jboss-logging.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-junit.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-junit.txt deleted file mode 100644 index cd53fb9fa9c68fc6b9e7167da1deccdfe010028b..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-junit.txt +++ /dev/null @@ -1,213 +0,0 @@ -JUnit - -Eclipse Public License - v 1.0 - -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC -LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM -CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. 
DEFINITIONS - -"Contribution" means: - - a) in the case of the initial Contributor, the initial code and - documentation distributed under this Agreement, and - b) in the case of each subsequent Contributor: - - i) changes to the Program, and - - ii) additions to the Program; - - where such changes and/or additions to the Program originate from and are -distributed by that particular Contributor. A Contribution 'originates' from a -Contributor if it was added to the Program by such Contributor itself or anyone -acting on such Contributor's behalf. Contributions do not include additions to -the Program which: (i) are separate modules of software distributed in -conjunction with the Program under their own license agreement, and (ii) are -not derivative works of the Program. - -"Contributor" means any person or entity that distributes the Program. - -"Licensed Patents " mean patent claims licensable by a Contributor which are -necessarily infringed by the use or sale of its Contribution alone or when -combined with the Program. - -"Program" means the Contributions distributed in accordance with this Agreement. - -"Recipient" means anyone who receives the Program under this Agreement, -including all Contributors. - -2. GRANT OF RIGHTS - - a) Subject to the terms of this Agreement, each Contributor hereby grants -Recipient a non-exclusive, worldwide, royalty-free copyright license to -reproduce, prepare derivative works of, publicly display, publicly perform, -distribute and sublicense the Contribution of such Contributor, if any, and -such derivative works, in source code and object code form. - - b) Subject to the terms of this Agreement, each Contributor hereby grants -Recipient a non-exclusive, worldwide, royalty-free patent license under -Licensed Patents to make, use, sell, offer to sell, import and otherwise -transfer the Contribution of such Contributor, if any, in source code and -object code form. 
This patent license shall apply to the combination of the -Contribution and the Program if, at the time the Contribution is added by the -Contributor, such addition of the Contribution causes such combination to be -covered by the Licensed Patents. The patent license shall not apply to any -other combinations which include the Contribution. No hardware per se is -licensed hereunder. - - c) Recipient understands that although each Contributor grants the -licenses to its Contributions set forth herein, no assurances are provided by -any Contributor that the Program does not infringe the patent or other -intellectual property rights of any other entity. Each Contributor disclaims -any liability to Recipient for claims brought by any other entity based on -infringement of intellectual property rights or otherwise. As a condition to -exercising the rights and licenses granted hereunder, each Recipient hereby -assumes sole responsibility to secure any other intellectual property rights -needed, if any. For example, if a third party patent license is required to -allow Recipient to distribute the Program, it is Recipient's responsibility to -acquire that license before distributing the Program. - - d) Each Contributor represents that to its knowledge it has sufficient -copyright rights in its Contribution, if any, to grant the copyright license -set forth in this Agreement. - -3. 
REQUIREMENTS - -A Contributor may choose to distribute the Program in object code form under -its own license agreement, provided that: - - a) it complies with the terms and conditions of this Agreement; and - - b) its license agreement: - - i) effectively disclaims on behalf of all Contributors all warranties and -conditions, express and implied, including warranties or conditions of title -and non-infringement, and implied warranties or conditions of merchantability -and fitness for a particular purpose; - - ii) effectively excludes on behalf of all Contributors all liability for -damages, including direct, indirect, special, incidental and consequential -damages, such as lost profits; - - iii) states that any provisions which differ from this Agreement are -offered by that Contributor alone and not by any other party; and - - iv) states that source code for the Program is available from such -Contributor, and informs licensees how to obtain it in a reasonable manner on -or through a medium customarily used for software exchange. - -When the Program is made available in source code form: - - a) it must be made available under this Agreement; and - - b) a copy of this Agreement must be included with each copy of the -Program. - -Contributors may not remove or alter any copyright notices contained within the -Program. - -Each Contributor must identify itself as the originator of its Contribution, if -any, in a manner that reasonably allows subsequent Recipients to identify the -originator of the Contribution. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with -respect to end users, business partners and the like. While this license is -intended to facilitate the commercial use of the Program, the Contributor who -includes the Program in a commercial product offering should do so in a manner -which does not create potential liability for other Contributors. 
Therefore, if -a Contributor includes the Program in a commercial product offering, such -Contributor ("Commercial Contributor") hereby agrees to defend and indemnify -every other Contributor ("Indemnified Contributor") against any losses, damages -and costs (collectively "Losses") arising from claims, lawsuits and other legal -actions brought by a third party against the Indemnified Contributor to the -extent caused by the acts or omissions of such Commercial Contributor in -connection with its distribution of the Program in a commercial product -offering. The obligations in this section do not apply to any claims or Losses -relating to any actual or alleged intellectual property infringement. In order -to qualify, an Indemnified Contributor must: a) promptly notify the Commercial -Contributor in writing of such claim, and b) allow the Commercial Contributor -to control, and cooperate with the Commercial Contributor in, the defense and -any related settlement negotiations. The Indemnified Contributor may -participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product -offering, Product X. That Contributor is then a Commercial Contributor. If that -Commercial Contributor then makes performance claims, or offers warranties -related to Product X, those performance claims and warranties are such -Commercial Contributor's responsibility alone. Under this section, the -Commercial Contributor would have to defend claims against the other -Contributors related to those performance claims and warranties, and if a court -requires any other Contributor to pay any damages as a result, the Commercial -Contributor must pay those damages. - -5. 
NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR -IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, -NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each -Recipient is solely responsible for determining the appropriateness of using -and distributing the Program and assumes all risks associated with its exercise -of rights under this Agreement, including but not limited to the risks and -costs of program errors, compliance with applicable laws, damage to or loss of -data, programs or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY -CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST -PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY -WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS -GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable -law, it shall not affect the validity or enforceability of the remainder of the -terms of this Agreement, and without further action by the parties hereto, such -provision shall be reformed to the minimum extent necessary to make such -provision valid and enforceable. 
- -If Recipient institutes patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging that the -Program itself (excluding combinations of the Program with other software or -hardware) infringes such Recipient's patent(s), then such Recipient's rights -granted under Section 2(b) shall terminate as of the date such litigation is -filed. - -All Recipient's rights under this Agreement shall terminate if it fails to -comply with any of the material terms or conditions of this Agreement and does -not cure such failure in a reasonable period of time after becoming aware of -such noncompliance. If all Recipient's rights under this Agreement terminate, -Recipient agrees to cease use and distribution of the Program as soon as -reasonably practicable. However, Recipient's obligations under this Agreement -and any licenses granted by Recipient relating to the Program shall continue -and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, but in -order to avoid inconsistency the Agreement is copyrighted and may only be -modified in the following manner. The Agreement Steward reserves the right to -publish new versions (including revisions) of this Agreement from time to time. -No one other than the Agreement Steward has the right to modify this Agreement. -The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to -serve as the Agreement Steward to a suitable separate entity. Each new version -of the Agreement will be given a distinguishing version number. The Program -(including Contributions) may always be distributed subject to the version of -the Agreement under which it was received. In addition, after a new version of -the Agreement is published, Contributor may elect to distribute the Program -(including its Contributions) under the new version. 
Except as expressly stated -in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to -the intellectual property of any Contributor under this Agreement, whether -expressly, by implication, estoppel or otherwise. All rights in the Program not -expressly granted under this Agreement are reserved. - -This Agreement is governed by the laws of the State of New York and the -intellectual property laws of the United States of America. No party to this -Agreement will bring a legal action under this Agreement more than one year -after the cause of action arose. Each party waives its rights to a jury trial -in any resulting litigation. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-aether-provider.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-aether-provider.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-aether-provider.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-compat.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-compat.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-compat.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-core.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-core.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-core.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-embedder.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-embedder.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-embedder.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model-builder.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model-builder.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model-builder.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-model.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-plugin-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-plugin-api.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-plugin-api.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-repository-metadata.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-repository-metadata.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-repository-metadata.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings-builder.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings-builder.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings-builder.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-settings.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-cipher.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-cipher.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-cipher.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-classworlds.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-classworlds.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-classworlds.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-component-annotations.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-component-annotations.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-component-annotations.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-container-default.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-container-default.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-container-default.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-interpolation.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-interpolation.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-interpolation.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-sec-dispatcher.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-sec-dispatcher.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-sec-dispatcher.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-utils.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-utils.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-plexus-utils.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-resolver.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-resolver.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-resolver.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake.txt new file mode 100644 index 0000000000000000000000000000000000000000..e257174b059379c5431340f069f7baf8ffdabfa5 --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake.txt @@ -0,0 +1,11 @@ +Copyright 2010-2012 Twitter, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this +file except in compliance with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-artifact.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-spring-boot-starter-cache.txt similarity index 99% rename from dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-artifact.txt rename to dolphinscheduler-dist/release-docs/licenses/LICENSE-spring-boot-starter-cache.txt index d645695673349e3947e8e5ae42332d0ac3164cd7..82714d7648eb4f6cda2ed88fc4768e7d05472fe6 100644 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-maven-artifact.txt +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-spring-boot-starter-cache.txt @@ -179,7 +179,7 @@ APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" + boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. + limitations under the License. 
\ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-validation-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-validation-api.txt deleted file mode 100644 index babf57d5e0617522bc4ba5ca380a1e82fd57569d..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-validation-api.txt +++ /dev/null @@ -1,4 +0,0 @@ -Bean Validation API - -License: Apache License, Version 2.0 -See the license.txt file in the root directory or . diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-wagon-provider-api.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-wagon-provider-api.txt deleted file mode 100644 index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-wagon-provider-api.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-xmlbeans.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-xmlbeans.txt deleted file mode 100644 index 261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64..0000000000000000000000000000000000000000 --- a/dolphinscheduler-dist/release-docs/licenses/LICENSE-xmlbeans.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-bin.xml b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-bin.xml index c918aefa2a0bda4ad076a40d17811d1345556a52..4fd8a14ad38919adf6f4cd63711005d19ef3206c 100644 --- a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-bin.xml +++ b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-bin.xml @@ -28,8 +28,10 @@ - ${basedir}/../dolphinscheduler-alert/src/main/resources + ${basedir}/../dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json @@ -41,19 +43,20 @@ ${basedir}/../dolphinscheduler-api/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json - - application.properties - conf ${basedir}/../dolphinscheduler-common/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json @@ -64,6 +67,8 @@ ${basedir}/../dolphinscheduler-dao/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json @@ -77,6 +82,8 @@ ${basedir}/../dolphinscheduler-server/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json @@ -88,6 +95,8 @@ ${basedir}/../dolphinscheduler-service/src/main/resources + **/*.yaml + **/*.yml **/*.properties **/*.xml **/*.json @@ -97,37 +106,21 @@ - src/main/resources + ${basedir}/../dolphinscheduler-standalone-server/src/main/resources - 
**/*.properties - **/*.xml - **/*.json + **/*.yaml conf - ${basedir}/../dolphinscheduler-server/target/dolphinscheduler-server-${project.version} + ${basedir}/../dolphinscheduler-python/src/main/resources - **/*.* - - . - - - - ${basedir}/../dolphinscheduler-api/target/dolphinscheduler-api-${project.version} - - **/*.* - - . - - - - ${basedir}/../dolphinscheduler-alert/target/dolphinscheduler-alert-${project.version} - - **/*.* + **/*.yaml + **/*.xml + **/*.properties - . + conf @@ -147,11 +140,11 @@ - ${basedir}/../sql + ${basedir}/../dolphinscheduler-dao/src/main/resources/sql **/* - ./sql + ./sql/sql @@ -167,7 +160,7 @@ env/*.* - ./conf + conf @@ -186,7 +179,6 @@ *.sh *.py - DISCLAIMER . diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-python-api.xml similarity index 38% rename from ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml rename to dolphinscheduler-dist/src/main/assembly/dolphinscheduler-python-api.xml index e89962d9003d2edb9fc91c6eb98fad34361ec34b..cd37acee629ae04112010fd0cd360dfced77a972 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml +++ b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-python-api.xml @@ -14,63 +14,21 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - - zookeeper.dolphinscheduler.root - /dolphinscheduler - - dolphinscheduler root directory - - - - - zookeeper.session.timeout - 300 - - int - - - - - - - zookeeper.connection.timeout - 300 - - int - - - - - - - zookeeper.retry.base.sleep - 100 - - int - - - - - - - zookeeper.retry.max.sleep - 30000 - - int - - - - - - - zookeeper.retry.maxtime - 5 - - int - - - - - - \ No newline at end of file + + + python-api + + dir + + false + + + + ${basedir}/../dolphinscheduler-python/pydolphinscheduler/dist + . 
+ + + diff --git a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-src.xml b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-src.xml index e675cfb2ba727f06d36ab932f812064b39e759c5..e275d6e42d15c0b5ddac8ccbbaa6e2d9052fdb74 100644 --- a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-src.xml +++ b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-src.xml @@ -58,6 +58,13 @@ **/dolphinscheduler-ui/node/** **/dolphinscheduler-ui/node_modules/** + + **/dolphinscheduler-python/pydolphinscheduler/.pytest_cache/** + **/dolphinscheduler-python/pydolphinscheduler/build/** + **/dolphinscheduler-python/pydolphinscheduler/dist/** + **/dolphinscheduler-python/pydolphinscheduler/dist/** + **/dolphinscheduler-python/pydolphinscheduler/htmlcov/** + **/.settings/** **/.project diff --git a/dolphinscheduler-dist/src/main/provisio/dolphinscheduler.xml b/dolphinscheduler-dist/src/main/provisio/dolphinscheduler.xml index 2ed6a3fc58d2f7c12aa49b74e1be6b0c06026a53..739e3c26dfc759b09ab9c7aff22cda2a3c92e88e 100644 --- a/dolphinscheduler-dist/src/main/provisio/dolphinscheduler.xml +++ b/dolphinscheduler-dist/src/main/provisio/dolphinscheduler.xml @@ -33,87 +33,4 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file + diff --git a/dolphinscheduler-microbench/pom.xml b/dolphinscheduler-microbench/pom.xml deleted file mode 100644 index 93556166242937671a9befc4890e21651fdc9bbb..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/pom.xml +++ /dev/null @@ -1,105 +0,0 @@ - - - - - dolphinscheduler - org.apache.dolphinscheduler - 2.0.0-SNAPSHOT - - 4.0.0 - - dolphinscheduler-microbench - jar - ${project.artifactId} - - - UTF-8 - 1.21 - 1.8 - benchmarks - - - - - - org.openjdk.jmh - jmh-core - ${jmh.version} - - - - org.openjdk.jmh - jmh-generator-annprocess - ${jmh.version} - provided - - - - junit - 
junit - compile - - - - org.slf4j - slf4j-api - - - org.apache.dolphinscheduler - dolphinscheduler-remote - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - ${maven-compiler-plugin.version} - - ${javac.target} - ${javac.target} - ${javac.target} - false - - - - - org.apache.maven.plugins - maven-assembly-plugin - ${maven-assembly-plugin.version} - - - - org.openjdk.jmh.Main - - - - jar-with-dependencies - - - - - - - - - \ No newline at end of file diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java deleted file mode 100644 index 25f0ae911407f8e3a752abfdf7becec9f5cbd2db..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.microbench.base; - -import org.junit.Test; -import org.openjdk.jmh.annotations.*; -import org.openjdk.jmh.results.format.ResultFormatType; -import org.openjdk.jmh.runner.Runner; -import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; -import org.openjdk.jmh.runner.options.OptionsBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - -/** - * BaseBenchMark - * If you need to test jmh, please extend him first - */ -@Warmup(iterations = AbstractBaseBenchmark.DEFAULT_WARMUP_ITERATIONS) -@Measurement(iterations = AbstractBaseBenchmark.DEFAULT_MEASURE_ITERATIONS) -@State(Scope.Thread) -@Fork(AbstractBaseBenchmark.DEFAULT_FORKS) -public abstract class AbstractBaseBenchmark { - - static final int DEFAULT_WARMUP_ITERATIONS = 10; - - static final int DEFAULT_MEASURE_ITERATIONS = 10; - - static final int DEFAULT_FORKS = 2; - - private static Logger logger = LoggerFactory.getLogger(AbstractBaseBenchmark.class); - - - private ChainedOptionsBuilder newOptionsBuilder() { - - String className = getClass().getSimpleName(); - - ChainedOptionsBuilder optBuilder = new OptionsBuilder() - // set benchmark ClassName - .include(className); - - if (getMeasureIterations() > 0) { - optBuilder.warmupIterations(getMeasureIterations()); - } - - if (getMeasureIterations() > 0) { - optBuilder.measurementIterations(getMeasureIterations()); - } - - if (getForks() > 0) { - optBuilder.forks(getForks()); - } - - String output = getReportDir(); - if (output != null) { - boolean writeFileStatus; - String filePath = getReportDir() + className + ".json"; - File file = new File(filePath); - - if (file.exists()) { - writeFileStatus = file.delete(); - - - } else { - writeFileStatus = file.getParentFile().mkdirs(); - try { - writeFileStatus = file.createNewFile(); - } catch (IOException e) { - logger.warn("jmh test create file error" + e); - } - } - if (writeFileStatus) { - 
optBuilder.resultFormat(ResultFormatType.JSON) - .result(filePath); - } - } - return optBuilder; - } - - @Test - public void run() throws Exception { - new Runner(newOptionsBuilder().build()).run(); - } - - private int getWarmupIterations() { - - String value = System.getProperty("warmupIterations"); - return null != value ? Integer.parseInt(value) : -1; - } - - private int getMeasureIterations() { - String value = System.getProperty("measureIterations"); - return null != value ? Integer.parseInt(value) : -1; - } - - private static String getReportDir() { - return System.getProperty("perfReportDir"); - } - - private static int getForks() { - String value = System.getProperty("forkCount"); - return null != value ? Integer.parseInt(value) : -1; - } - - -} - diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/EnumBenchMark.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/EnumBenchMark.java deleted file mode 100644 index dcce5368e3e8dbb9dc964ee43871c8d1ba7c549a..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/EnumBenchMark.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.microbench.common; - - -import org.apache.dolphinscheduler.microbench.base.AbstractBaseBenchmark; -import org.openjdk.jmh.annotations.*; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -/** - *Enum values JMH test - */ -@Warmup(iterations = 2, time = 1) -@Measurement(iterations = 4, time = 1) -@State(Scope.Benchmark) -public class EnumBenchMark extends AbstractBaseBenchmark { - - @Benchmark - public boolean simpleTest(){ - return Boolean.TRUE; - } - @Param({"101", "108", "103", "104", "105", "103"}) - private int testNum; - - - @Benchmark - @BenchmarkMode(Mode.AverageTime) - @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void enumValuesTest() { - TestTypeEnum.oldGetNameByType(testNum); - } - - @Benchmark - @BenchmarkMode(Mode.AverageTime) - @OutputTimeUnit(TimeUnit.MICROSECONDS) - public void enumStaticMapTest() { - TestTypeEnum.newGetNameByType(testNum); - } - - - public enum TestTypeEnum { - - TYPE_101(101, "TYPE101"), - TYPE_102(102, "TYPE102"), - TYPE_103(103, "TYPE103"), - TYPE_104(104, "TYPE104"), - TYPE_105(105, "TYPE105"), - TYPE_106(106, "TYPE106"), - TYPE_107(107, "TYPE107"), - TYPE_108(108, "TYPE108"); - - private int code; - private String name; - - public int getCode() { - return code; - } - - - public String getName() { - return name; - } - - - TestTypeEnum(int code, String name) { - this.code = code; - this.name = name; - } - - private static final Map TEST_TYPE_MAP = new HashMap<>(); - - static { - for (TestTypeEnum testTypeEnum : TestTypeEnum.values()) { - TEST_TYPE_MAP.put(testTypeEnum.code,testTypeEnum); - } - } - - public static void newGetNameByType(int code) { - if (TEST_TYPE_MAP.containsKey(code)) { - TEST_TYPE_MAP.get(code); - return; - } - throw new IllegalArgumentException("invalid code : " + code); - } - - public static void oldGetNameByType(int 
code) { - for (TestTypeEnum testTypeEnum : TestTypeEnum.values()) { - if (testTypeEnum.getCode() == code) { - return; - } - } - throw new IllegalArgumentException("invalid code : " + code); - } - } - -} diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/IUserService.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/IUserService.java deleted file mode 100644 index 3a77aa8a0da934b4b2930187fdfb6a84900a84d0..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/IUserService.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.microbench.common; - -import org.apache.dolphinscheduler.rpc.base.Rpc; - -/** - * IUserService - */ -public interface IUserService { - - @Rpc(async = true, serviceCallback = UserCallback.class, retries = 9999) - Boolean say(String s); - - Integer hi(int num); -} diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/RpcTest.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/RpcTest.java deleted file mode 100644 index ecc54f8f2613d3799b610f234f91d50207527efe..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/RpcTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.microbench.common; - -import org.apache.dolphinscheduler.microbench.base.AbstractBaseBenchmark; -import org.apache.dolphinscheduler.remote.config.NettyServerConfig; -import org.apache.dolphinscheduler.remote.utils.Host; -import org.apache.dolphinscheduler.rpc.client.IRpcClient; -import org.apache.dolphinscheduler.rpc.client.RpcClient; -import org.apache.dolphinscheduler.rpc.remote.NettyClient; -import org.apache.dolphinscheduler.rpc.remote.NettyServer; - -import java.util.concurrent.TimeUnit; - -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.Setup; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.TearDown; -import org.openjdk.jmh.annotations.Warmup; - -@Warmup(iterations = 5, time = 1) -@Measurement(iterations = 10, time = 1) -@State(Scope.Benchmark) -@BenchmarkMode({Mode.Throughput, Mode.AverageTime, Mode.SampleTime}) -public class RpcTest extends AbstractBaseBenchmark { - private NettyServer nettyServer; - - private IUserService userService; - - private Host host; - private IRpcClient rpcClient = new RpcClient(); - - @Setup - public void before() throws Exception { - nettyServer = new NettyServer(new NettyServerConfig()); - IRpcClient rpcClient = new RpcClient(); - host = new Host("127.0.0.1", 12346); - userService = rpcClient.create(IUserService.class, host); - - } - - @Benchmark - @BenchmarkMode({Mode.Throughput, Mode.AverageTime, Mode.SampleTime}) - @OutputTimeUnit(TimeUnit.MILLISECONDS) - public void sendTest() throws Exception { - - userService = rpcClient.create(IUserService.class, host); - Integer result = userService.hi(1); - } - - @TearDown - public void after() { - NettyClient.getInstance().close(); - 
nettyServer.close(); - } - -} diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserCallback.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserCallback.java deleted file mode 100644 index bb32093f91c7e398734c4b15d974e98090668863..0000000000000000000000000000000000000000 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/common/UserCallback.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.microbench.common; - -import org.apache.dolphinscheduler.rpc.common.AbstractRpcCallBack; - -/** - * UserCallback - */ -public class UserCallback extends AbstractRpcCallBack { - @Override - public void run(Object object) { - - } -} diff --git a/dolphinscheduler-python/pom.xml b/dolphinscheduler-python/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..ea7115eb37ccba411bed4c7a1e26462f1672f761 --- /dev/null +++ b/dolphinscheduler-python/pom.xml @@ -0,0 +1,124 @@ + + + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 2.0.10-SNAPSHOT + + dolphinscheduler-python + ${project.artifactId} + jar + + + + + org.apache.dolphinscheduler + dolphinscheduler-api + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + log4j-to-slf4j + org.apache.logging.log4j + + + + + + net.sf.py4j + py4j + + + + + + + release + + + + org.codehaus.mojo + exec-maven-plugin + + + python-api-prepare + prepare-package + + exec + + + python3 + ${project.basedir}/pydolphinscheduler + + -m + pip + install + --upgrade + pip + .[build] + + + + + python-api-clean + prepare-package + + exec + + + python3 + ${project.basedir}/pydolphinscheduler + + setup.py + pre_clean + + + + + python-api-build + prepare-package + + exec + + + python3 + ${project.basedir}/pydolphinscheduler + + -m + build + + + + + + + + + + diff --git a/dolphinscheduler-python/pydolphinscheduler/.coveragerc b/dolphinscheduler-python/pydolphinscheduler/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..524cb73cb61423c5dfabb682501b0b9045601ec7 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/.coveragerc @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[run] +command_line = -m pytest +omit = + # Ignore all test cases in tests/ + tests/* + # TODO. Temporary ignore java_gateway file, because we could not find good way to test it. + src/pydolphinscheduler/java_gateway.py + +[report] +# Don’t report files that are 100% covered +skip_covered = True +show_missing = True +precision = 2 +# Report will fail when coverage under 90.00% +fail_under = 85 diff --git a/dolphinscheduler-python/pydolphinscheduler/.flake8 b/dolphinscheduler-python/pydolphinscheduler/.flake8 new file mode 100644 index 0000000000000000000000000000000000000000..7f659a21b3a94d771dee3b442f46899cc7fdd985 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/.flake8 @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +[flake8] +max-line-length = 110 +exclude = + .git, + __pycache__, + .pytest_cache, + *.egg-info, + docs/source/conf.py + old, + build, + dist, + htmlcov +ignore = + # It's clear and not need to add docstring + D107, # D107: Don't require docstrings on __init__ + D105, # D105: Missing docstring in magic method + # Conflict to Black + W503 # W503: Line breaks before binary operators +per-file-ignores = + src/pydolphinscheduler/side/__init__.py:F401 + src/pydolphinscheduler/tasks/__init__.py:F401 diff --git a/dolphinscheduler-python/pydolphinscheduler/.isort.cfg b/dolphinscheduler-python/pydolphinscheduler/.isort.cfg new file mode 100644 index 0000000000000000000000000000000000000000..70fa2e05bd4ecf0c17a13f7e856ca63d7e3ebd90 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/.isort.cfg @@ -0,0 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +[settings] +profile=black diff --git a/dolphinscheduler-python/pydolphinscheduler/DEVELOP.md b/dolphinscheduler-python/pydolphinscheduler/DEVELOP.md new file mode 100644 index 0000000000000000000000000000000000000000..f22ab8619bf2de13ebaac6bf1f3483a71ea7b69b --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/DEVELOP.md @@ -0,0 +1,120 @@ + + +# Develop + +pydolphinscheduler is python API for Apache DolphinScheduler, it just defines what workflow look like instead of +store or execute it. We here use [py4j][py4j] to dynamically access Java Virtual Machine. + +## Setup Develop Environment + +**PyDolphinScheduler** use GitHub to hold all source code, you should clone the code before you do same change. + +```shell +git clone git@github.com:apache/dolphinscheduler.git +``` + +Now, we should install all dependence to make sure we could run test or check code style locally + +```shell +cd dolphinscheduler/dolphinscheduler-python/pydolphinscheduler +pip install .[dev] +``` + +Next, we have to open pydolphinscheduler project in you editor. We recommend you use [pycharm][pycharm] +instead of [IntelliJ IDEA][idea] to open it. And you could just open directory +`dolphinscheduler-python/pydolphinscheduler` instead of `dolphinscheduler-python`. + + +## Brief Concept + +Apache DolphinScheduler is design to define workflow by UI, and pydolphinscheduler try to define it by code. When +define by code, user usually do not care user, tenant, or queue exists or not. All user care about is created +a new workflow by the code his/her definition. So we have some **side object** in `pydolphinscheduler/side` +directory, their only check object exists or not, and create them if not exists. + +### Process Definition + +pydolphinscheduler workflow object name, process definition is also same name as Java object(maybe would be change to +other word for more simple). 
+ +### Tasks + +pydolphinscheduler tasks object, we use tasks to define exact job we want DolphinScheduler do for us. For now, +we only support `shell` task to execute shell task. [This link][all-task] list all tasks support in DolphinScheduler +and would be implemented in the further. + +## Code Style + +We use [isort][isort] to automatically keep Python imports alphabetically, and use [Black][black] for code +formatter and [Flake8][flake8] for pep8 checker. If you use [pycharm][pycharm]or [IntelliJ IDEA][idea], +maybe you could follow [Black-integration][black-editor] to configure them in your environment. + +Our Python API CI would automatically run code style checker and unittest when you submit pull request in +GitHub, you could also run static check locally. + +```shell +# We recommend you run isort and Black before Flake8, because Black could auto fix some code style issue +# but Flake8 just hint when code style not match pep8 + +# Run Isort +isort . + +# Run Black +black . + +# Run Flake8 +flake8 +``` + +## Testing + +pydolphinscheduler using [pytest][pytest] to test our codebase. GitHub Action will run our test when you create +pull request or commit to dev branch, with python version `3.6|3.7|3.8|3.9` and operating system `linux|macOS|windows`. + +To test locally, you could directly run pytest after set `PYTHONPATH` + +```shell +PYTHONPATH=src/ pytest +``` + +We try to keep pydolphinscheduler usable through unit test coverage. 90% test coverage is our target, but for +now, we require test coverage up to 85%, and each pull request leas than 85% would fail our CI step +`Tests coverage`. We use [coverage][coverage] to check our test coverage, and you could check it locally by +run command. + +```shell +coverage run && coverage report +``` + +It would not only run unit test but also show each file coverage which cover rate less than 100%, and `TOTAL` +line show you total coverage of you code. 
If your CI failed with coverage you could go and find some reason by +this command output. + + +[py4j]: https://www.py4j.org/index.html +[pycharm]: https://www.jetbrains.com/pycharm +[idea]: https://www.jetbrains.com/idea/ +[all-task]: https://dolphinscheduler.apache.org/en-us/docs/dev/user_doc/guide/task/shell.html +[pytest]: https://docs.pytest.org/en/latest/ +[black]: https://black.readthedocs.io/en/stable/index.html +[flake8]: https://flake8.pycqa.org/en/latest/index.html +[black-editor]: https://black.readthedocs.io/en/stable/integrations/editors.html#pycharm-intellij-idea +[coverage]: https://coverage.readthedocs.io/en/stable/ +[isort]: https://pycqa.github.io/isort/index.html diff --git a/dolphinscheduler-python/pydolphinscheduler/README.md b/dolphinscheduler-python/pydolphinscheduler/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9cc524d3460453af5caf1d546e3f4d8975e96692 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/README.md @@ -0,0 +1,87 @@ + + +# pydolphinscheduler + +[![PyPi Version](https://img.shields.io/pypi/v/apache-dolphinscheduler.svg?style=flat-square&logo=PyPi)](https://pypi.org/project/apache-dolphinscheduler/) +[![PyPi Python Versions](https://img.shields.io/pypi/pyversions/apache-dolphinscheduler.svg?style=flat-square&logo=python)](https://pypi.org/project/apache-dolphinscheduler/) +[![PyPi License](https://img.shields.io/pypi/l/apache-dolphinscheduler.svg?style=flat-square)](https://pypi.org/project/apache-dolphinscheduler/) +[![PyPi Status](https://img.shields.io/pypi/status/apache-dolphinscheduler.svg?style=flat-square)](https://pypi.org/project/apache-dolphinscheduler/) +[![PyPi Downloads](https://img.shields.io/pypi/dm/apache-dolphinscheduler?style=flat-square)](https://pypi.org/project/apache-dolphinscheduler/) + +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square)](https://github.com/psf/black) +[![Imports: 
isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat-square&labelColor=ef8336)](https://pycqa.github.io/isort) +[![GitHub Build](https://github.com/apache/dolphinscheduler/actions/workflows/py-ci.yml/badge.svg?branch=dev)](https://github.com/apache/dolphinscheduler/actions?query=workflow%3A%22Python+API%22) + +**PyDolphinScheduler** is python API for Apache DolphinScheduler, which allow you definition +your workflow by python code, aka workflow-as-codes. + +## Quick Start + +### Installation + +```shell +# Install +$ pip install apache-dolphinscheduler + +# Check installation, it is success if you see version output, here we use 0.1.0 as example +$ python -c "import pydolphinscheduler; print(pydolphinscheduler.__version__)" +0.1.0 +``` + +Here we show you how to install and run a simple example of pydolphinscheduler + +### Start Server And Run Example + +Before you run an example, you have to start backend server. You could follow +[development setup](https://dolphinscheduler.apache.org/en-us/development/development-environment-setup.html) +section "DolphinScheduler Standalone Quick Start" to set up developer environment. You have to start backend +and frontend server in this step, which mean that you could view DolphinScheduler UI in your browser with URL +http://localhost:12345/dolphinscheduler + +After backend server is being start, all requests from `pydolphinscheduler` would be sent to backend server. +And for now we could run a simple example by: + + + +```shell +# Please make sure your terminal could +curl https://raw.githubusercontent.com/apache/dolphinscheduler/dev/dolphinscheduler-python/pydolphinscheduler/examples/tutorial.py -o ./tutorial.py +python ./tutorial.py +``` + +> **_NOTICE:_** Since Apache DolphinScheduler's tenant is requests while running command, you might need to change +> tenant value in `example/tutorial.py`. For now the value is `tenant_exists`, please change it to username exists +> in you environment. 
+ +After command execute, you could see a new project with single process definition named *tutorial* in the +[UI-project list](https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/guide/project/project-list.html). + +## Develop + +Until now, we finish quick start by an example of pydolphinscheduler and run it. If you want to inspect or join +pydolphinscheduler develop, you could take a look at [develop](./DEVELOP.md) + +## Release + +If you are interested in how to release **PyDolphinScheduler**, you could go and see at [release](./RELEASE.md) + +## What's more + +For more detail information, please go to see **PyDolphinScheduler** [document](https://dolphinscheduler.apache.org/python/index.html) diff --git a/dolphinscheduler-python/pydolphinscheduler/RELEASE.md b/dolphinscheduler-python/pydolphinscheduler/RELEASE.md new file mode 100644 index 0000000000000000000000000000000000000000..6c2b46eee6b2e320a6a1d96951b9aee8802afb88 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/RELEASE.md @@ -0,0 +1,35 @@ + + +# Release + +**PyDolphinScheduler** office release is in [ASF Distribution Directory](https://downloads.apache.org/dolphinscheduler/), +and it should be released together with [apache-dolphinscheduler](https://github.com/apache/dolphinscheduler). + +## To ASF Distribution Directory + +You could release to [ASF Distribution Directory](https://downloads.apache.org/dolphinscheduler/) according to +[release guide](https://dolphinscheduler.apache.org/en-us/community/release-prepare.html) in DolphinScheduler +website. + +## To PyPi + +[PyPI](https://pypi.org), Python Package Index, is a repository of software for the Python programming language. +User could install Python package from it. 
Release to PyPi make user easier to install and try PyDolphinScheduler, +There is an official way to package project from [PyPA](https://packaging.python.org/en/latest/tutorials/packaging-projects) diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/Makefile b/dolphinscheduler-python/pydolphinscheduler/docs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..985198a7795d7ff704936134d4f9b80217b0d72f --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/Makefile @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. + +# Add opts `turn warnings into errors` strict sphinx-build behavior +SPHINXOPTS ?= -W +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/make.bat b/dolphinscheduler-python/pydolphinscheduler/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..feac4c92c04e9958e37d1a82dbc454cf1a48be31 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/make.bat @@ -0,0 +1,54 @@ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. + +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build +REM Add opts `turn warnings into errors` strict sphinx-build behavior +set SPHINXOPTS=-W + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/_static/.gitkeep b/dolphinscheduler-python/pydolphinscheduler/docs/source/_static/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/api.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/api.rst new file mode 100644 index 0000000000000000000000000000000000000000..8e55ea5583f71782ebf3b4ff9ef563010386975c --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/api.rst @@ -0,0 +1,47 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +API +=== + +Core +---- + +.. automodule:: pydolphinscheduler.core + :inherited-members: + +Sides +----- + +.. automodule:: pydolphinscheduler.side + :inherited-members: + +Tasks +----- + +.. automodule:: pydolphinscheduler.tasks + :inherited-members: + +Constants +--------- + +.. 
automodule:: pydolphinscheduler.constants + +Exceptions +---------- + +.. automodule:: pydolphinscheduler.exceptions diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/concept.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/concept.rst new file mode 100644 index 0000000000000000000000000000000000000000..9a9527df1dea848fd836e62df256c3231439a92e --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/concept.rst @@ -0,0 +1,151 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Concepts +======== + +In this section, you would know the core concepts of *PyDolphinScheduler*. + +Process Definition +------------------ + +Process definition describe the whole things except `tasks`_ and `tasks dependence`_, which including +name, schedule interval, schedule start time and end time. You would know scheduler + +Process definition could be initialized in normal assign statement or in context manger. + +.. 
code-block:: python + + # Initialization with assign statement + pd = ProcessDefinition(name="my first process definition") + + # Or context manager + with ProcessDefinition(name="my first process definition") as pd: + pd.submit() + +Process definition is the main object for communication between *PyDolphinScheduler* and the DolphinScheduler daemon. +After the process definition and tasks are declared, you could use `submit` and `run` to notify the server of your definition. + +If you just want to submit your definition and create the workflow, without running it, you should use attribute `submit`. +But if you want to run the workflow after you submit it, you could use attribute `run`. + +.. code-block:: python + + # Just submit the definition, without running it + pd.submit() + + # Both submit and run the definition + pd.run() + +Schedule +~~~~~~~~ + +We use the parameter `schedule` to determine the schedule interval of the workflow. *PyDolphinScheduler* supports seven +asterisk expressions, and the meaning of each position is as below + +.. code-block:: text + + * * * * * * * + ┬ ┬ ┬ ┬ ┬ ┬ ┬ + │ │ │ │ │ │ │ + │ │ │ │ │ │ └─── year + │ │ │ │ │ └───── day of week (0 - 7) (0 to 6 are Sunday to Saturday, or use names; 7 is Sunday, the same as 0) + │ │ │ │ └─────── month (1 - 12) + │ │ │ └───────── day of month (1 - 31) + │ │ └─────────── hour (0 - 23) + │ └───────────── min (0 - 59) + └─────────────── second (0 - 59) + +Here we add some example crontab: + +- `0 0 0 * * ? *`: Workflow executes every day at 00:00:00. +- `10 2 * * * ? *`: Workflow executes hourly at ten past two. +- `10,11 20 0 1,2 * ? *`: Workflow executes on the first and second day of each month at 00:20:10 and 00:20:11. + +Tenant +~~~~~~ + +Tenant is the user who runs the task command on the machine or in the virtual machine. It could be assigned by a simple string. + +.. code-block:: python + + # + pd = ProcessDefinition(name="process definition tenant", tenant="tenant_exists") + +.. 
note:: + + Make sure the tenant exists in the target machine, otherwise it will raise an error when you try to run the command + +Tasks +----- + +Task is the minimum unit running an actual job, and it is a node of the DAG, aka directed acyclic graph. You could define +what you want to do in the task. It has some required parameters to make uniqueness and definition. + +Here we use :py:meth:`pydolphinscheduler.tasks.Shell` as an example; parameters `name` and `command` are required and must be provided. Parameter +`name` sets the name of the task, and parameter `command` declares the command you wish to run in this task. + +.. code-block:: python + + # We named this task as "shell", and just run command `echo shell task` + shell_task = Shell(name="shell", command="echo shell task") + +If you want to see all types of tasks, you could see :doc:`tasks/index`. + +Tasks Dependence +~~~~~~~~~~~~~~~~ + +You could define many tasks in one single `Process Definition`_. If all those tasks are in parallel processing, +then you could leave them alone without adding any additional information. But if some tasks should +not be run unless their predecessor tasks in the workflow have been done, we should set task dependence for them. Setting tasks dependence +has two main ways and both of them are easy. You could use the bitwise operators `>>` and `<<`, or the task attributes +`set_downstream` and `set_upstream` to do it. + +.. code-block:: python + + # Set task1 as task2 upstream + task1 >> task2 + # You could use attribute `set_downstream` too, which is the same as `task1 >> task2` + task1.set_downstream(task2) + + # Set task1 as task2 downstream + task1 << task2 + # It is the same as attribute `set_upstream` + task1.set_upstream(task2) + + # Besides, we could set dependence between a task and a sequence of tasks, + # we set `task1` as upstream to both `task2` and `task3`. It is useful + # for some tasks that have the same dependence. 
+ task1 >> [task2, task3] + +Task With Process Definition +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In most data orchestration cases, you should assign the attribute `process_definition` to the task instance to +decide the workflow of the task. You could set `process_definition` in both normal assignment or in context manager mode + +.. code-block:: python + + # Normal assignment, have to explicitly declare and pass a `ProcessDefinition` instance to the task + pd = ProcessDefinition(name="my first process definition") + shell_task = Shell(name="shell", command="echo shell task", process_definition=pd) + + # Context manager, the `ProcessDefinition` instance pd would be implicitly declared to the task + with ProcessDefinition(name="my first process definition") as pd: + shell_task = Shell(name="shell", command="echo shell task") + +With both `Process Definition`_, `Tasks`_ and `Tasks Dependence`_, we could build a workflow with multiple tasks. diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/conf.py b/dolphinscheduler-python/pydolphinscheduler/docs/source/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..5ee73a5bb43f6db44bbe36537f5e5c633f4af865 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/conf.py @@ -0,0 +1,88 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = "pydolphinscheduler" +copyright = "2022, apache" +author = "apache" + +# The full version, including alpha/beta/rc tags +release = "0.0.1" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + # Measures durations of Sphinx processing + "sphinx.ext.duration", + # Semi-automatic make docstrings to document + "sphinx.ext.autodoc", + "sphinx.ext.viewcode", + "sphinx.ext.autosectionlabel", + "sphinx_rtd_theme", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. 
+exclude_patterns = [] + +autodoc_default_options = { + "members": True, + "show-inheritance": True, + "private-members": True, + "undoc-members": True, + "member-order": "groupwise", +} + +autosectionlabel_prefix_document = True + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/index.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..b04c26f094030fcc06c3248549cbede21e9412a1 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/index.rst @@ -0,0 +1,42 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +PyDolphinScheduler +================== + +**PyDolphinScheduler** is Python API for `Apache DolphinScheduler `_, +which allow you definition your workflow by Python code, aka workflow-as-codes. + +I could go and find how to :ref:`install ` the project. Or if you want to see simply example +then go and see :doc:`tutorial` for more detail. + + +.. toctree:: + :maxdepth: 2 + + start + tutorial + concept + tasks/index + api + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/start.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/start.rst new file mode 100644 index 0000000000000000000000000000000000000000..0af90d54942e2631e175c3d88df293e2f45a052b --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/start.rst @@ -0,0 +1,113 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Getting Started +=============== + +To get started with *PyDolphinScheduler* you must ensure python and pip +installed on your machine, if you're already set up, you can skip straight +to `Installing PyDolphinScheduler`_, otherwise please continue with +`Installing Python`_. 
+ +Installing Python +----------------- + +How to install `python` and `pip` depends on what operating system +you're using. The python wiki provides up-to-date +`instructions for all platforms here`_. When you enter the website +and choose your operating system, you would be offered the choice to +select a Python version. *PyDolphinScheduler* recommends using a version above +Python 3.6 and we highly recommend you install *Stable Releases* instead +of *Pre-releases*. + +After you have downloaded and installed Python, you should open your terminal, +then type and run :code:`python --version` to check whether the installation +is correct or not. If all things are good, you could see the version in the console +without error (here is an example after Python 3.8.7 is installed) + +.. code-block:: bash + + $ python --version + Python 3.8.7 + +Installing PyDolphinScheduler +----------------------------- + +After Python is already installed on your machine following section +`installing Python`_, it is easy to install *PyDolphinScheduler* by pip. + +.. code-block:: bash + + $ pip install apache-dolphinscheduler + +The latest version of *PyDolphinScheduler* would be installed after you run the above +command in your terminal. You could go and `start Python Gateway Server`_ to finish +the preparation, and then go to :doc:`tutorial` to get your hands dirty. But if you +want to install the unreleased version of *PyDolphinScheduler*, you could go and see +section `installing PyDolphinScheduler in dev`_ for more detail. + +Installing PyDolphinScheduler In Dev +------------------------------------ + +Because the project is under development, some of the features are still not released. +If you want to try something unreleased you could install from the source code +which we host on GitHub + +.. 
code-block:: bash + + # Clone Apache DolphinScheduler repository + $ git clone git@github.com:apache/dolphinscheduler.git + # Install PyDolphinScheduler in develop mode + $ cd dolphinscheduler-python/pydolphinscheduler && pip install -e . + +After you installed *PyDolphinScheduler*, please remember `start Python Gateway Server`_ +which waiting for *PyDolphinScheduler*'s workflow definition require. + +Start Python Gateway Server +--------------------------- + +Since **PyDolphinScheduler** is Python API for `Apache DolphinScheduler`_, it +could define workflow and tasks structure, but could not run it unless you +`install Apache DolphinScheduler`_ and start Python gateway server. We only +and some key steps here and you could go `install Apache DolphinScheduler`_ +for more detail + +.. code-block:: bash + + # Start pythonGatewayServer + $ ./bin/dolphinscheduler-daemon.sh start pythonGatewayServer + +To check whether the server is alive or not, you could run :code:`jps`. And +the server is health if keyword `PythonGatewayServer` in the console. + +.. code-block:: bash + + $ jps + .... + 201472 PythonGatewayServer + .... + +What's More +----------- + +If you do not familiar with *PyDolphinScheduler*, you could go to :doc:`tutorial` +and see how it work. But if you already know the inside of *PyDolphinScheduler*, +maybe you could go and play with all :doc:`tasks/index` *PyDolphinScheduler* supports. + +.. _`instructions for all platforms here`: https://wiki.python.org/moin/BeginnersGuide/Download +.. _`Apache DolphinScheduler`: https://dolphinscheduler.apache.org +.. 
_`install Apache DolphinScheduler`: https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/guide/installation/standalone.html diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/condition.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/condition.rst new file mode 100644 index 0000000000000000000000000000000000000000..20b03500781dbef33d1ef744f2e1106da3fb51a2 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/condition.rst @@ -0,0 +1,33 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Condition +========= + +A condition task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_condition_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. 
automodule:: pydolphinscheduler.tasks.condition diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/datax.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/datax.rst new file mode 100644 index 0000000000000000000000000000000000000000..c07726941e32b72ad476eaea0030cd7905eb6b69 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/datax.rst @@ -0,0 +1,33 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Datax +===== + +A DataX task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_datax_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.datax diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/dependent.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/dependent.rst new file mode 100644 index 0000000000000000000000000000000000000000..fe26d0f30a392420b7d7599349c033a50bebf58c --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/dependent.rst @@ -0,0 +1,33 @@ +.. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Dependent +========= + +A dependent task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_dependent_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.dependent diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/flink.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/flink.rst new file mode 100644 index 0000000000000000000000000000000000000000..8db9ac266dbe8ca3746c7305d2c5a683eedc3672 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/flink.rst @@ -0,0 +1,33 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. 
http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Flink +===== + +A flink task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_flink_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.flink diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/http.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/http.rst new file mode 100644 index 0000000000000000000000000000000000000000..4c6d8f8e402d3ab3b8a560d14d3d5f2660345d35 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/http.rst @@ -0,0 +1,21 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +HTTP +==== + +.. 
automodule:: pydolphinscheduler.tasks.http diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/index.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..42dcdf9c8c5ece81286d93c4194df1b065088fcc --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/index.rst @@ -0,0 +1,41 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Tasks +===== + +In this section + +.. toctree:: + :maxdepth: 1 + + shell + sql + python + http + + switch + condition + dependent + + spark + flink + map_reduce + procedure + + datax + sub_process diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/map_reduce.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/map_reduce.rst new file mode 100644 index 0000000000000000000000000000000000000000..068b8d8b4165292b53e4d8a57b2e8a5a368235ee --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/map_reduce.rst @@ -0,0 +1,34 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Map Reduce +========== + + +A Map Reduce task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_map_reduce_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.map_reduce diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/procedure.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/procedure.rst new file mode 100644 index 0000000000000000000000000000000000000000..cd79eff140b403352b374a6d577ac4bcdb0e9a9a --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/procedure.rst @@ -0,0 +1,21 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Procedure +========= + +.. automodule:: pydolphinscheduler.tasks.procedure diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/python.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/python.rst new file mode 100644 index 0000000000000000000000000000000000000000..660e46a6e5f8f6fc2a7d73ae9a5efbcd36c4d116 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/python.rst @@ -0,0 +1,21 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Python +====== + +.. 
automodule:: pydolphinscheduler.tasks.python diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/shell.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/shell.rst new file mode 100644 index 0000000000000000000000000000000000000000..5ce16c3c9f49aa83ac0ee910506db4caf93bbd55 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/shell.rst @@ -0,0 +1,33 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Shell +===== + +A shell task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/tutorial.py + :start-after: [start workflow_declare] + :end-before: [end task_relation_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.shell diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/spark.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/spark.rst new file mode 100644 index 0000000000000000000000000000000000000000..cdb5902c370187f3f7591df480dca5bf1d4aa355 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/spark.rst @@ -0,0 +1,33 @@ +.. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Spark +===== + +A spark task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_spark_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.spark diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sql.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sql.rst new file mode 100644 index 0000000000000000000000000000000000000000..21eaec7ae97e461b5cd1cc23271d4b56a3fd6ca6 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sql.rst @@ -0,0 +1,21 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +SQL +=== + +.. automodule:: pydolphinscheduler.tasks.sql diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sub_process.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sub_process.rst new file mode 100644 index 0000000000000000000000000000000000000000..8a9f56220054f921ce0b97be7a3e28023c31ebd9 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/sub_process.rst @@ -0,0 +1,21 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Sub Process +=========== + +.. 
automodule:: pydolphinscheduler.tasks.sub_process diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/switch.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/switch.rst new file mode 100644 index 0000000000000000000000000000000000000000..d8b34a4ac9301c08ca8fdb739cf57dd10e028a30 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tasks/switch.rst @@ -0,0 +1,33 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Switch +====== + +A switch task type's example and dive into information of **PyDolphinScheduler**. + +Example +------- + +.. literalinclude:: ../../../src/pydolphinscheduler/examples/task_switch_example.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +Dive Into +--------- + +.. automodule:: pydolphinscheduler.tasks.switch diff --git a/dolphinscheduler-python/pydolphinscheduler/docs/source/tutorial.rst b/dolphinscheduler-python/pydolphinscheduler/docs/source/tutorial.rst new file mode 100644 index 0000000000000000000000000000000000000000..83c5746abaec5c12d34a249a9a30d3fcc33908ec --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/docs/source/tutorial.rst @@ -0,0 +1,150 @@ +.. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Tutorial +======== + +This tutorial shows you the basic concepts of *PyDolphinScheduler* and tells you +everything you should know before you submit or run your first workflow. If you +have not yet installed *PyDolphinScheduler* or started Apache DolphinScheduler, you +could go and see :ref:`how to getting start PyDolphinScheduler ` + +Overview of Tutorial +-------------------- + +Here is an overview of our tutorial; it may look a little complex, but do not +worry about that, because we explain this example below in as much detail as possible. + +.. literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :start-after: [start tutorial] + :end-before: [end tutorial] + +Import Necessary Module +----------------------- + +First of all, we should import the necessary modules, which we will use later, just +like any other Python package. We just create a minimal demo here, so we just import +:class:`pydolphinscheduler.core.process_definition` and +:class:`pydolphinscheduler.tasks.shell`. + +..
literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :start-after: [start package_import] + :end-before: [end package_import] + +If you want to use other task types, you could click and +:doc:`see all tasks we support ` + +Process Definition Declaration +------------------------------ + +We should instantiate objects after we import them from `import necessary module`_. +Here we declare the basic arguments for a process definition (aka, workflow). We define +the name of the process definition, using a `Python context manager`_, and it is +**the only required argument** for the process definition object. Besides that, we also +declare three arguments: `schedule` and `start_time`, which set the workflow schedule +interval and schedule start_time, and the argument `tenant`, which changes the workflow's +task running user in the worker; :ref:`section tenant ` in the *PyDolphinScheduler* +:doc:`concept` page has more detailed information. + +.. literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :start-after: [start workflow_declare] + :end-before: [end workflow_declare] + +We could find more detail about the process definition in +:ref:`concept about process definition ` if you are interested in it. +For all arguments of the process definition object, you could look in the +:class:`pydolphinscheduler.core.process_definition` api documentation. + +Task Declaration +---------------- + +Here we declare four tasks, and all of them are simple tasks of +:class:`pydolphinscheduler.tasks.shell` which run the `echo` command in the terminal. +Besides the argument `command`, we also need to set the argument `name` for each task *(not +only shell tasks, `name` is required for each type of task)*. + +.. literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :dedent: 0 + :start-after: [start task_declare] + :end-before: [end task_declare] + +Besides shell tasks, *PyDolphinScheduler* supports multiple tasks, which you could +find in :doc:`tasks/index`. 
+ +Setting Task Dependence +----------------------- + +After we declare both the process definition and the tasks, we have one workflow with +four tasks, and all tasks are independent, so they would run in parallel. +We should set the order and the dependence of the tasks. It is useful when we need to +run a preparation task before we run the actual task, or when tasks must run in a specific +order. We support both the attributes `set_downstream` and `set_upstream`, and the bitwise +operators `>>` and `<<`. + +In this example, we set task `task_parent` as the upstream task of tasks +`task_child_one` and `task_child_two`, and task `task_union` as the downstream +task of both of these two tasks. + +.. literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :dedent: 0 + :start-after: [start task_relation_declare] + :end-before: [end task_relation_declare] + +Please notice that we could group some tasks and set their dependence if they have the +same downstream or upstream. We declare tasks `task_child_one` and `task_child_two` +as a group here, named `task_group`, and set task `task_parent` as the upstream of +both of them. You could see more detail in the :ref:`concept:Tasks Dependence` section in the concept +documentation. + +Submit Or Run Workflow +---------------------- + +Now we have finished our workflow definition, with tasks and task dependence, but all +these things are local, so we should let the Apache DolphinScheduler daemon know how we +define our workflow. So the last thing we have to do here is submit our workflow to +the Apache DolphinScheduler daemon. + +Here in the example, we use the `ProcessDefinition` attribute `run` to submit the workflow +to the daemon, and set the schedule time we just declared in `process definition declaration`_. + +Now, we could run the Python code like any other Python script; for the basic usage, run +:code:`python tutorial.py` to trigger and run it. + +..
literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :dedent: 0 + :start-after: [start submit_or_run] + :end-before: [end submit_or_run] + +If you not start your Apache DolphinScheduler server, you could find the way in +:ref:`start:start Python gateway server` and it would have more detail about related server +start. Beside attribute `run`, we have attribute `submit` for object `ProcessDefinition` +and it just submit workflow to the daemon but not setting the schedule information. For +more detail you could see :ref:`concept:process definition`. + +DAG Graph After Tutorial Run +---------------------------- + +After we run the tutorial code, you could login Apache DolphinScheduler web UI, +go and see the `DolphinScheduler project page`_. they is a new process definition be +created and named "Tutorial". It create by *PyDolphinScheduler* and the DAG graph as below + +.. literalinclude:: ../../src/pydolphinscheduler/examples/tutorial.py + :language: text + :lines: 24-28 + +.. _`DolphinScheduler project page`: https://dolphinscheduler.apache.org/en-us/docs/latest/user_doc/guide/project.html +.. _`Python context manager`: https://docs.python.org/3/library/stdtypes.html#context-manager-types diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 b/dolphinscheduler-python/pydolphinscheduler/pytest.ini similarity index 78% rename from ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 rename to dolphinscheduler-python/pydolphinscheduler/pytest.ini index 2220c4effaa8daad2cdf4d46b54cded4e8b44844..f2c7ae64897a5394f57c6d5b14f67e2172c251dc 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 +++ b/dolphinscheduler-python/pydolphinscheduler/pytest.ini @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright ownership. @@ -13,8 +12,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# -{% for key, value in dolphin_common_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file +[pytest] +# Do not test test_java_gateway.py due to we can not mock java gateway for now +addopts = --ignore=tests/test_java_gateway.py + +# add path here to skip pytest scan it +norecursedirs = + tests/testing diff --git a/dolphinscheduler-python/pydolphinscheduler/setup.cfg b/dolphinscheduler-python/pydolphinscheduler/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..13a83393a9124bf6ec36540556b4808abd47e206 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/setup.cfg @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/dolphinscheduler-python/pydolphinscheduler/setup.py b/dolphinscheduler-python/pydolphinscheduler/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..76fe4c154a0cc03ab850c27f03765e434ee0469d --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/setup.py @@ -0,0 +1,176 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""The script for setting up pydolphinscheduler.""" +import logging +import os +import sys +from distutils.dir_util import remove_tree +from os.path import dirname, join +from typing import List + +from setuptools import Command, find_packages, setup + +if sys.version_info[0] < 3: + raise Exception( + "pydolphinscheduler does not support Python 2. Please upgrade to Python 3." 
+ ) + +logger = logging.getLogger(__name__) + +version = "2.0.9" + +# Start package required +prod = [ + "py4j~=0.10", +] + +build = [ + "build", + "setuptools>=42", + "wheel", +] + +doc = [ + "sphinx>=4.3", + "sphinx_rtd_theme>=1.0", +] + +test = [ + "pytest>=6.2", + "freezegun>=1.1", + "coverage>=6.1", +] + +style = [ + "flake8>=4.0", + "flake8-docstrings>=1.6", + "flake8-black>=0.2", + "isort>=5.10", +] + +dev = style + test + doc + build + +all_dep = prod + dev +# End package required + + +def read(*names, **kwargs): + """Read file content from given file path.""" + return open( + join(dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8") + ).read() + + +class CleanCommand(Command): + """Command to clean up python api before setup by running `python setup.py pre_clean`.""" + + description = "Clean up project root" + user_options: List[str] = [] + clean_list = [ + "build", + "htmlcov", + "dist", + ".pytest_cache", + ".coverage", + ] + + def initialize_options(self) -> None: + """Set default values for options.""" + pass + + def finalize_options(self) -> None: + """Set final values for options.""" + pass + + def run(self) -> None: + """Run and remove temporary files.""" + for cl in self.clean_list: + if not os.path.exists(cl): + logger.info("Path %s do not exists.", cl) + elif os.path.isdir(cl): + remove_tree(cl) + else: + os.remove(cl) + logger.info("Finish pre_clean process.") + + +setup( + name="apache-dolphinscheduler", + version=version, + license="Apache License 2.0", + description="Apache DolphinScheduler Python API", + long_description=read("README.md"), + # Make sure pypi is expecting markdown + long_description_content_type="text/markdown", + author="Apache Software Foundation", + author_email="dev@dolphinscheduler.apache.org", + url="https://dolphinscheduler.apache.org/", + python_requires=">=3.6", + keywords=[ + "dolphinscheduler", + "workflow", + "scheduler", + "taskflow", + ], + project_urls={ + "Homepage": 
"https://dolphinscheduler.apache.org", + "Documentation": "https://dolphinscheduler.apache.org/python/index.html", + "Source": "https://github.com/apache/dolphinscheduler/dolphinscheduler-python/pydolphinscheduler", + "Issue Tracker": "https://github.com/apache/dolphinscheduler/issues", + "Discussion": "https://github.com/apache/dolphinscheduler/discussions", + "Twitter": "https://twitter.com/dolphinschedule", + }, + packages=find_packages(where="src"), + package_dir={"": "src"}, + include_package_data=True, + package_data={ + "examples": ["examples.tutorial.py"], + }, + platforms=["any"], + classifiers=[ + # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers + "Development Status :: 3 - Alpha", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: Unix", + "Operating System :: POSIX", + "Operating System :: Microsoft :: Windows", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Software Development :: User Interfaces", + ], + install_requires=prod, + extras_require={ + "all": all_dep, + "dev": dev, + "style": style, + "test": test, + "doc": doc, + "build": build, + }, + cmdclass={ + "pre_clean": CleanCommand, + }, +) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/__init__.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2a7b55430c59b21bbecc730f5599b755632e5867 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/__init__.py @@ -0,0 +1,22 @@ +# 
Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init root of pydolphinscheduler.""" + +from pkg_resources import get_distribution + +__version__ = get_distribution("apache-dolphinscheduler").version diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/constants.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..65bf6c5149dfd752a0e714bbc7f108bb3efc67d0 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/constants.py @@ -0,0 +1,129 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Constants for pydolphinscheduler.""" + + +class ProcessDefinitionReleaseState: + """Constants for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition` release state.""" + + ONLINE: str = "ONLINE" + OFFLINE: str = "OFFLINE" + + +class ProcessDefinitionDefault: + """Constants default value for :class:`pydolphinscheduler.core.process_definition.ProcessDefinition`.""" + + PROJECT: str = "project-pydolphin" + TENANT: str = "tenant_pydolphin" + USER: str = "userPythonGateway" + # TODO simple set password same as username + USER_PWD: str = "userPythonGateway" + USER_EMAIL: str = "userPythonGateway@dolphinscheduler.com" + USER_PHONE: str = "11111111111" + USER_STATE: int = 1 + QUEUE: str = "queuePythonGateway" + WORKER_GROUP: str = "default" + TIME_ZONE: str = "Asia/Shanghai" + + +class TaskPriority(str): + """Constants for task priority.""" + + HIGHEST = "HIGHEST" + HIGH = "HIGH" + MEDIUM = "MEDIUM" + LOW = "LOW" + LOWEST = "LOWEST" + + +class TaskFlag(str): + """Constants for task flag.""" + + YES = "YES" + NO = "NO" + + +class TaskTimeoutFlag(str): + """Constants for task timeout flag.""" + + CLOSE = "CLOSE" + + +class TaskType(str): + """Constants for task type, it will also show you which kind we support up to now.""" + + SHELL = "SHELL" + HTTP = "HTTP" + PYTHON = "PYTHON" + SQL = "SQL" + SUB_PROCESS = "SUB_PROCESS" + PROCEDURE = "PROCEDURE" + DATAX = "DATAX" + DEPENDENT = "DEPENDENT" + CONDITIONS = "CONDITIONS" + SWITCH = "SWITCH" + FLINK = "FLINK" + SPARK = "SPARK" + MR = "MR" + + +class DefaultTaskCodeNum(str): + """Constants and 
default value for default task code number.""" + + DEFAULT = 1 + + +class JavaGatewayDefault(str): + """Constants and default value for java gateway.""" + + RESULT_MESSAGE_KEYWORD = "msg" + RESULT_MESSAGE_SUCCESS = "success" + + RESULT_STATUS_KEYWORD = "status" + RESULT_STATUS_SUCCESS = "SUCCESS" + + RESULT_DATA = "data" + + SERVER_ADDRESS = "127.0.0.1" + SERVER_PORT = 25333 + AUTO_CONVERT = True + + +class Delimiter(str): + """Constants for delimiter.""" + + BAR = "-" + DASH = "/" + COLON = ":" + UNDERSCORE = "_" + DIRECTION = "->" + + +class Time(str): + """Constants for date.""" + + FMT_STD_DATE = "%Y-%m-%d" + LEN_STD_DATE = 10 + + FMT_DASH_DATE = "%Y/%m/%d" + + FMT_SHORT_DATE = "%Y%m%d" + LEN_SHORT_DATE = 8 + + FMT_STD_TIME = "%H:%M:%S" + FMT_NO_COLON_TIME = "%H%M%S" diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/__init__.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..31dc9446d8f86d905f6fbba3ba7d394c7e1bd6c5 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/__init__.py @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Init pydolphinscheduler.core package.""" + +from pydolphinscheduler.core.database import Database +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.core.task import Task + +__all__ = [ + "ProcessDefinition", + "Task", + "Database", +] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base.py new file mode 100644 index 0000000000000000000000000000000000000000..690351ab2ff54e583d6c2734325ef1f6f46d5fc4 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base.py @@ -0,0 +1,74 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler Base object.""" + +from typing import Dict, Optional + +# from pydolphinscheduler.side.user import User +from pydolphinscheduler.utils.string import attr2camel + + +class Base: + """DolphinScheduler Base object.""" + + # Object key attribute, to test whether object equals and so on. + _KEY_ATTR: set = {"name", "description"} + + # Object defines attribute, use when needs to communicate with Java gateway server. 
+ _DEFINE_ATTR: set = set() + + # Object default attribute, will add those attribute to `_DEFINE_ATTR` when init assign missing. + _DEFAULT_ATTR: Dict = {} + + def __init__(self, name: str, description: Optional[str] = None): + self.name = name + self.description = description + + def __repr__(self) -> str: + return f'<{type(self).__name__}: name="{self.name}">' + + def __eq__(self, other): + return type(self) == type(other) and all( + getattr(self, a, None) == getattr(other, a, None) for a in self._KEY_ATTR + ) + + def get_define_custom( + self, camel_attr: bool = True, custom_attr: set = None + ) -> Dict: + """Get object definition attribute by given attr set.""" + content = {} + for attr in custom_attr: + val = getattr(self, attr, None) + if camel_attr: + content[attr2camel(attr)] = val + else: + content[attr] = val + return content + + def get_define(self, camel_attr: bool = True) -> Dict: + """Get object definition attribute communicate to Java gateway server. + + use attribute `self._DEFINE_ATTR` to determine which attributes should including when + object tries to communicate with Java gateway server. + """ + content = self.get_define_custom(camel_attr, self._DEFINE_ATTR) + update_default = { + k: self._DEFAULT_ATTR.get(k) for k in self._DEFAULT_ATTR if k not in content + } + content.update(update_default) + return content diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base_side.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base_side.py new file mode 100644 index 0000000000000000000000000000000000000000..ed20d7075e483442c71af9e6ec8f1ced96b0e0bd --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/base_side.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Module for side object.""" + +from typing import Optional + +from pydolphinscheduler.constants import ProcessDefinitionDefault +from pydolphinscheduler.core.base import Base + + +class BaseSide(Base): + """Base class for side object, it declare base behavior for them.""" + + def __init__(self, name: str, description: Optional[str] = None): + super().__init__(name, description) + + @classmethod + def create_if_not_exists( + cls, + # TODO comment for avoiding cycle import + # user: Optional[User] = ProcessDefinitionDefault.USER + user=ProcessDefinitionDefault.USER, + ): + """Create Base if not exists.""" + raise NotImplementedError diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/database.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/database.py new file mode 100644 index 0000000000000000000000000000000000000000..b6602a648374a94fe196afd471cd962d7fc6f193 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/database.py @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Module database.""" + +from typing import Dict + +from py4j.protocol import Py4JJavaError + +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.java_gateway import launch_gateway + + +class Database(dict): + """database object, get information about database. + + You provider database_name contain connection information, it decisions which + database type and database instance would run task. 
+ """ + + def __init__(self, database_name: str, type_key, database_key, *args, **kwargs): + super().__init__(*args, **kwargs) + self._database = {} + self.database_name = database_name + self[type_key] = self.database_type + self[database_key] = self.database_id + + @property + def database_type(self) -> str: + """Get database type from java gateway, a wrapper for :func:`get_database_info`.""" + return self.get_database_info(self.database_name).get("type") + + @property + def database_id(self) -> str: + """Get database id from java gateway, a wrapper for :func:`get_database_info`.""" + return self.get_database_info(self.database_name).get("id") + + def get_database_info(self, name) -> Dict: + """Get database info from java gateway, contains database id, type, name.""" + if self._database: + return self._database + else: + gateway = launch_gateway() + try: + self._database = gateway.entry_point.getDatasourceInfo(name) + # Handler database source do not exists error, for now we just terminate the process. + except Py4JJavaError as ex: + raise PyDSParamException(str(ex.java_exception)) + return self._database diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/engine.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/engine.py new file mode 100644 index 0000000000000000000000000000000000000000..df84b5ba90d035473cb5e5836208ba2f1899ac6e --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/core/engine.py @@ -0,0 +1,95 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
"""Module engine."""

from typing import Dict, Optional

from py4j.protocol import Py4JJavaError

from pydolphinscheduler.core.task import Task
from pydolphinscheduler.exceptions import PyDSParamException
from pydolphinscheduler.java_gateway import launch_gateway


class ProgramType(str):
    """Type of program engine runs, for now it just contains `JAVA`, `SCALA` and `PYTHON`."""

    JAVA = "JAVA"
    SCALA = "SCALA"
    PYTHON = "PYTHON"


class Engine(Task):
    """Task engine object, declare behavior for engine task to dolphinscheduler.

    This is the parent class of spark, flink and mr tasks,
    and is used to provide the programType, mainClass and mainJar task parameters for reuse.
    """

    def __init__(
        self,
        name: str,
        task_type: str,
        main_class: str,
        main_package: str,
        program_type: Optional[ProgramType] = ProgramType.SCALA,
        *args,
        **kwargs
    ):
        super().__init__(name, task_type, *args, **kwargs)
        self.main_class = main_class
        self.main_package = main_package
        self.program_type = program_type
        # Cache for the resource info fetched from the java gateway.
        self._resource = {}

    def get_resource_info(self, program_type, main_package):
        """Get resource info from java gateway, contains resource id, name.

        The result is cached after the first successful call.

        :raises PyDSParamException: when the resource does not exist on the server side.
        """
        if self._resource:
            return self._resource
        gateway = launch_gateway()
        try:
            self._resource = gateway.entry_point.getResourcesFileInfo(
                program_type, main_package
            )
        # Handle resource-not-exists error; for now we just terminate the process.
        except Py4JJavaError as ex:
            # Preserve the java-side error as the cause for easier debugging.
            raise PyDSParamException(str(ex.java_exception)) from ex
        return self._resource

    def get_jar_id(self) -> int:
        """Get jar id from java gateway, a wrapper for :func:`get_resource_info`."""
        return self.get_resource_info(self.program_type, self.main_package).get("id")

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for engine children task.

        Children tasks have some special attributes for task_params, and it is odd if we
        directly set them as python properties, so we override Task.task_params here.

        NOTE: the original declaration carried extra parameters (``camel_attr``,
        ``custom_attr``) which a property getter can never receive through attribute
        access; they were dead code and have been removed.
        """
        params = super().task_params
        custom_params = {
            "programType": self.program_type,
            "mainClass": self.main_class,
            "mainJar": {
                "id": self.get_jar_id(),
            },
        }
        params.update(custom_params)
        return params
"""Module process definition, core class for workflow define."""

import json
from datetime import datetime
from typing import Any, Dict, List, Optional, Set

from pydolphinscheduler.constants import (
    ProcessDefinitionDefault,
    ProcessDefinitionReleaseState,
    TaskType,
)
from pydolphinscheduler.core.base import Base
from pydolphinscheduler.exceptions import PyDSParamException, PyDSTaskNoFoundException
from pydolphinscheduler.java_gateway import launch_gateway
from pydolphinscheduler.side import Project, Tenant, User
from pydolphinscheduler.utils.date import MAX_DATETIME, conv_from_str, conv_to_schedule


class ProcessDefinitionContext:
    """Class process definition context, use when task get process definition from context expression."""

    _context_managed_process_definition: Optional["ProcessDefinition"] = None

    @classmethod
    def set(cls, pd: "ProcessDefinition") -> None:
        """Set attribute self._context_managed_process_definition."""
        cls._context_managed_process_definition = pd

    @classmethod
    def get(cls) -> Optional["ProcessDefinition"]:
        """Get attribute self._context_managed_process_definition."""
        return cls._context_managed_process_definition

    @classmethod
    def delete(cls) -> None:
        """Delete attribute self._context_managed_process_definition."""
        cls._context_managed_process_definition = None


class ProcessDefinition(Base):
    """process definition object, will define process definition attribute, task, relation.

    TODO: maybe we should rename this class, currently use DS object name.
    """

    # key attribute for identify ProcessDefinition object
    _KEY_ATTR = {
        "name",
        "project",
        "tenant",
        "release_state",
        "param",
    }

    _DEFINE_ATTR = {
        "name",
        "description",
        "_project",
        "_tenant",
        "worker_group",
        "timeout",
        "release_state",
        "param",
        "tasks",
        "task_definition_json",
        "task_relation_json",
    }

    def __init__(
        self,
        name: str,
        description: Optional[str] = None,
        schedule: Optional[str] = None,
        start_time: Optional[str] = None,
        end_time: Optional[str] = None,
        timezone: Optional[str] = ProcessDefinitionDefault.TIME_ZONE,
        user: Optional[str] = ProcessDefinitionDefault.USER,
        project: Optional[str] = ProcessDefinitionDefault.PROJECT,
        tenant: Optional[str] = ProcessDefinitionDefault.TENANT,
        queue: Optional[str] = ProcessDefinitionDefault.QUEUE,
        worker_group: Optional[str] = ProcessDefinitionDefault.WORKER_GROUP,
        timeout: Optional[int] = 0,
        release_state: Optional[str] = ProcessDefinitionReleaseState.ONLINE,
        param: Optional[Dict] = None,
    ):
        super().__init__(name, description)
        self.schedule = schedule
        self._start_time = start_time
        self._end_time = end_time
        self.timezone = timezone
        self._user = user
        self._project = project
        self._tenant = tenant
        self._queue = queue
        self.worker_group = worker_group
        self.timeout = timeout
        self.release_state = release_state
        self.param = param
        # Mapping of task code -> Task object for all tasks in this workflow.
        self.tasks: dict = {}
        # TODO how to fix circle import
        self._task_relations: set["TaskRelation"] = set()  # noqa: F821
        self._process_definition_code = None

    def __enter__(self) -> "ProcessDefinition":
        ProcessDefinitionContext.set(self)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        ProcessDefinitionContext.delete()

    @property
    def tenant(self) -> Tenant:
        """Get attribute tenant."""
        return Tenant(self._tenant)

    @tenant.setter
    def tenant(self, tenant: Tenant) -> None:
        """Set attribute tenant."""
        self._tenant = tenant.name

    @property
    def project(self) -> Project:
        """Get attribute project."""
        return Project(self._project)

    @project.setter
    def project(self, project: Project) -> None:
        """Set attribute project."""
        self._project = project.name

    @property
    def user(self) -> User:
        """Get user object.

        For now we just get from python side but not from java gateway side, so it may not correct.
        """
        return User(
            self._user,
            ProcessDefinitionDefault.USER_PWD,
            ProcessDefinitionDefault.USER_EMAIL,
            ProcessDefinitionDefault.USER_PHONE,
            self._tenant,
            self._queue,
            ProcessDefinitionDefault.USER_STATE,
        )

    @staticmethod
    def _parse_datetime(val: Any) -> Any:
        """Convert a str/datetime/None value to datetime, raising on any other type."""
        if val is None or isinstance(val, datetime):
            return val
        elif isinstance(val, str):
            return conv_from_str(val)
        else:
            raise PyDSParamException("Do not support value type %s for now", type(val))

    @property
    def start_time(self) -> Any:
        """Get attribute start_time."""
        return self._parse_datetime(self._start_time)

    @start_time.setter
    def start_time(self, val) -> None:
        """Set attribute start_time."""
        self._start_time = val

    @property
    def end_time(self) -> Any:
        """Get attribute end_time."""
        return self._parse_datetime(self._end_time)

    @end_time.setter
    def end_time(self, val) -> None:
        """Set attribute end_time."""
        self._end_time = val

    @property
    def param_json(self) -> Optional[List[Dict]]:
        """Return param json base on self.param."""
        # Handle empty dict and None value
        if not self.param:
            return []
        return [
            {
                "prop": k,
                "direct": "IN",
                "type": "VARCHAR",
                "value": v,
            }
            for k, v in self.param.items()
        ]

    @property
    def task_definition_json(self) -> List[Dict]:
        """Return all tasks definition in list of dict."""
        # NOTE(review): returns [{}] when there are no tasks — presumably the shape
        # the java gateway expects for an empty workflow; verify against the gateway.
        if not self.tasks:
            return [self.tasks]
        else:
            return [task.get_define() for task in self.tasks.values()]

    @property
    def task_relation_json(self) -> List[Dict]:
        """Return all relation between tasks pair in list of dict."""
        if not self.tasks:
            return [self.tasks]
        else:
            self._handle_root_relation()
            return [tr.get_define() for tr in self._task_relations]

    @property
    def schedule_json(self) -> Optional[Dict]:
        """Get schedule parameter json object. This is requests from java gateway interface."""
        if not self.schedule:
            return None
        else:
            start_time = conv_to_schedule(
                self.start_time if self.start_time else datetime.now()
            )
            end_time = conv_to_schedule(
                self.end_time if self.end_time else MAX_DATETIME
            )
            return {
                "startTime": start_time,
                "endTime": end_time,
                "crontab": self.schedule,
                "timezoneId": self.timezone,
            }

    # TODO init DAG's tasks are in the same location with default {x: 0, y: 0}
    @property
    def task_location(self) -> List[Dict]:
        """Return all tasks location for all process definition.

        For now, we only set all location with same x and y valued equal to 0. Because we do not
        find a good way to set task locations. This is requests from java gateway interface.
        """
        if not self.tasks:
            return [self.tasks]
        else:
            return [{"taskCode": task_code, "x": 0, "y": 0} for task_code in self.tasks]

    @property
    def task_list(self) -> List["Task"]:  # noqa: F821
        """Return list of tasks objects."""
        return list(self.tasks.values())

    def _handle_root_relation(self):
        """Handle root task property :class:`pydolphinscheduler.core.task.TaskRelation`.

        Root task in DAG do not have dominant upstream node, but we have to add an exactly default
        upstream task with task_code equal to `0`. This is requests from java gateway interface.
        """
        from pydolphinscheduler.core.task import TaskRelation

        post_relation_code = set()
        for relation in self._task_relations:
            post_relation_code.add(relation.post_task_code)
        for task in self.task_list:
            if task.code not in post_relation_code:
                root_relation = TaskRelation(pre_task_code=0, post_task_code=task.code)
                self._task_relations.add(root_relation)

    def add_task(self, task: "Task") -> None:  # noqa: F821
        """Add a single task to process definition."""
        self.tasks[task.code] = task
        task._process_definition = self

    def add_tasks(self, tasks: List["Task"]) -> None:  # noqa: F821
        """Add task sequence to process definition, it a wrapper of :func:`add_task`."""
        for task in tasks:
            self.add_task(task)

    def get_task(self, code: str) -> "Task":  # noqa: F821
        """Get task object from process definition by given code."""
        if code not in self.tasks:
            # Fix: the original format string was missing the second `%s` placeholder
            # ("... in process definition %"), so the name could never be rendered.
            raise PyDSTaskNoFoundException(
                "Task with code %s can not found in process definition %s",
                (code, self.name),
            )
        return self.tasks[code]

    # TODO which tying should return in this case
    def get_tasks_by_name(self, name: str) -> Set["Task"]:  # noqa: F821
        """Get tasks object by given name, if will return all tasks with this name."""
        find = set()
        for task in self.tasks.values():
            if task.name == name:
                find.add(task)
        return find

    def get_one_task_by_name(self, name: str) -> "Task":  # noqa: F821
        """Get exact one task from process definition by given name.

        Function always return one task even though this process definition have more than one task with
        this name.
        """
        tasks = self.get_tasks_by_name(name)
        if not tasks:
            raise PyDSTaskNoFoundException(f"Can not find task with name {name}.")
        return tasks.pop()

    def run(self):
        """Submit and Start ProcessDefinition instance.

        Shortcut for function :func:`submit` and function :func:`start`. Only support manual start workflow
        for now, and schedule run will coming soon.

        :return:
        """
        self.submit()
        self.start()

    def _ensure_side_model_exists(self):
        """Ensure process definition side model exists.

        For now, side object including :class:`pydolphinscheduler.side.project.Project`,
        :class:`pydolphinscheduler.side.tenant.Tenant`, :class:`pydolphinscheduler.side.user.User`.
        If these model not exists, would create default value in
        :class:`pydolphinscheduler.constants.ProcessDefinitionDefault`.
        """
        # TODO used metaclass for more pythonic
        self.tenant.create_if_not_exists(self._queue)
        # model User have to create after Tenant created
        self.user.create_if_not_exists()
        # Project model need User object exists
        self.project.create_if_not_exists(self._user)

    def _pre_submit_check(self):
        """Check specific condition satisfy before.

        This method should be called before process definition submit to java gateway
        For now, we have below checker:
        * `self.param` should be set if task `switch` in this workflow.
        """
        if (
            any([task.task_type == TaskType.SWITCH for task in self.tasks.values()])
            and self.param is None
        ):
            raise PyDSParamException(
                "Parameter param must be provider if task Switch in process definition."
            )

    def submit(self) -> int:
        """Submit ProcessDefinition instance to java gateway."""
        self._ensure_side_model_exists()
        self._pre_submit_check()

        gateway = launch_gateway()
        self._process_definition_code = gateway.entry_point.createOrUpdateProcessDefinition(
            self._user,
            self._project,
            self.name,
            str(self.description) if self.description else "",
            json.dumps(self.param_json),
            json.dumps(self.schedule_json) if self.schedule_json else None,
            json.dumps(self.task_location),
            self.timeout,
            self.worker_group,
            self._tenant,
            # TODO add serialization function
            json.dumps(self.task_relation_json),
            json.dumps(self.task_definition_json),
            None,
        )
        return self._process_definition_code

    def start(self) -> None:
        """Create and start ProcessDefinition instance.

        which post to `start-process-instance` to java gateway
        """
        gateway = launch_gateway()
        gateway.entry_point.execProcessInstance(
            self._user,
            self._project,
            self.name,
            "",
            self.worker_group,
            24 * 3600,
        )
+ +"""DolphinScheduler Task and TaskRelation object.""" + +import logging +from typing import Dict, List, Optional, Sequence, Set, Tuple, Union + +from pydolphinscheduler.constants import ( + Delimiter, + ProcessDefinitionDefault, + TaskFlag, + TaskPriority, + TaskTimeoutFlag, +) +from pydolphinscheduler.core.base import Base +from pydolphinscheduler.core.process_definition import ( + ProcessDefinition, + ProcessDefinitionContext, +) +from pydolphinscheduler.java_gateway import launch_gateway + + +class TaskRelation(Base): + """TaskRelation object, describe the relation of exactly two tasks.""" + + # Add attr `_KEY_ATTR` to overwrite :func:`__eq__`, it is make set + # `Task.process_definition._task_relations` work correctly. + _KEY_ATTR = { + "pre_task_code", + "post_task_code", + } + + _DEFINE_ATTR = { + "pre_task_code", + "post_task_code", + } + + _DEFAULT_ATTR = { + "name": "", + "preTaskVersion": 1, + "postTaskVersion": 1, + "conditionType": 0, + "conditionParams": {}, + } + + def __init__( + self, + pre_task_code: int, + post_task_code: int, + name: Optional[str] = None, + ): + super().__init__(name) + self.pre_task_code = pre_task_code + self.post_task_code = post_task_code + + def __hash__(self): + return hash(f"{self.pre_task_code} {Delimiter.DIRECTION} {self.post_task_code}") + + +class Task(Base): + """Task object, parent class for all exactly task type.""" + + _DEFINE_ATTR = { + "name", + "code", + "version", + "task_type", + "task_params", + "description", + "flag", + "task_priority", + "worker_group", + "delay_time", + "fail_retry_times", + "fail_retry_interval", + "timeout_flag", + "timeout_notify_strategy", + "timeout", + } + + _task_custom_attr: set = set() + + DEFAULT_CONDITION_RESULT = {"successNode": [""], "failedNode": [""]} + + def __init__( + self, + name: str, + task_type: str, + description: Optional[str] = None, + flag: Optional[str] = TaskFlag.YES, + task_priority: Optional[str] = TaskPriority.MEDIUM, + worker_group: Optional[str] = 
ProcessDefinitionDefault.WORKER_GROUP, + delay_time: Optional[int] = 0, + fail_retry_times: Optional[int] = 0, + fail_retry_interval: Optional[int] = 1, + timeout_flag: Optional[int] = TaskTimeoutFlag.CLOSE, + timeout_notify_strategy: Optional = None, + timeout: Optional[int] = 0, + process_definition: Optional[ProcessDefinition] = None, + local_params: Optional[List] = None, + resource_list: Optional[List] = None, + dependence: Optional[Dict] = None, + wait_start_timeout: Optional[Dict] = None, + condition_result: Optional[Dict] = None, + ): + + super().__init__(name, description) + self.task_type = task_type + self.flag = flag + self.task_priority = task_priority + self.worker_group = worker_group + self.fail_retry_times = fail_retry_times + self.fail_retry_interval = fail_retry_interval + self.delay_time = delay_time + self.timeout_flag = timeout_flag + self.timeout_notify_strategy = timeout_notify_strategy + self.timeout = timeout + self._process_definition = None + self.process_definition: ProcessDefinition = ( + process_definition or ProcessDefinitionContext.get() + ) + self._upstream_task_codes: Set[int] = set() + self._downstream_task_codes: Set[int] = set() + self._task_relation: Set[TaskRelation] = set() + # move attribute code and version after _process_definition and process_definition declare + self.code, self.version = self.gen_code_and_version() + # Add task to process definition, maybe we could put into property process_definition latter + if ( + self.process_definition is not None + and self.code not in self.process_definition.tasks + ): + self.process_definition.add_task(self) + else: + logging.warning( + "Task code %d already in process definition, prohibit re-add task.", + self.code, + ) + + # Attribute for task param + self.local_params = local_params or [] + self.resource_list = resource_list or [] + self.dependence = dependence or {} + self.wait_start_timeout = wait_start_timeout or {} + self._condition_result = condition_result or 
self.DEFAULT_CONDITION_RESULT + + @property + def process_definition(self) -> Optional[ProcessDefinition]: + """Get attribute process_definition.""" + return self._process_definition + + @process_definition.setter + def process_definition(self, process_definition: Optional[ProcessDefinition]): + """Set attribute process_definition.""" + self._process_definition = process_definition + + @property + def condition_result(self) -> Dict: + """Get attribute condition_result.""" + return self._condition_result + + @condition_result.setter + def condition_result(self, condition_result: Optional[Dict]): + """Set attribute condition_result.""" + self._condition_result = condition_result + + @property + def task_params(self) -> Optional[Dict]: + """Get task parameter object. + + Will get result to combine _task_custom_attr and custom_attr. + """ + custom_attr = { + "local_params", + "resource_list", + "dependence", + "wait_start_timeout", + "condition_result", + } + custom_attr |= self._task_custom_attr + return self.get_define_custom(custom_attr=custom_attr) + + def __hash__(self): + return hash(self.code) + + def __lshift__(self, other: Union["Task", Sequence["Task"]]): + """Implement Task << Task.""" + self.set_upstream(other) + return other + + def __rshift__(self, other: Union["Task", Sequence["Task"]]): + """Implement Task >> Task.""" + self.set_downstream(other) + return other + + def __rrshift__(self, other: Union["Task", Sequence["Task"]]): + """Call for Task >> [Task] because list don't have __rshift__ operators.""" + self.__lshift__(other) + return self + + def __rlshift__(self, other: Union["Task", Sequence["Task"]]): + """Call for Task << [Task] because list don't have __lshift__ operators.""" + self.__rshift__(other) + return self + + def _set_deps( + self, tasks: Union["Task", Sequence["Task"]], upstream: bool = True + ) -> None: + """ + Set parameter tasks dependent to current task. + + it is a wrapper for :func:`set_upstream` and :func:`set_downstream`. 
+ """ + if not isinstance(tasks, Sequence): + tasks = [tasks] + + for task in tasks: + if upstream: + self._upstream_task_codes.add(task.code) + task._downstream_task_codes.add(self.code) + + if self._process_definition: + task_relation = TaskRelation( + pre_task_code=task.code, + post_task_code=self.code, + name=f"{task.name} {Delimiter.DIRECTION} {self.name}", + ) + self.process_definition._task_relations.add(task_relation) + else: + self._downstream_task_codes.add(task.code) + task._upstream_task_codes.add(self.code) + + if self._process_definition: + task_relation = TaskRelation( + pre_task_code=self.code, + post_task_code=task.code, + name=f"{self.name} {Delimiter.DIRECTION} {task.name}", + ) + self.process_definition._task_relations.add(task_relation) + + def set_upstream(self, tasks: Union["Task", Sequence["Task"]]) -> None: + """Set parameter tasks as upstream to current task.""" + self._set_deps(tasks, upstream=True) + + def set_downstream(self, tasks: Union["Task", Sequence["Task"]]) -> None: + """Set parameter tasks as downstream to current task.""" + self._set_deps(tasks, upstream=False) + + # TODO code should better generate in bulk mode when :ref: processDefinition run submit or start + def gen_code_and_version(self) -> Tuple: + """ + Generate task code and version from java gateway. + + If task name do not exists in process definition before, if will generate new code and version id + equal to 0 by java gateway, otherwise if will return the exists code and version. 
+ """ + # TODO get code from specific project process definition and task name + gateway = launch_gateway() + result = gateway.entry_point.getCodeAndVersion( + self.process_definition._project, self.name + ) + # result = gateway.entry_point.genTaskCodeList(DefaultTaskCodeNum.DEFAULT) + # gateway_result_checker(result) + return result.get("code"), result.get("version") diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/__init__.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..37b2e5b61c2f2e085adcce318023ccbb53eb548c --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Init examples package which provides users with pydolphinscheduler examples.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/bulk_create_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/bulk_create_example.py new file mode 100644 index 0000000000000000000000000000000000000000..72bdb02243c9817f8286522dd6678f4143ebc36f --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/bulk_create_example.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This example show you how to create workflows in batch mode. + +After this example run, we will create 10 workflows named `workflow:`, and with 3 tasks +named `task:-workflow:` in each workflow. Task shape as below + +task:1-workflow:1 -> task:2-workflow:1 -> task:3-workflow:1 + +Each workflow is linear since we set `IS_CHAIN=True`, you could change task to parallel by set it to `False`. 
+""" + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.shell import Shell + +NUM_WORKFLOWS = 10 +NUM_TASKS = 5 +# Make sure your tenant exists in your operator system +TENANT = "exists_tenant" +# Whether task should dependent on pre one or not +# False will create workflow with independent task, while True task will dependent on pre-task and dependence +# link like `pre_task -> current_task -> next_task`, default True +IS_CHAIN = True + +for wf in range(0, NUM_WORKFLOWS): + workflow_name = f"workflow:{wf}" + + with ProcessDefinition(name=workflow_name, tenant=TENANT) as pd: + for t in range(0, NUM_TASKS): + task_name = f"task:{t}-{workflow_name}" + command = f"echo This is task {task_name}" + task = Shell(name=task_name, command=command) + + if IS_CHAIN and t > 0: + pre_task_name = f"task:{t-1}-{workflow_name}" + pd.get_one_task_by_name(pre_task_name) >> task + + # We just submit workflow and task definition without set schedule time or run it manually + pd.submit() diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_condition_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_condition_example.py new file mode 100644 index 0000000000000000000000000000000000000000..2d73df4b40d5d09ddd8c0de24abf0ebcbfdebb7e --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_condition_example.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
# [start workflow_declare]
r"""
An example workflow for task condition.

This example will create five tasks in a single workflow: four shell tasks and one
condition task. The condition task has one explicit upstream dependency declared with
the syntax `parent >> condition`, and three downstream dependencies set automatically
by the condition task through the `condition` parameter. The graph of this workflow
is like:

pre_task_1 ->               -> success_branch
              \            /
pre_task_2 ->  -> condition
              /            \
pre_task_3 ->               -> fail_branch
.
"""

from pydolphinscheduler.core.process_definition import ProcessDefinition
from pydolphinscheduler.tasks.condition import FAILURE, SUCCESS, And, Condition
from pydolphinscheduler.tasks.shell import Shell

with ProcessDefinition(name="task_condition_example", tenant="tenant_exists") as pd:
    pre_task_1 = Shell(name="pre_task_1", command="echo pre_task_1")
    pre_task_2 = Shell(name="pre_task_2", command="echo pre_task_2")
    pre_task_3 = Shell(name="pre_task_3", command="echo pre_task_3")
    cond_operator = And(
        And(
            SUCCESS(pre_task_1, pre_task_2),
            FAILURE(pre_task_3),
        ),
    )

    success_branch = Shell(name="success_branch", command="echo success_branch")
    fail_branch = Shell(name="fail_branch", command="echo fail_branch")

    condition = Condition(
        name="condition",
        condition=cond_operator,
        success_task=success_branch,
        failed_task=fail_branch,
    )
    pd.submit()
# [end workflow_declare]
# [start workflow_declare]
"""
An example workflow for task datax.

This example will create a workflow named `task_datax_example` which runs a datax
task named `task_datax`. You have to create the data sources `first_mysql` and
`second_mysql` through the UI beforehand. The task synchronizes data from the
source database to the target database via datax.
"""

from pydolphinscheduler.core.process_definition import ProcessDefinition
from pydolphinscheduler.tasks.datax import CustomDataX, DataX

# datax json template
JSON_TEMPLATE = {
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "username": "usr",
                        "password": "pwd",
                        "column": ["id", "name", "code", "description"],
                        "splitPk": "id",
                        "connection": [
                            {
                                "table": ["source_table"],
                                "jdbcUrl": ["jdbc:mysql://127.0.0.1:3306/source_db"],
                            }
                        ],
                    },
                },
                "writer": {
                    "name": "mysqlwriter",
                    "parameter": {
                        "writeMode": "insert",
                        "username": "usr",
                        "password": "pwd",
                        "column": ["id", "name"],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/target_db",
                                "table": ["target_table"],
                            }
                        ],
                    },
                },
            }
        ],
        "setting": {
            "errorLimit": {"percentage": 0, "record": 0},
            "speed": {"channel": 1, "record": 1000},
        },
    }
}

with ProcessDefinition(
    name="task_datax_example",
    tenant="tenant_exists",
) as pd:
    # This task synchronizes the data in `source_table`
    # of `first_mysql` database to `target_table` of `second_mysql` database.
    # You have to make sure data source named `first_mysql` and `second_mysql` exists
    # in your environment.
    task1 = DataX(
        name="task_datax",
        datasource_name="first_mysql",
        datatarget_name="second_mysql",
        sql="select id, name, code, description from source_table",
        target_table="target_table",
    )

    # You can customize the datax json_template to sync data. This task creates a new
    # datax job same as task1, transferring records from `first_mysql` to `second_mysql`
    task2 = CustomDataX(name="task_custom_datax", json=str(JSON_TEMPLATE))
    pd.run()
# [end workflow_declare]
+ +After this script submit, we would get workflow as below: + +task_dependent_external: + +task_1 +task_2 +task_3 + +task_dependent: + +task_dependent(this task dependent on task_dependent_external.task_1 and task_dependent_external.task_2). +""" +from pydolphinscheduler.constants import ProcessDefinitionDefault +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.dependent import And, Dependent, DependentItem, Or +from pydolphinscheduler.tasks.shell import Shell + +with ProcessDefinition( + name="task_dependent_external", + tenant="tenant_exists", +) as pd: + task_1 = Shell(name="task_1", command="echo task 1") + task_2 = Shell(name="task_2", command="echo task 2") + task_3 = Shell(name="task_3", command="echo task 3") + pd.submit() + +with ProcessDefinition( + name="task_dependent_example", + tenant="tenant_exists", +) as pd: + task = Dependent( + name="task_dependent", + dependence=And( + Or( + DependentItem( + project_name=ProcessDefinitionDefault.PROJECT, + process_definition_name="task_dependent_external", + dependent_task_name="task_1", + ), + DependentItem( + project_name=ProcessDefinitionDefault.PROJECT, + process_definition_name="task_dependent_external", + dependent_task_name="task_2", + ), + ) + ), + ) + pd.submit() +# [end workflow_declare] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_flink_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_flink_example.py new file mode 100644 index 0000000000000000000000000000000000000000..1e8a040c65ecba4a964f67e16a3a91efe2bbd41b --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_flink_example.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# [start workflow_declare] +"""A example workflow for task flink.""" + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.flink import DeployMode, Flink, ProgramType + +with ProcessDefinition(name="task_flink_example", tenant="tenant_exists") as pd: + task = Flink( + name="task_flink", + main_class="org.apache.flink.streaming.examples.wordcount.WordCount", + main_package="WordCount.jar", + program_type=ProgramType.JAVA, + deploy_mode=DeployMode.LOCAL, + ) + pd.run() +# [end workflow_declare] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_map_reduce_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_map_reduce_example.py new file mode 100644 index 0000000000000000000000000000000000000000..39b204f82a7f432aed45b5e260331257ae3982bc --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_map_reduce_example.py @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# [start workflow_declare] +"""A example workflow for task mr.""" + +from pydolphinscheduler.core.engine import ProgramType +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.map_reduce import MR + +with ProcessDefinition(name="task_map_reduce_example", tenant="tenant_exists") as pd: + task = MR( + name="task_mr", + main_class="wordcount", + main_package="hadoop-mapreduce-examples-3.3.1.jar", + program_type=ProgramType.JAVA, + main_args="/dolphinscheduler/tenant_exists/resources/file.txt /output/ds", + ) + pd.run() +# [end workflow_declare] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_spark_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_spark_example.py new file mode 100644 index 0000000000000000000000000000000000000000..594d95f55a6f785a1aef677377e5c082a4cdbb03 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_spark_example.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# [start workflow_declare] +"""A example workflow for task spark.""" + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.spark import DeployMode, ProgramType, Spark + +with ProcessDefinition(name="task_spark_example", tenant="tenant_exists") as pd: + task = Spark( + name="task_spark", + main_class="org.apache.spark.examples.SparkPi", + main_package="spark-examples_2.12-3.2.0.jar", + program_type=ProgramType.JAVA, + deploy_mode=DeployMode.LOCAL, + ) + pd.run() +# [end workflow_declare] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_switch_example.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_switch_example.py new file mode 100644 index 0000000000000000000000000000000000000000..7966af320ecfc58afe852512c713dec17a265ad7 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/task_switch_example.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# [start workflow_declare] +r""" +A example workflow for task switch. + +This example will create four task in single workflow, with three shell task and one switch task. Task switch +have one upstream which we declare explicit with syntax `parent >> switch`, and two downstream automatically +set dependence by switch task by passing parameter `condition`. The graph of this workflow like: + --> switch_child_1 + / +parent -> switch -> + \ + --> switch_child_2 +. +""" + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.shell import Shell +from pydolphinscheduler.tasks.switch import Branch, Default, Switch, SwitchCondition + +with ProcessDefinition( + name="task_switch_example", tenant="tenant_exists", param={"var": "1"} +) as pd: + parent = Shell(name="parent", command="echo parent") + switch_child_1 = Shell(name="switch_child_1", command="echo switch_child_1") + switch_child_2 = Shell(name="switch_child_2", command="echo switch_child_2") + switch_condition = SwitchCondition( + Branch(condition="${var} > 1", task=switch_child_1), + Default(task=switch_child_2), + ) + + switch = Switch(name="switch", condition=switch_condition) + parent >> switch + pd.submit() +# [end workflow_declare] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/tutorial.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/tutorial.py new file mode 100644 index 0000000000000000000000000000000000000000..0478e685190f63d48af42550f5d5e2a4552c4c74 --- /dev/null +++ 
b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/examples/tutorial.py @@ -0,0 +1,68 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +r""" +A tutorial example take you to experience pydolphinscheduler. + +After tutorial.py file submit to Apache DolphinScheduler server a DAG would be create, +and workflow DAG graph as below: + + --> task_child_one + / \ +task_parent --> --> task_union + \ / + --> task_child_two + +it will instantiate and run all the task it have. +""" + +# [start tutorial] +# [start package_import] +# Import ProcessDefinition object to define your workflow attributes +from pydolphinscheduler.core.process_definition import ProcessDefinition + +# Import task Shell object cause we would create some shell tasks later +from pydolphinscheduler.tasks.shell import Shell + +# [end package_import] + +# [start workflow_declare] +with ProcessDefinition( + name="tutorial", + schedule="0 0 0 * * ? 
*", + start_time="2021-01-01", + tenant="tenant_exists", +) as pd: + # [end workflow_declare] + # [start task_declare] + task_parent = Shell(name="task_parent", command="echo hello pydolphinscheduler") + task_child_one = Shell(name="task_child_one", command="echo 'child one'") + task_child_two = Shell(name="task_child_two", command="echo 'child two'") + task_union = Shell(name="task_union", command="echo union") + # [end task_declare] + + # [start task_relation_declare] + task_group = [task_child_one, task_child_two] + task_parent.set_downstream(task_group) + + task_union << task_group + # [end task_relation_declare] + + # [start submit_or_run] + pd.run() + # [end submit_or_run] +# [end tutorial] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/exceptions.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..745ef3e99b84f252f7fe6df5c3768e72b37248db --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/exceptions.py @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Exceptions for pydolphinscheduler.""" + + +class PyDSBaseException(Exception): + """Base exception for pydolphinscheduler.""" + + pass + + +class PyDSParamException(PyDSBaseException): + """Exception for pydolphinscheduler parameter verify error.""" + + pass + + +class PyDSTaskNoFoundException(PyDSBaseException): + """Exception for pydolphinscheduler workflow task no found error.""" + + pass + + +class PyDSJavaGatewayException(PyDSBaseException): + """Exception for pydolphinscheduler Java gateway error.""" + + pass + + +class PyDSProcessDefinitionNotAssignException(PyDSBaseException): + """Exception for pydolphinscheduler process definition not assign error.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/java_gateway.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/java_gateway.py new file mode 100644 index 0000000000000000000000000000000000000000..2876ed52155d0d8e9f07e000b97f255090460d23 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/java_gateway.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Module java gateway, contain gateway behavior.""" + +from typing import Any, Optional + +from py4j.java_collections import JavaMap +from py4j.java_gateway import GatewayParameters, JavaGateway + +from pydolphinscheduler.constants import JavaGatewayDefault +from pydolphinscheduler.exceptions import PyDSJavaGatewayException + + +def launch_gateway( + address: Optional[str] = None, + port: Optional[int] = None, + auto_convert: Optional[bool] = True, +) -> JavaGateway: + """Launch java gateway to pydolphinscheduler. + + TODO Note that automatic conversion makes calling Java methods slightly less efficient because + in the worst case, Py4J needs to go through all registered converters for all parameters. + This is why automatic conversion is disabled by default. + """ + gateway_parameters = GatewayParameters( + address=address or JavaGatewayDefault.SERVER_ADDRESS, + port=port or JavaGatewayDefault.SERVER_PORT, + auto_convert=auto_convert or JavaGatewayDefault.AUTO_CONVERT, + ) + gateway = JavaGateway(gateway_parameters=gateway_parameters) + return gateway + + +def gateway_result_checker( + result: JavaMap, + msg_check: Optional[str] = JavaGatewayDefault.RESULT_MESSAGE_SUCCESS, +) -> Any: + """Check weather java gateway result success or not.""" + if ( + result[JavaGatewayDefault.RESULT_STATUS_KEYWORD].toString() + != JavaGatewayDefault.RESULT_STATUS_SUCCESS + ): + raise PyDSJavaGatewayException("Failed when try to got result for java gateway") + if ( + msg_check is not None + and result[JavaGatewayDefault.RESULT_MESSAGE_KEYWORD] != msg_check + ): + raise PyDSJavaGatewayException("Get result state not success.") + return result diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/__init__.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c4479ddf24e026d2a7ecacce38e8da5cedfb1f24 --- /dev/null +++ 
b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/__init__.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init Side package, Side package keep object related to DolphinScheduler but not in the Core part.""" + +from pydolphinscheduler.side.project import Project +from pydolphinscheduler.side.queue import Queue +from pydolphinscheduler.side.tenant import Tenant +from pydolphinscheduler.side.user import User +from pydolphinscheduler.side.worker_group import WorkerGroup + +__all__ = [ + "Project", + "Tenant", + "User", + "Queue", + "WorkerGroup", +] diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/project.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/project.py new file mode 100644 index 0000000000000000000000000000000000000000..02382cadc564ba9e9bbba7c48bc62f2530607a63 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/project.py @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler Project object.""" + +from typing import Optional + +from pydolphinscheduler.constants import ProcessDefinitionDefault +from pydolphinscheduler.core.base_side import BaseSide +from pydolphinscheduler.java_gateway import launch_gateway + + +class Project(BaseSide): + """DolphinScheduler Project object.""" + + def __init__( + self, + name: str = ProcessDefinitionDefault.PROJECT, + description: Optional[str] = None, + ): + super().__init__(name, description) + + def create_if_not_exists(self, user=ProcessDefinitionDefault.USER) -> None: + """Create Project if not exists.""" + gateway = launch_gateway() + gateway.entry_point.createProject(user, self.name, self.description) + # TODO recover result checker + # gateway_result_checker(result, None) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/queue.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/queue.py new file mode 100644 index 0000000000000000000000000000000000000000..9d6664e14b005d868a26735c424926660277a527 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/queue.py @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler User object.""" + +from typing import Optional + +from pydolphinscheduler.constants import ProcessDefinitionDefault +from pydolphinscheduler.core.base_side import BaseSide +from pydolphinscheduler.java_gateway import gateway_result_checker, launch_gateway + + +class Queue(BaseSide): + """DolphinScheduler Queue object.""" + + def __init__( + self, + name: str = ProcessDefinitionDefault.QUEUE, + description: Optional[str] = "", + ): + super().__init__(name, description) + + def create_if_not_exists(self, user=ProcessDefinitionDefault.USER) -> None: + """Create Queue if not exists.""" + gateway = launch_gateway() + # Here we set Queue.name and Queue.queueName same as self.name + result = gateway.entry_point.createProject(user, self.name, self.name) + gateway_result_checker(result, None) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/tenant.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/tenant.py new file mode 100644 index 0000000000000000000000000000000000000000..508c0331021716a69de9bf4b7940199020ae4f67 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/tenant.py @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler Tenant object.""" + +from typing import Optional + +from pydolphinscheduler.constants import ProcessDefinitionDefault +from pydolphinscheduler.core.base_side import BaseSide +from pydolphinscheduler.java_gateway import launch_gateway + + +class Tenant(BaseSide): + """DolphinScheduler Tenant object.""" + + def __init__( + self, + name: str = ProcessDefinitionDefault.TENANT, + queue: str = ProcessDefinitionDefault.QUEUE, + description: Optional[str] = None, + ): + super().__init__(name, description) + self.queue = queue + + def create_if_not_exists( + self, queue_name: str, user=ProcessDefinitionDefault.USER + ) -> None: + """Create Tenant if not exists.""" + gateway = launch_gateway() + gateway.entry_point.createTenant(self.name, self.description, queue_name) + # gateway_result_checker(result, None) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/user.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/user.py new file mode 100644 index 0000000000000000000000000000000000000000..cd0145aea7ccc918c79aba999f0c4fd4a7ee478b --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/user.py @@ -0,0 +1,70 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more 
contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler User object.""" + +from typing import Optional + +from pydolphinscheduler.core.base_side import BaseSide +from pydolphinscheduler.java_gateway import launch_gateway + + +class User(BaseSide): + """DolphinScheduler User object.""" + + _KEY_ATTR = { + "name", + "password", + "email", + "phone", + "tenant", + "queue", + "status", + } + + def __init__( + self, + name: str, + password: str, + email: str, + phone: str, + tenant: str, + queue: Optional[str] = None, + status: Optional[int] = 1, + ): + super().__init__(name) + self.password = password + self.email = email + self.phone = phone + self.tenant = tenant + self.queue = queue + self.status = status + + def create_if_not_exists(self, **kwargs): + """Create User if not exists.""" + gateway = launch_gateway() + gateway.entry_point.createUser( + self.name, + self.password, + self.email, + self.phone, + self.tenant, + self.queue, + self.status, + ) + # TODO recover result checker + # gateway_result_checker(result, None) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/worker_group.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/worker_group.py new file mode 100644 index 
0000000000000000000000000000000000000000..ed50ec66309101d3f590934bb38dda75eca6f787 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/side/worker_group.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""DolphinScheduler Worker Group object.""" + +from typing import Optional + +from pydolphinscheduler.core.base_side import BaseSide + + +class WorkerGroup(BaseSide): + """DolphinScheduler Worker Group object.""" + + def __init__(self, name: str, address: str, description: Optional[str] = None): + super().__init__(name, description) + self.address = address diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/__init__.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dd46c912dfaf3c07ff4889bbe0e4964a75cf0ecb --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/__init__.py @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
"""Init pydolphinscheduler.tasks package."""

from pydolphinscheduler.tasks.condition import FAILURE, SUCCESS, And, Condition, Or
from pydolphinscheduler.tasks.datax import CustomDataX, DataX
from pydolphinscheduler.tasks.dependent import Dependent
from pydolphinscheduler.tasks.flink import Flink
from pydolphinscheduler.tasks.http import Http
from pydolphinscheduler.tasks.map_reduce import MR
from pydolphinscheduler.tasks.procedure import Procedure
from pydolphinscheduler.tasks.python import Python
from pydolphinscheduler.tasks.shell import Shell
from pydolphinscheduler.tasks.spark import Spark
from pydolphinscheduler.tasks.sql import Sql
from pydolphinscheduler.tasks.sub_process import SubProcess
from pydolphinscheduler.tasks.switch import Branch, Default, Switch, SwitchCondition

# Bug fix: keep ``__all__`` in sync with the imports above. The original list
# omitted CustomDataX and the condition/switch helpers (And, Or, SUCCESS,
# FAILURE, Branch, Default, SwitchCondition), so ``from ... import *``
# silently dropped them even though they were imported here on purpose.
__all__ = [
    "And",
    "Branch",
    "Condition",
    "CustomDataX",
    "DataX",
    "Default",
    "Dependent",
    "FAILURE",
    "Flink",
    "Http",
    "MR",
    "Or",
    "Procedure",
    "Python",
    "SUCCESS",
    "Shell",
    "Spark",
    "Sql",
    "SubProcess",
    "Switch",
    "SwitchCondition",
]
"""Task Conditions."""

from typing import Dict, List

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.base import Base
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.exceptions import PyDSParamException


class Status(Base):
    """Base class of Condition task status.

    It is the parent class for :class:`SUCCESS` and :class:`FAILURE`,
    providing the status name and :func:`get_define` to its subclasses.
    """

    def __init__(self, *tasks):
        super().__init__(f"Condition.{self.status_name()}")
        self.tasks = tasks

    def __repr__(self) -> str:
        # The repr doubles as the attribute name ConditionOperator stores
        # aggregated child definitions under — see set_define_attr below.
        return "depend_item_list"

    @classmethod
    def status_name(cls) -> str:
        """Get name for Status or its sub class."""
        return cls.__name__.upper()

    def get_define(self, camel_attr: bool = True) -> List:
        """Get status definition attribute communicate to Java gateway server.

        :raises PyDSParamException: when any element of ``tasks`` is not a Task.
        """
        content = []
        for task in self.tasks:
            if not isinstance(task, Task):
                raise PyDSParamException(
                    "%s only accept class Task or sub class Task, but get %s",
                    (self.status_name(), type(task)),
                )
            content.append({"depTaskCode": task.code, "status": self.status_name()})
        return content


class SUCCESS(Status):
    """Class SUCCESS to task condition, sub class of :class:`Status`."""


class FAILURE(Status):
    """Class FAILURE to task condition, sub class of :class:`Status`."""


class ConditionOperator(Base):
    """Set ConditionTask or ConditionOperator with specific operator."""

    _DEFINE_ATTR = {
        "relation",
    }

    def __init__(self, *args):
        super().__init__(self.__class__.__name__)
        self.args = args

    def __repr__(self) -> str:
        return "depend_task_list"

    @classmethod
    def operator_name(cls) -> str:
        """Get operator name in different class."""
        return cls.__name__.upper()

    @property
    def relation(self) -> str:
        """Get operator name in different class, for function :func:`get_define`."""
        return self.operator_name()

    def set_define_attr(self) -> str:
        """Set attribute to function :func:`get_define`.

        It is a wrapper for both `And` and `Or` operator. All children must be
        of the same kind (all Status or all ConditionOperator).

        :raises PyDSParamException: on mixed or unsupported child types.
        """
        result = []
        attr = None
        for condition in self.args:
            if isinstance(condition, (Status, ConditionOperator)):
                if attr is None:
                    attr = repr(condition)
                elif repr(condition) != attr:
                    raise PyDSParamException(
                        "Condition %s operator parameter only support same type.",
                        self.relation,
                    )
            else:
                raise PyDSParamException(
                    "Condition %s operator parameter support ConditionTask and ConditionOperator but got %s.",
                    (self.relation, type(condition)),
                )
            # Status children each yield a list of items -> flatten; operator
            # children yield one nested define each -> append.
            if attr == "depend_item_list":
                result.extend(condition.get_define())
            else:
                result.append(condition.get_define())
        setattr(self, attr, result)
        return attr

    def get_define(self, camel_attr=True) -> Dict:
        """Overwrite Base.get_define to get task Condition specific get define."""
        attr = self.set_define_attr()
        dependent_define_attr = self._DEFINE_ATTR.union({attr})
        return super().get_define_custom(
            camel_attr=True, custom_attr=dependent_define_attr
        )


class And(ConditionOperator):
    """Operator And for task condition.

    It could accept both :class:`Task` and children of :class:`ConditionOperator`,
    and set AND condition to those args.
    """


class Or(ConditionOperator):
    """Operator Or for task condition.

    It could accept both :class:`Task` and children of :class:`ConditionOperator`,
    and set OR condition to those args.
    """


class Condition(Task):
    """Task condition object, declare behavior for condition task to dolphinscheduler."""

    def __init__(
        self,
        name: str,
        condition: ConditionOperator,
        success_task: Task,
        failed_task: Task,
        *args,
        **kwargs,
    ):
        super().__init__(name, TaskType.CONDITIONS, *args, **kwargs)
        self.condition = condition
        self.success_task = success_task
        self.failed_task = failed_task
        # Set condition tasks as current task downstream
        self._set_dep()

    def _set_dep(self) -> None:
        """Set upstream according to parameter `condition`."""
        upstream = []
        for cond in self.condition.args:
            if isinstance(cond, ConditionOperator):
                for status in cond.args:
                    upstream.extend(list(status.tasks))
        self.set_upstream(upstream)
        self.set_downstream([self.success_task, self.failed_task])

    @property
    def condition_result(self) -> Dict:
        """Get condition result define for java gateway."""
        return {
            "successNode": [self.success_task.code],
            "failedNode": [self.failed_task.code],
        }

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for Condition task.

        Condition task has the special attribute ``dependence``; in most tasks
        this attribute is None and an empty dict ``{}`` is used as default. We
        do not use class attribute ``_task_custom_attr`` to avoid attribute
        covering.

        Bug fix: a property getter can never receive arguments, so the unused
        ``camel_attr``/``custom_attr`` parameters of the original signature
        were removed (no caller could ever have passed them).
        """
        params = super().task_params
        params["dependence"] = self.condition.get_define()
        return params


"""Task datax."""

from typing import List, Optional

from pydolphinscheduler.core.database import Database


class CustomDataX(Task):
    """Task CustomDatax object, declare behavior for custom DataX task to dolphinscheduler.

    You provide a JSON template for DataX, and it synchronizes data according
    to the template you provided.
    """

    # Marker value telling the server the job uses a user-supplied JSON config.
    CUSTOM_CONFIG = 1

    _task_custom_attr = {"custom_config", "json", "xms", "xmx"}

    def __init__(
        self,
        name: str,
        json: str,
        xms: Optional[int] = 1,
        xmx: Optional[int] = 1,
        *args,
        **kwargs
    ):
        super().__init__(name, TaskType.DATAX, *args, **kwargs)
        self.custom_config = self.CUSTOM_CONFIG
        self.json = json
        # JVM heap sizes (GB) for the DataX process.
        self.xms = xms
        self.xmx = xmx


class DataX(Task):
    """Task DataX object, declare behavior for DataX task to dolphinscheduler.

    It should run a database DataX job in multiple SQL-like engines, such as:
    - MySQL
    - Oracle
    - Postgresql
    - SQLServer
    You provide datasource_name and datatarget_name containing connection
    information; they decide which database type and database instance would
    synchronize data.
    """

    # Marker value telling the server the job is built from these parameters
    # rather than a user-supplied JSON config.
    CUSTOM_CONFIG = 0

    _task_custom_attr = {
        "custom_config",
        "sql",
        "target_table",
        "job_speed_byte",
        "job_speed_record",
        "pre_statements",
        "post_statements",
        "xms",
        "xmx",
    }

    def __init__(
        self,
        name: str,
        datasource_name: str,
        datatarget_name: str,
        sql: str,
        target_table: str,
        job_speed_byte: Optional[int] = 0,
        job_speed_record: Optional[int] = 1000,
        pre_statements: Optional[List[str]] = None,
        post_statements: Optional[List[str]] = None,
        xms: Optional[int] = 1,
        xmx: Optional[int] = 1,
        *args,
        **kwargs
    ):
        super().__init__(name, TaskType.DATAX, *args, **kwargs)
        self.sql = sql
        self.custom_config = self.CUSTOM_CONFIG
        self.datasource_name = datasource_name
        self.datatarget_name = datatarget_name
        self.target_table = target_table
        self.job_speed_byte = job_speed_byte
        self.job_speed_record = job_speed_record
        self.pre_statements = pre_statements or []
        self.post_statements = post_statements or []
        self.xms = xms
        self.xmx = xmx

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for datax task.

        DataX tasks have some special attributes for task_params, and it would
        be odd to set them directly as python properties, so we override
        Task.task_params here.

        Bug fix: a property getter can never receive arguments, so the unused
        ``camel_attr``/``custom_attr`` parameters of the original signature
        were removed.
        """
        params = super().task_params
        # Database objects act as dict-like bundles of connection info keyed
        # by the given type/source key names — presumably resolved server side.
        datasource = Database(self.datasource_name, "dsType", "dataSource")
        params.update(datasource)

        datatarget = Database(self.datatarget_name, "dtType", "dataTarget")
        params.update(datatarget)
        return params
"""Task dependent."""

from typing import Dict, Optional, Tuple

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.base import Base
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.exceptions import PyDSJavaGatewayException, PyDSParamException
from pydolphinscheduler.java_gateway import launch_gateway

# Sentinel the server uses to mean "depend on every task in the workflow".
DEPENDENT_ALL_TASK_IN_WORKFLOW = "0"


class DependentDate(str):
    """Constant of Dependent date value.

    These values are set according to the Java server side; if you want to add
    or change them, please change the Java server side first.
    """

    # TODO Maybe we should add parent level to DependentDate for easy to use, such as
    # DependentDate.MONTH.THIS_MONTH

    # Hour
    CURRENT_HOUR = "currentHour"
    LAST_ONE_HOUR = "last1Hour"
    LAST_TWO_HOURS = "last2Hours"
    LAST_THREE_HOURS = "last3Hours"
    LAST_TWENTY_FOUR_HOURS = "last24Hours"

    # Day
    TODAY = "today"
    LAST_ONE_DAYS = "last1Days"
    LAST_TWO_DAYS = "last2Days"
    LAST_THREE_DAYS = "last3Days"
    LAST_SEVEN_DAYS = "last7Days"

    # Week
    THIS_WEEK = "thisWeek"
    LAST_WEEK = "lastWeek"
    LAST_MONDAY = "lastMonday"
    LAST_TUESDAY = "lastTuesday"
    LAST_WEDNESDAY = "lastWednesday"
    LAST_THURSDAY = "lastThursday"
    LAST_FRIDAY = "lastFriday"
    LAST_SATURDAY = "lastSaturday"
    LAST_SUNDAY = "lastSunday"

    # Month
    THIS_MONTH = "thisMonth"
    LAST_MONTH = "lastMonth"
    LAST_MONTH_BEGIN = "lastMonthBegin"
    LAST_MONTH_END = "lastMonthEnd"


class DependentItem(Base):
    """Dependent item object, minimal unit for task dependent.

    It declares which project, process_definition, and task this task
    depends on.
    """

    _DEFINE_ATTR = {
        "project_code",
        "definition_code",
        "dep_task_code",
        "cycle",
        "date_value",
    }

    # TODO maybe we should conside overwrite operator `and` and `or` for DependentItem to
    # support more easy way to set relation
    def __init__(
        self,
        project_name: str,
        process_definition_name: str,
        dependent_task_name: Optional[str] = DEPENDENT_ALL_TASK_IN_WORKFLOW,
        dependent_date: Optional[DependentDate] = DependentDate.TODAY,
    ):
        obj_name = f"{project_name}.{process_definition_name}.{dependent_task_name}.{dependent_date}"
        super().__init__(obj_name)
        self.project_name = project_name
        self.process_definition_name = process_definition_name
        self.dependent_task_name = dependent_task_name
        if dependent_date is None:
            raise PyDSParamException(
                "Parameter dependent_date must provider by got None."
            )
        else:
            self.dependent_date = dependent_date
        # Lazy cache for the codes fetched from the Java gateway (fetched once).
        self._code = {}

    def __repr__(self) -> str:
        # The repr doubles as the attribute name DependentOperator stores
        # aggregated child definitions under.
        return "depend_item_list"

    @property
    def project_code(self) -> str:
        """Get dependent project code."""
        return self.get_code_from_gateway().get("projectCode")

    @property
    def definition_code(self) -> str:
        """Get dependent definition code."""
        return self.get_code_from_gateway().get("processDefinitionCode")

    @property
    def dep_task_code(self) -> str:
        """Get dependent tasks code list."""
        if self.is_all_task:
            return DEPENDENT_ALL_TASK_IN_WORKFLOW
        else:
            return self.get_code_from_gateway().get("taskDefinitionCode")

    # TODO Maybe we should get cycle from dependent date class.
    @property
    def cycle(self) -> str:
        """Get dependent cycle, derived from substrings of the date constant."""
        if "Hour" in self.dependent_date:
            return "hour"
        elif self.dependent_date == "today" or "Days" in self.dependent_date:
            return "day"
        elif "Month" in self.dependent_date:
            return "month"
        else:
            return "week"

    @property
    def date_value(self) -> str:
        """Get dependent date."""
        return self.dependent_date

    @property
    def is_all_task(self) -> bool:
        """Check whether dependent all tasks or not."""
        return self.dependent_task_name == DEPENDENT_ALL_TASK_IN_WORKFLOW

    @property
    def code_parameter(self) -> Tuple:
        """Get name info parameter to query code."""
        param = (
            self.project_name,
            self.process_definition_name,
            self.dependent_task_name if not self.is_all_task else None,
        )
        return param

    def get_code_from_gateway(self) -> Dict:
        """Get project, definition, task code from given parameter.

        :raises PyDSJavaGatewayException: when the gateway call fails; the
            original exception is chained as the cause.
        """
        if self._code:
            return self._code
        gateway = launch_gateway()
        try:
            self._code = gateway.entry_point.getDependentInfo(*self.code_parameter)
            return self._code
        except Exception as err:
            # Bug fix: chain the original error so the root cause is preserved
            # instead of being silently discarded.
            raise PyDSJavaGatewayException(
                "Function get_code_from_gateway error."
            ) from err


class DependentOperator(Base):
    """Set DependentItem or dependItemList with specific operator."""

    _DEFINE_ATTR = {
        "relation",
    }

    def __init__(self, *args):
        super().__init__(self.__class__.__name__)
        self.args = args

    def __repr__(self) -> str:
        return "depend_task_list"

    @classmethod
    def operator_name(cls) -> str:
        """Get operator name in different class."""
        return cls.__name__.upper()

    @property
    def relation(self) -> str:
        """Get operator name in different class, for function :func:`get_define`."""
        return self.operator_name()

    def set_define_attr(self) -> str:
        """Set attribute to function :func:`get_define`.

        It is a wrapper for both `And` and `Or` operator. All children must be
        of the same kind (all DependentItem or all DependentOperator).

        :raises PyDSParamException: on mixed or unsupported child types.
        """
        result = []
        attr = None
        for dependent in self.args:
            if isinstance(dependent, (DependentItem, DependentOperator)):
                if attr is None:
                    attr = repr(dependent)
                elif repr(dependent) != attr:
                    raise PyDSParamException(
                        "Dependent %s operator parameter only support same type.",
                        self.relation,
                    )
            else:
                raise PyDSParamException(
                    "Dependent %s operator parameter support DependentItem and "
                    "DependentOperator but got %s.",
                    (self.relation, type(dependent)),
                )
            result.append(dependent.get_define())
        setattr(self, attr, result)
        return attr

    def get_define(self, camel_attr=True) -> Dict:
        """Overwrite Base.get_define to get task dependent specific get define."""
        attr = self.set_define_attr()
        dependent_define_attr = self._DEFINE_ATTR.union({attr})
        return super().get_define_custom(
            camel_attr=True, custom_attr=dependent_define_attr
        )


class And(DependentOperator):
    """Operator And for task dependent.

    It could accept both :class:`DependentItem` and children of
    :class:`DependentOperator`, and set AND condition to those args.
    """


class Or(DependentOperator):
    """Operator Or for task dependent.

    It could accept both :class:`DependentItem` and children of
    :class:`DependentOperator`, and set OR condition to those args.
    """


class Dependent(Task):
    """Task dependent object, declare behavior for dependent task to dolphinscheduler."""

    def __init__(self, name: str, dependence: DependentOperator, *args, **kwargs):
        super().__init__(name, TaskType.DEPENDENT, *args, **kwargs)
        self.dependence = dependence

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for dependent task.

        Dependent task has the special attribute ``dependence``; in most tasks
        this attribute is None and an empty dict ``{}`` is used as default. We
        do not use class attribute ``_task_custom_attr`` to avoid attribute
        covering.

        Bug fix: a property getter can never receive arguments, so the unused
        ``camel_attr``/``custom_attr`` parameters of the original signature
        were removed.
        """
        params = super().task_params
        params["dependence"] = self.dependence.get_define()
        return params
"""Task Flink."""

from typing import Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.engine import Engine, ProgramType


class FlinkVersion(str):
    """Flink version, for now it just contain `HIGHT` and `LOW`."""

    # NOTE(review): `HIGHT_VERSION` is a misspelling of "HIGH", but it is part
    # of the public interface, so it is kept for backward compatibility.
    LOW_VERSION = "<1.10"
    HIGHT_VERSION = ">=1.10"


class DeployMode(str):
    """Flink deploy mode, for now it just contain `LOCAL` and `CLUSTER`."""

    LOCAL = "local"
    CLUSTER = "cluster"


class Flink(Engine):
    """Task flink object, declare behavior for flink task to dolphinscheduler."""

    _task_custom_attr = {
        "deploy_mode",
        "flink_version",
        "slot",
        "task_manager",
        "job_manager_memory",
        "task_manager_memory",
        "app_name",
        "parallelism",
        "main_args",
        "others",
    }

    def __init__(
        self,
        name: str,
        main_class: str,
        main_package: str,
        program_type: Optional[ProgramType] = ProgramType.SCALA,
        deploy_mode: Optional[DeployMode] = DeployMode.CLUSTER,
        flink_version: Optional[FlinkVersion] = FlinkVersion.LOW_VERSION,
        app_name: Optional[str] = None,
        job_manager_memory: Optional[str] = "1G",
        task_manager_memory: Optional[str] = "2G",
        slot: Optional[int] = 1,
        task_manager: Optional[int] = 2,
        parallelism: Optional[int] = 1,
        main_args: Optional[str] = None,
        others: Optional[str] = None,
        *args,
        **kwargs
    ):
        super().__init__(
            name,
            TaskType.FLINK,
            main_class,
            main_package,
            program_type,
            *args,
            **kwargs
        )
        # How and where the job runs.
        self.deploy_mode = deploy_mode
        self.flink_version = flink_version
        self.app_name = app_name
        # Resource sizing for the Flink cluster.
        self.job_manager_memory = job_manager_memory
        self.task_manager_memory = task_manager_memory
        self.slot = slot
        self.task_manager = task_manager
        self.parallelism = parallelism
        # Free-form arguments forwarded to the job.
        self.main_args = main_args
        self.others = others
"""Task http."""

from typing import Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.exceptions import PyDSParamException


class HttpMethod:
    """Constant of HTTP method."""

    GET = "GET"
    POST = "POST"
    HEAD = "HEAD"
    PUT = "PUT"
    DELETE = "DELETE"


class HttpCheckCondition:
    """Constant of HTTP check condition.

    For now it contains four values:
    - STATUS_CODE_DEFAULT: when http response code equal to 200, mark as success.
    - STATUS_CODE_CUSTOM: when http response code equal to the code user define, mark as success.
    - BODY_CONTAINS: when http response body contain text user define, mark as success.
    - BODY_NOT_CONTAINS: when http response body do not contain text user define, mark as success.
    """

    STATUS_CODE_DEFAULT = "STATUS_CODE_DEFAULT"
    STATUS_CODE_CUSTOM = "STATUS_CODE_CUSTOM"
    BODY_CONTAINS = "BODY_CONTAINS"
    BODY_NOT_CONTAINS = "BODY_NOT_CONTAINS"


class Http(Task):
    """Task HTTP object, declare behavior for HTTP task to dolphinscheduler.

    :raises PyDSParamException: when ``http_method`` or
        ``http_check_condition`` is not one of the declared constants, or when
        a non-default check condition is given without ``condition``.
    """

    _task_custom_attr = {
        "url",
        "http_method",
        "http_params",
        "http_check_condition",
        "condition",
        "connect_timeout",
        "socket_timeout",
    }

    def __init__(
        self,
        name: str,
        url: str,
        http_method: Optional[str] = HttpMethod.GET,
        http_params: Optional[str] = None,
        http_check_condition: Optional[str] = HttpCheckCondition.STATUS_CODE_DEFAULT,
        condition: Optional[str] = None,
        connect_timeout: Optional[int] = 60000,
        socket_timeout: Optional[int] = 60000,
        *args,
        **kwargs
    ):
        super().__init__(name, TaskType.HTTP, *args, **kwargs)
        self.url = url
        # Validate against the constant classes by attribute lookup.
        if not hasattr(HttpMethod, http_method):
            raise PyDSParamException(
                "Parameter http_method %s not support.", http_method
            )
        self.http_method = http_method
        self.http_params = http_params or []
        if not hasattr(HttpCheckCondition, http_check_condition):
            raise PyDSParamException(
                "Parameter http_check_condition %s not support.", http_check_condition
            )
        self.http_check_condition = http_check_condition
        # Non-default conditions need the user-supplied comparison value.
        if (
            http_check_condition != HttpCheckCondition.STATUS_CODE_DEFAULT
            and condition is None
        ):
            raise PyDSParamException(
                "Parameter condition must provider if http_check_condition not equal to STATUS_CODE_DEFAULT"
            )
        self.condition = condition
        # Timeouts in milliseconds.
        self.connect_timeout = connect_timeout
        self.socket_timeout = socket_timeout
"""Task MR."""

from typing import Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.engine import Engine, ProgramType


class MR(Engine):
    """Task mr object, declare behavior for mr task to dolphinscheduler."""

    _task_custom_attr = {"app_name", "main_args", "others"}

    def __init__(
        self,
        name: str,
        main_class: str,
        main_package: str,
        program_type: Optional[ProgramType] = ProgramType.SCALA,
        app_name: Optional[str] = None,
        main_args: Optional[str] = None,
        others: Optional[str] = None,
        *args,
        **kwargs
    ):
        super().__init__(
            name, TaskType.MR, main_class, main_package, program_type, *args, **kwargs
        )
        # Job display name and the free-form arguments forwarded to it.
        self.app_name = app_name
        self.main_args = main_args
        self.others = others
"""Task procedure."""

from typing import Dict

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.database import Database
from pydolphinscheduler.core.task import Task


class Procedure(Task):
    """Task Procedure object, declare behavior for Procedure task to dolphinscheduler.

    It should run a database procedure job in multiple SQL-like engines, such as:
    - ClickHouse
    - DB2
    - HIVE
    - MySQL
    - Oracle
    - Postgresql
    - Presto
    - SQLServer
    You provide datasource_name containing connection information; it decides
    which database type and database instance would run this sql.
    """

    _task_custom_attr = {"method"}

    def __init__(self, name: str, datasource_name: str, method: str, *args, **kwargs):
        super().__init__(name, TaskType.PROCEDURE, *args, **kwargs)
        self.datasource_name = datasource_name
        self.method = method

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for procedure task.

        Procedure tasks have some special attributes for task_params, and it
        would be odd to set them directly as python properties, so we override
        Task.task_params here.

        Bug fix: a property getter can never receive arguments, so the unused
        ``camel_attr``/``custom_attr`` parameters of the original signature
        were removed.
        """
        params = super().task_params
        # Database acts as a dict-like bundle of connection info keyed by the
        # given type/source key names.
        datasource = Database(self.datasource_name, "type", "datasource")
        params.update(datasource)
        return params
+ +"""Task Python.""" + +import inspect +import types +from typing import Any + +from pydolphinscheduler.constants import TaskType +from pydolphinscheduler.core.task import Task +from pydolphinscheduler.exceptions import PyDSParamException + + +class Python(Task): + """Task Python object, declare behavior for Python task to dolphinscheduler.""" + + _task_custom_attr = { + "raw_script", + } + + def __init__(self, name: str, code: Any, *args, **kwargs): + super().__init__(name, TaskType.PYTHON, *args, **kwargs) + self._code = code + + @property + def raw_script(self) -> str: + """Get python task define attribute `raw_script`.""" + if isinstance(self._code, str): + return self._code + elif isinstance(self._code, types.FunctionType): + py_function = inspect.getsource(self._code) + return py_function + else: + raise PyDSParamException( + "Parameter code do not support % for now.", type(self._code) + ) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/shell.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/shell.py new file mode 100644 index 0000000000000000000000000000000000000000..9a73535c8cab3f7d0645d778e6fafac5ba753f3a --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/shell.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Task shell.""" + +from pydolphinscheduler.constants import TaskType +from pydolphinscheduler.core.task import Task + + +class Shell(Task): + """Task shell object, declare behavior for shell task to dolphinscheduler. + + :param name: A unique, meaningful string for the shell task. + :param command: One or more command want to run in this task. + + It could be simply command:: + + Shell(name=..., command="echo task shell") + + or maybe same commands trying to do complex task:: + + command = '''echo task shell step 1; + echo task shell step 2; + echo task shell step 3 + ''' + + Shell(name=..., command=command) + + """ + + # TODO maybe we could use instance name to replace attribute `name` + # which is simplify as `task_shell = Shell(command = "echo 1")` and + # task.name assign to `task_shell` + + _task_custom_attr = { + "raw_script", + } + + def __init__(self, name: str, command: str, *args, **kwargs): + super().__init__(name, TaskType.SHELL, *args, **kwargs) + self.raw_script = command diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/spark.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/spark.py new file mode 100644 index 0000000000000000000000000000000000000000..565daad71df6916928f24c257c9f786678361e96 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/spark.py @@ -0,0 +1,94 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Task Spark."""

from typing import Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.engine import Engine, ProgramType


class SparkVersion(str):
    """Spark version, for now it just contain `SPARK1` and `SPARK2`."""

    SPARK1 = "SPARK1"
    SPARK2 = "SPARK2"


class DeployMode(str):
    """SPARK deploy mode, for now it just contain `LOCAL`, `CLIENT` and `CLUSTER`."""

    LOCAL = "local"
    CLIENT = "client"
    CLUSTER = "cluster"


class Spark(Engine):
    """Task spark object, declare behavior for spark task to dolphinscheduler.

    All driver/executor sizing parameters are forwarded verbatim to the
    server as task params; `main_class`, `main_package` and `program_type`
    are handled by the :class:`Engine` base class.
    """

    _task_custom_attr = {
        "deploy_mode",
        "spark_version",
        "driver_cores",
        "driver_memory",
        "num_executors",
        "executor_memory",
        "executor_cores",
        "app_name",
        "main_args",
        "others",
    }

    def __init__(
        self,
        name: str,
        main_class: str,
        main_package: str,
        program_type: Optional[ProgramType] = ProgramType.SCALA,
        deploy_mode: Optional[DeployMode] = DeployMode.CLUSTER,
        spark_version: Optional[SparkVersion] = SparkVersion.SPARK2,
        app_name: Optional[str] = None,
        driver_cores: Optional[int] = 1,
        driver_memory: Optional[str] = "512M",
        num_executors: Optional[int] = 2,
        executor_memory: Optional[str] = "2G",
        executor_cores: Optional[int] = 2,
        main_args: Optional[str] = None,
        others: Optional[str] = None,
        *args,
        **kwargs
    ):
        super().__init__(
            name,
            TaskType.SPARK,
            main_class,
            main_package,
            program_type,
            *args,
            **kwargs
        )
        # Cluster placement and runtime selection.
        self.deploy_mode = deploy_mode
        self.spark_version = spark_version
        self.app_name = app_name
        # Driver sizing.
        self.driver_cores = driver_cores
        self.driver_memory = driver_memory
        # Executor sizing.
        self.num_executors = num_executors
        self.executor_memory = executor_memory
        self.executor_cores = executor_cores
        # Extra command-line arguments.
        self.main_args = main_args
        self.others = others
+ +"""Task sql.""" + +import re +from typing import Dict, Optional + +from pydolphinscheduler.constants import TaskType +from pydolphinscheduler.core.database import Database +from pydolphinscheduler.core.task import Task + + +class SqlType: + """SQL type, for now it just contain `SELECT` and `NO_SELECT`.""" + + SELECT = 0 + NOT_SELECT = 1 + + +class Sql(Task): + """Task SQL object, declare behavior for SQL task to dolphinscheduler. + + It should run sql job in multiply sql lik engine, such as: + - ClickHouse + - DB2 + - HIVE + - MySQL + - Oracle + - Postgresql + - Presto + - SQLServer + You provider datasource_name contain connection information, it decisions which + database type and database instance would run this sql. + """ + + _task_custom_attr = { + "sql", + "sql_type", + "pre_statements", + "post_statements", + "display_rows", + } + + def __init__( + self, + name: str, + datasource_name: str, + sql: str, + pre_statements: Optional[str] = None, + post_statements: Optional[str] = None, + display_rows: Optional[int] = 10, + *args, + **kwargs + ): + super().__init__(name, TaskType.SQL, *args, **kwargs) + self.sql = sql + self.datasource_name = datasource_name + self.pre_statements = pre_statements or [] + self.post_statements = post_statements or [] + self.display_rows = display_rows + + @property + def sql_type(self) -> int: + """Judgement sql type, use regexp to check which type of the sql is.""" + pattern_select_str = ( + "^(?!(.* |)insert |(.* |)delete |(.* |)drop |(.* |)update |(.* |)alter ).*" + ) + pattern_select = re.compile(pattern_select_str, re.IGNORECASE) + if pattern_select.match(self.sql) is None: + return SqlType.NOT_SELECT + else: + return SqlType.SELECT + + @property + def task_params(self, camel_attr: bool = True, custom_attr: set = None) -> Dict: + """Override Task.task_params for sql task. + + sql task have some specials attribute for task_params, and is odd if we + directly set as python property, so we Override Task.task_params here. 
+ """ + params = super().task_params + datasource = Database(self.datasource_name, "type", "datasource") + params.update(datasource) + return params diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/sub_process.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/sub_process.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba6b4c64dacbfde61079e39a6d51e494199c613 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/sub_process.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Task sub_process.""" + +from typing import Dict + +from pydolphinscheduler.constants import TaskType +from pydolphinscheduler.core.task import Task +from pydolphinscheduler.exceptions import PyDSProcessDefinitionNotAssignException +from pydolphinscheduler.java_gateway import launch_gateway + + +class SubProcess(Task): + """Task SubProcess object, declare behavior for SubProcess task to dolphinscheduler.""" + + _task_custom_attr = {"process_definition_code"} + + def __init__(self, name: str, process_definition_name: str, *args, **kwargs): + super().__init__(name, TaskType.SUB_PROCESS, *args, **kwargs) + self.process_definition_name = process_definition_name + + @property + def process_definition_code(self) -> str: + """Get process definition code, a wrapper for :func:`get_process_definition_info`.""" + return self.get_process_definition_info(self.process_definition_name).get( + "code" + ) + + def get_process_definition_info(self, process_definition_name: str) -> Dict: + """Get process definition info from java gateway, contains process definition id, name, code.""" + if not self.process_definition: + raise PyDSProcessDefinitionNotAssignException( + "ProcessDefinition must be provider for task SubProcess." + ) + gateway = launch_gateway() + return gateway.entry_point.getProcessDefinitionInfo( + self.process_definition.user.name, + self.process_definition.project.name, + process_definition_name, + ) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/switch.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/switch.py new file mode 100644 index 0000000000000000000000000000000000000000..28032f88e7bd93025bae4967683d510c8998c9d7 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/tasks/switch.py @@ -0,0 +1,158 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Task Switch."""

from typing import Dict, Optional

from pydolphinscheduler.constants import TaskType
from pydolphinscheduler.core.base import Base
from pydolphinscheduler.core.task import Task
from pydolphinscheduler.exceptions import PyDSParamException


class SwitchBranch(Base):
    """Base class of ConditionBranch of task switch.

    It a parent class for :class:`Branch` and :class:`Default`.
    """

    _DEFINE_ATTR = {
        "next_node",
    }

    def __init__(self, task: Task, exp: Optional[str] = None):
        super().__init__(f"Switch.{self.__class__.__name__.upper()}")
        self.task = task
        self.exp = exp

    @property
    def next_node(self) -> str:
        """Get task switch property next_node, it return task code when init class switch."""
        return self.task.code

    @property
    def condition(self) -> Optional[str]:
        """Get task switch property condition."""
        return self.exp

    def get_define(self, camel_attr: bool = True) -> Dict:
        """Get :class:`ConditionBranch` definition attribute communicate to Java gateway server."""
        if self.condition:
            # BUGFIX: shadow the class-level set with an instance-level copy
            # instead of mutating it in place. `self._DEFINE_ATTR.add(...)`
            # mutated the set shared by EVERY SwitchBranch instance, so one
            # Branch would leak "condition" into all later Default instances.
            self._DEFINE_ATTR = self._DEFINE_ATTR | {"condition"}
        return super().get_define()


class Branch(SwitchBranch):
    """Common condition branch for switch task.

    If any condition in :class:`Branch` match, would set this :class:`Branch`'s task as downstream of task
    switch. If all condition branch do not match would set :class:`Default`'s task as task switch downstream.
    """

    def __init__(self, condition: str, task: Task):
        super().__init__(task, condition)


class Default(SwitchBranch):
    """Class default branch for switch task.

    If all condition of :class:`Branch` do not match, task switch would run the tasks in :class:`Default`
    and set :class:`Default`'s task as switch downstream. Please notice that each switch condition
    could only have one single :class:`Default`.
    """

    def __init__(self, task: Task):
        super().__init__(task)


class SwitchCondition(Base):
    """Set switch condition of given parameter."""

    _DEFINE_ATTR = {
        "depend_task_list",
    }

    def __init__(self, *args):
        super().__init__(self.__class__.__name__)
        self.args = args

    def set_define_attr(self) -> None:
        """Set attribute to function :func:`get_define`.

        It is a wrapper for both `And` and `Or` operator.

        :raises PyDSParamException: when more than one :class:`Default` branch
            is given, or when any argument is not a :class:`SwitchBranch`.
        """
        result = []
        num_branch_default = 0
        # BUGFIX: work on an instance-level copy; the original mutated the
        # class-level `_DEFINE_ATTR` set shared by all SwitchCondition
        # instances, leaking "next_node" across unrelated conditions.
        define_attr = set(self._DEFINE_ATTR)
        for condition in self.args:
            if isinstance(condition, SwitchBranch):
                if isinstance(condition, Default):
                    # BUGFIX: only a SECOND Default is an error. The original
                    # `if num_branch_default < 1 ... else raise` also raised
                    # for any Branch that merely appeared AFTER the Default.
                    if num_branch_default >= 1:
                        raise PyDSParamException(
                            "Task Switch's parameter only support exactly one default branch."
                        )
                    define_attr.add("next_node")
                    setattr(self, "next_node", condition.next_node)
                    num_branch_default += 1
                elif isinstance(condition, Branch):
                    result.append(condition.get_define())
            else:
                raise PyDSParamException(
                    "Task Switch's parameter only support SwitchBranch but got %s.",
                    type(condition),
                )
        # Handle switch default branch, default value is `""` if not provide.
        if num_branch_default == 0:
            define_attr.add("next_node")
            setattr(self, "next_node", "")
        setattr(self, "depend_task_list", result)
        self._DEFINE_ATTR = define_attr

    def get_define(self, camel_attr=True) -> Dict:
        """Overwrite Base.get_define to get task Condition specific get define."""
        self.set_define_attr()
        return super().get_define()


class Switch(Task):
    """Task switch object, declare behavior for switch task to dolphinscheduler."""

    def __init__(self, name: str, condition: SwitchCondition, *args, **kwargs):
        super().__init__(name, TaskType.SWITCH, *args, **kwargs)
        self.condition = condition
        # Set condition tasks as current task downstream
        self._set_dep()

    def _set_dep(self) -> None:
        """Set downstream according to parameter `condition`."""
        downstream = [
            cond.task for cond in self.condition.args if isinstance(cond, SwitchBranch)
        ]
        self.set_downstream(downstream)

    @property
    def task_params(self) -> Dict:
        """Override Task.task_params for switch task.

        Switch task has a special `switchResult` attribute which most tasks
        leave as an empty dict; we do not use `_task_custom_attr` to avoid
        attribute cover.

        NOTE: the upstream declaration added `camel_attr`/`custom_attr`
        parameters, but a `@property` getter is only ever called with `self`,
        so those parameters were dead code and have been removed.
        """
        params = super().task_params
        params["switchResult"] = self.condition.get_define()
        return params
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init utils package.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/date.py b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/date.py new file mode 100644 index 0000000000000000000000000000000000000000..18cf93e3181fb991bf2765f04768187ecdcc8d95 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/date.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Date util function collections.""" + +from datetime import datetime + +from pydolphinscheduler.constants import Delimiter, Time + +LEN_SUPPORT_DATETIME = ( + 15, + 19, +) + +FMT_SHORT = f"{Time.FMT_SHORT_DATE} {Time.FMT_NO_COLON_TIME}" +FMT_DASH = f"{Time.FMT_DASH_DATE} {Time.FMT_STD_TIME}" +FMT_STD = f"{Time.FMT_STD_DATE} {Time.FMT_STD_TIME}" + +MAX_DATETIME = datetime(9999, 12, 31, 23, 59, 59) + + +def conv_to_schedule(src: datetime) -> str: + """Convert given datetime to schedule date string.""" + return datetime.strftime(src, FMT_STD) + + +def conv_from_str(src: str) -> datetime: + """Convert given string to datetime. + + This function give an ability to convert string to datetime, and for now it could handle + format like: + - %Y-%m-%d + - %Y/%m/%d + - %Y%m%d + - %Y-%m-%d %H:%M:%S + - %Y/%m/%d %H:%M:%S + - %Y%m%d %H%M%S + If pattern not like above be given will raise NotImplementedError. + """ + len_ = len(src) + if len_ == Time.LEN_SHORT_DATE: + return datetime.strptime(src, Time.FMT_SHORT_DATE) + elif len_ == Time.LEN_STD_DATE: + if Delimiter.BAR in src: + return datetime.strptime(src, Time.FMT_STD_DATE) + elif Delimiter.DASH in src: + return datetime.strptime(src, Time.FMT_DASH_DATE) + else: + raise NotImplementedError( + "%s could not be convert to datetime for now.", src + ) + elif len_ in LEN_SUPPORT_DATETIME: + if Delimiter.BAR in src and Delimiter.COLON in src: + return datetime.strptime(src, FMT_STD) + elif Delimiter.DASH in src and Delimiter.COLON in src: + return datetime.strptime(src, FMT_DASH) + elif ( + Delimiter.DASH not in src + and Delimiter.BAR not in src + and Delimiter.COLON not in src + ): + return datetime.strptime(src, FMT_SHORT) + else: + raise NotImplementedError( + "%s could not be convert to datetime for now.", src + ) + else: + raise NotImplementedError("%s could not be convert to datetime for now.", src) diff --git a/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/string.py 
b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/string.py new file mode 100644 index 0000000000000000000000000000000000000000..e7e781c4d6bb94cc486c5fbd5ee84dc9b73cd394 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/src/pydolphinscheduler/utils/string.py @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""String util function collections.""" + +from pydolphinscheduler.constants import Delimiter + + +def attr2camel(attr: str, include_private=True): + """Covert class attribute name to camel case.""" + if include_private: + attr = attr.lstrip(Delimiter.UNDERSCORE) + return snake2camel(attr) + + +def snake2camel(snake: str): + """Covert snake case to camel case.""" + components = snake.split(Delimiter.UNDERSCORE) + return components[0] + "".join(x.title() for x in components[1:]) + + +def class_name2camel(class_name: str): + """Covert class name string to camel case.""" + class_name = class_name.lstrip(Delimiter.UNDERSCORE) + return class_name[0].lower() + snake2camel(class_name[1:]) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5ce1f82a1aad7648211c71a2f586b493b0b564ad --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Init tests package.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/core/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..62ce0ea4ee03f744cf853c15b1468b854ef5535c --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/core/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init core package tests.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/core/test_database.py b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_database.py new file mode 100644 index 0000000000000000000000000000000000000000..1286a4a7f8b0943486e0176fdd1aa6c289aba6ba --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_database.py @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Database.""" + + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.core.database import Database + +TEST_DATABASE_DATASOURCE_NAME = "test_datasource" +TEST_DATABASE_TYPE_KEY = "type" +TEST_DATABASE_KEY = "datasource" + + +@pytest.mark.parametrize( + "expect", + [ + { + TEST_DATABASE_TYPE_KEY: "mock_type", + TEST_DATABASE_KEY: 1, + } + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "mock_type"}), +) +def test_get_datasource_detail(mock_datasource, mock_code_version, expect): + """Test :func:`get_database_type` and :func:`get_database_id` can return expect value.""" + database_info = Database( + TEST_DATABASE_DATASOURCE_NAME, TEST_DATABASE_TYPE_KEY, TEST_DATABASE_KEY + ) + assert expect == database_info diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/core/test_engine.py b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_engine.py new file mode 100644 index 0000000000000000000000000000000000000000..e36c47ba1b6451398f8563c8c75ecc33688ec8f0 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_engine.py @@ -0,0 +1,147 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Test Task Engine."""


from unittest.mock import patch

import pytest

from pydolphinscheduler.core.engine import Engine, ProgramType

TEST_ENGINE_TASK_TYPE = "ENGINE"
TEST_MAIN_CLASS = "org.apache.examples.mock.Mock"
TEST_MAIN_PACKAGE = "Mock.jar"
TEST_PROGRAM_TYPE = ProgramType.JAVA


@patch(
    "pydolphinscheduler.core.task.Task.gen_code_and_version",
    return_value=(123, 1),
)
@patch(
    "pydolphinscheduler.core.engine.Engine.get_resource_info",
    return_value=({"id": 1, "name": "mock_name"}),
)
def test_get_jar_detail(mock_resource, mock_code_version):
    """Check that the mocked resource info resolves to the expected jar id."""
    engine_task = Engine(
        "test_get_jar_detail",
        TEST_ENGINE_TASK_TYPE,
        TEST_MAIN_CLASS,
        TEST_MAIN_PACKAGE,
        TEST_PROGRAM_TYPE,
    )
    assert engine_task.get_jar_id() == 1


@pytest.mark.parametrize(
    "attr, expect",
    [
        (
            {
                "name": "test-task-params",
                "task_type": "test-engine",
                "main_class": "org.apache.examples.mock.Mock",
                "main_package": "TestMock.jar",
                "program_type": ProgramType.JAVA,
            },
            {
                "mainClass": "org.apache.examples.mock.Mock",
                "mainJar": {
                    "id": 1,
                },
                "programType": ProgramType.JAVA,
                "localParams": [],
                "resourceList": [],
                "dependence": {},
                "conditionResult": {"successNode": [""], "failedNode": [""]},
                "waitStartTimeout": {},
            },
        )
    ],
)
@patch(
    "pydolphinscheduler.core.task.Task.gen_code_and_version",
    return_value=(123, 1),
)
@patch(
    "pydolphinscheduler.core.engine.Engine.get_resource_info",
    return_value=({"id": 1, "name": "mock_name"}),
)
def test_property_task_params(mock_resource, mock_code_version, attr, expect):
    """Check that the `task_params` property serializes engine attributes."""
    engine_task = Engine(**attr)
    assert engine_task.task_params == expect


@pytest.mark.parametrize(
    "attr, expect",
    [
        (
            {
                "name": "test-task-test_engine_get_define",
                "task_type": "test-engine",
                "main_class": "org.apache.examples.mock.Mock",
                "main_package": "TestMock.jar",
                "program_type": ProgramType.JAVA,
            },
            {
                "code": 123,
                "name": "test-task-test_engine_get_define",
                "version": 1,
                "description": None,
                "delayTime": 0,
                "taskType": "test-engine",
                "taskParams": {
                    "mainClass": "org.apache.examples.mock.Mock",
                    "mainJar": {
                        "id": 1,
                    },
                    "programType": ProgramType.JAVA,
                    "localParams": [],
                    "resourceList": [],
                    "dependence": {},
                    "conditionResult": {"successNode": [""], "failedNode": [""]},
                    "waitStartTimeout": {},
                },
                "flag": "YES",
                "taskPriority": "MEDIUM",
                "workerGroup": "default",
                "failRetryTimes": 0,
                "failRetryInterval": 1,
                "timeoutFlag": "CLOSE",
                "timeoutNotifyStrategy": None,
                "timeout": 0,
            },
        )
    ],
)
@patch(
    "pydolphinscheduler.core.task.Task.gen_code_and_version",
    return_value=(123, 1),
)
@patch(
    "pydolphinscheduler.core.engine.Engine.get_resource_info",
    return_value=({"id": 1, "name": "mock_name"}),
)
def test_engine_get_define(mock_resource, mock_code_version, attr, expect):
    """Check that `get_define` returns the full task definition mapping."""
    engine_task = Engine(**attr)
    assert engine_task.get_define() == expect
/dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_process_definition.py @@ -0,0 +1,419 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test process definition.""" + +from datetime import datetime +from typing import Any +from unittest.mock import patch + +import pytest +from freezegun import freeze_time + +from pydolphinscheduler.constants import ( + ProcessDefinitionDefault, + ProcessDefinitionReleaseState, +) +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.side import Project, Tenant, User +from pydolphinscheduler.tasks.switch import Branch, Default, Switch, SwitchCondition +from pydolphinscheduler.utils.date import conv_to_schedule +from tests.testing.task import Task + +TEST_PROCESS_DEFINITION_NAME = "simple-test-process-definition" +TEST_TASK_TYPE = "test-task-type" + + +@pytest.mark.parametrize("func", ["run", "submit", "start"]) +def test_process_definition_key_attr(func): + """Test process definition have specific functions or attributes.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + assert hasattr( + pd, func + ), f"ProcessDefinition instance don't have 
attribute `{func}`" + + +@pytest.mark.parametrize( + "name,value", + [ + ("timezone", ProcessDefinitionDefault.TIME_ZONE), + ("project", Project(ProcessDefinitionDefault.PROJECT)), + ("tenant", Tenant(ProcessDefinitionDefault.TENANT)), + ( + "user", + User( + ProcessDefinitionDefault.USER, + ProcessDefinitionDefault.USER_PWD, + ProcessDefinitionDefault.USER_EMAIL, + ProcessDefinitionDefault.USER_PHONE, + ProcessDefinitionDefault.TENANT, + ProcessDefinitionDefault.QUEUE, + ProcessDefinitionDefault.USER_STATE, + ), + ), + ("worker_group", ProcessDefinitionDefault.WORKER_GROUP), + ("release_state", ProcessDefinitionReleaseState.ONLINE), + ], +) +def test_process_definition_default_value(name, value): + """Test process definition default attributes.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + assert getattr(pd, name) == value, ( + f"ProcessDefinition instance attribute `{name}` not with " + f"except default value `{getattr(pd, name)}`" + ) + + +@pytest.mark.parametrize( + "name,cls,expect", + [ + ("name", str, "name"), + ("description", str, "description"), + ("schedule", str, "schedule"), + ("timezone", str, "timezone"), + ("worker_group", str, "worker_group"), + ("timeout", int, 1), + ("release_state", str, "OFFLINE"), + ("param", dict, {"key": "value"}), + ], +) +def test_set_attr(name, cls, expect): + """Test process definition set attributes which get with same type.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + setattr(pd, name, expect) + assert ( + getattr(pd, name) == expect + ), f"ProcessDefinition set attribute `{name}` do not work expect" + + +@pytest.mark.parametrize( + "set_attr,set_val,get_attr,get_val", + [ + ("_project", "project", "project", Project("project")), + ("_tenant", "tenant", "tenant", Tenant("tenant")), + ("_start_time", "2021-01-01", "start_time", datetime(2021, 1, 1)), + ("_end_time", "2021-01-01", "end_time", datetime(2021, 1, 1)), + ], +) +def test_set_attr_return_special_object(set_attr, 
set_val, get_attr, get_val): + """Test process definition set attributes which get with different type.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + setattr(pd, set_attr, set_val) + assert get_val == getattr( + pd, get_attr + ), f"Set attribute {set_attr} can not get back with {get_val}." + + +@pytest.mark.parametrize( + "val,expect", + [ + (datetime(2021, 1, 1), datetime(2021, 1, 1)), + (None, None), + ("2021-01-01", datetime(2021, 1, 1)), + ("2021-01-01 01:01:01", datetime(2021, 1, 1, 1, 1, 1)), + ], +) +def test__parse_datetime(val, expect): + """Test process definition function _parse_datetime. + + Only two datetime test cases here because we have more test cases in tests/utils/test_date.py file. + """ + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + assert expect == pd._parse_datetime( + val + ), f"Function _parse_datetime with unexpect value by {val}." + + +@pytest.mark.parametrize( + "val", + [ + 20210101, + (2021, 1, 1), + {"year": "2021", "month": "1", "day": 1}, + ], +) +def test__parse_datetime_not_support_type(val: Any): + """Test process definition function _parse_datetime not support type error.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + with pytest.raises(PyDSParamException, match="Do not support value type.*?"): + pd._parse_datetime(val) + + +@pytest.mark.parametrize( + "param, expect", + [ + ( + None, + [], + ), + ( + {}, + [], + ), + ( + {"key1": "val1"}, + [ + { + "prop": "key1", + "direct": "IN", + "type": "VARCHAR", + "value": "val1", + } + ], + ), + ( + { + "key1": "val1", + "key2": "val2", + }, + [ + { + "prop": "key1", + "direct": "IN", + "type": "VARCHAR", + "value": "val1", + }, + { + "prop": "key2", + "direct": "IN", + "type": "VARCHAR", + "value": "val2", + }, + ], + ), + ], +) +def test_property_param_json(param, expect): + """Test ProcessDefinition's property param_json.""" + pd = ProcessDefinition(TEST_PROCESS_DEFINITION_NAME, param=param) + assert pd.param_json == expect + 
+ +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test__pre_submit_check_switch_without_param(mock_code_version): + """Test :func:`_pre_submit_check` if process definition with switch but without attribute param.""" + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + parent = Task(name="parent", task_type=TEST_TASK_TYPE) + switch_child_1 = Task(name="switch_child_1", task_type=TEST_TASK_TYPE) + switch_child_2 = Task(name="switch_child_2", task_type=TEST_TASK_TYPE) + switch_condition = SwitchCondition( + Branch(condition="${var} > 1", task=switch_child_1), + Default(task=switch_child_2), + ) + + switch = Switch(name="switch", condition=switch_condition) + parent >> switch + with pytest.raises( + PyDSParamException, + match="Parameter param must be provider if task Switch in process definition.", + ): + pd._pre_submit_check() + + +def test_process_definition_get_define_without_task(): + """Test process definition function get_define without task.""" + expect = { + "name": TEST_PROCESS_DEFINITION_NAME, + "description": None, + "project": ProcessDefinitionDefault.PROJECT, + "tenant": ProcessDefinitionDefault.TENANT, + "workerGroup": ProcessDefinitionDefault.WORKER_GROUP, + "timeout": 0, + "releaseState": ProcessDefinitionReleaseState.ONLINE, + "param": None, + "tasks": {}, + "taskDefinitionJson": [{}], + "taskRelationJson": [{}], + } + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + assert pd.get_define() == expect + + +def test_process_definition_simple_context_manager(): + """Test simple create workflow in process definition context manager mode.""" + expect_tasks_num = 5 + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) as pd: + for i in range(expect_tasks_num): + curr_task = Task(name=f"task-{i}", task_type=f"type-{i}") + # Set deps task i as i-1 parent + if i > 0: + pre_task = pd.get_one_task_by_name(f"task-{i - 1}") + curr_task.set_upstream(pre_task) + assert len(pd.tasks) == 
expect_tasks_num + + # Test if task process_definition same as origin one + task: Task = pd.get_one_task_by_name("task-0") + assert pd is task.process_definition + + # Test if all tasks with expect deps + for i in range(expect_tasks_num): + task: Task = pd.get_one_task_by_name(f"task-{i}") + if i == 0: + assert task._upstream_task_codes == set() + assert task._downstream_task_codes == { + pd.get_one_task_by_name("task-1").code + } + elif i == expect_tasks_num - 1: + assert task._upstream_task_codes == { + pd.get_one_task_by_name(f"task-{i - 1}").code + } + assert task._downstream_task_codes == set() + else: + assert task._upstream_task_codes == { + pd.get_one_task_by_name(f"task-{i - 1}").code + } + assert task._downstream_task_codes == { + pd.get_one_task_by_name(f"task-{i + 1}").code + } + + +def test_process_definition_simple_separate(): + """Test process definition simple create workflow in separate mode. + + This test just test basic information, cause most of test case is duplicate to + test_process_definition_simple_context_manager. 
+ """ + expect_tasks_num = 5 + pd = ProcessDefinition(TEST_PROCESS_DEFINITION_NAME) + for i in range(expect_tasks_num): + curr_task = Task( + name=f"task-{i}", + task_type=f"type-{i}", + process_definition=pd, + ) + # Set deps task i as i-1 parent + if i > 0: + pre_task = pd.get_one_task_by_name(f"task-{i - 1}") + curr_task.set_upstream(pre_task) + assert len(pd.tasks) == expect_tasks_num + assert all(["task-" in task.name for task in pd.task_list]) + + +@pytest.mark.parametrize( + "user_attrs", + [ + {"tenant": "tenant_specific"}, + {"queue": "queue_specific"}, + {"tenant": "tenant_specific", "queue": "queue_specific"}, + ], +) +def test_set_process_definition_user_attr(user_attrs): + """Test user with correct attributes if we specific assigned to process definition object.""" + default_value = { + "tenant": ProcessDefinitionDefault.TENANT, + "queue": ProcessDefinitionDefault.QUEUE, + } + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME, **user_attrs) as pd: + user = pd.user + for attr in default_value: + # Get assigned attribute if we specific, else get default value + except_attr = ( + user_attrs[attr] if attr in user_attrs else default_value[attr] + ) + # Get actually attribute of user object + actual_attr = getattr(user, attr) + assert ( + except_attr == actual_attr + ), f"Except attribute is {except_attr} but get {actual_attr}" + + +def test_schedule_json_none_schedule(): + """Test function schedule_json with None as schedule.""" + with ProcessDefinition( + TEST_PROCESS_DEFINITION_NAME, + schedule=None, + ) as pd: + assert pd.schedule_json is None + + +# We freeze time here, because we test start_time with None, and if will get datetime.datetime.now. If we do +# not freeze time, it will cause flaky test here. 
+@freeze_time("2021-01-01") +@pytest.mark.parametrize( + "start_time,end_time,expect_date", + [ + ( + "20210101", + "20210201", + {"start_time": "2021-01-01 00:00:00", "end_time": "2021-02-01 00:00:00"}, + ), + ( + "2021-01-01", + "2021-02-01", + {"start_time": "2021-01-01 00:00:00", "end_time": "2021-02-01 00:00:00"}, + ), + ( + "2021/01/01", + "2021/02/01", + {"start_time": "2021-01-01 00:00:00", "end_time": "2021-02-01 00:00:00"}, + ), + # Test mix pattern + ( + "2021/01/01 01:01:01", + "2021-02-02 02:02:02", + {"start_time": "2021-01-01 01:01:01", "end_time": "2021-02-02 02:02:02"}, + ), + ( + "2021/01/01 01:01:01", + "20210202 020202", + {"start_time": "2021-01-01 01:01:01", "end_time": "2021-02-02 02:02:02"}, + ), + ( + "20210101 010101", + "2021-02-02 02:02:02", + {"start_time": "2021-01-01 01:01:01", "end_time": "2021-02-02 02:02:02"}, + ), + # Test None value + ( + "2021/01/01 01:02:03", + None, + {"start_time": "2021-01-01 01:02:03", "end_time": "9999-12-31 23:59:59"}, + ), + ( + None, + None, + { + "start_time": conv_to_schedule(datetime(2021, 1, 1)), + "end_time": "9999-12-31 23:59:59", + }, + ), + ], +) +def test_schedule_json_start_and_end_time(start_time, end_time, expect_date): + """Test function schedule_json about handle start_time and end_time. + + Only two datetime test cases here because we have more test cases in tests/utils/test_date.py file. + """ + schedule = "0 0 0 * * ? 
*" + expect = { + "crontab": schedule, + "startTime": expect_date["start_time"], + "endTime": expect_date["end_time"], + "timezoneId": ProcessDefinitionDefault.TIME_ZONE, + } + with ProcessDefinition( + TEST_PROCESS_DEFINITION_NAME, + schedule=schedule, + start_time=start_time, + end_time=end_time, + timezone=ProcessDefinitionDefault.TIME_ZONE, + ) as pd: + assert pd.schedule_json == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/core/test_task.py b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_task.py new file mode 100644 index 0000000000000000000000000000000000000000..6af731b5ff7179e45706e4d4005f9075739f3bfa --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/core/test_task.py @@ -0,0 +1,224 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task class function.""" + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.core.task import Task, TaskRelation +from tests.testing.task import Task as testTask + +TEST_TASK_RELATION_SET = set() +TEST_TASK_RELATION_SIZE = 0 + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + dict(), + { + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ), + ( + { + "local_params": ["foo", "bar"], + "resource_list": ["foo", "bar"], + "dependence": {"foo", "bar"}, + "wait_start_timeout": {"foo", "bar"}, + "condition_result": {"foo": ["bar"]}, + }, + { + "localParams": ["foo", "bar"], + "resourceList": ["foo", "bar"], + "dependence": {"foo", "bar"}, + "waitStartTimeout": {"foo", "bar"}, + "conditionResult": {"foo": ["bar"]}, + }, + ), + ], +) +def test_property_task_params(attr, expect): + """Test class task property.""" + task = testTask( + "test-property-task-params", + "test-task", + **attr, + ) + assert expect == task.task_params + + +@pytest.mark.parametrize( + "pre_code, post_code, expect", + [ + (123, 456, hash("123 -> 456")), + (12345678, 987654321, hash("12345678 -> 987654321")), + ], +) +def test_task_relation_hash_func(pre_code, post_code, expect): + """Test TaskRelation magic function :func:`__hash__`.""" + task_param = TaskRelation(pre_task_code=pre_code, post_task_code=post_code) + assert hash(task_param) == expect + + +@pytest.mark.parametrize( + "pre_code, post_code, size_add", + [ + (123, 456, 1), + (123, 456, 0), + (456, 456, 1), + (123, 123, 1), + (456, 123, 1), + (0, 456, 1), + (123, 0, 1), + ], +) +def test_task_relation_add_to_set(pre_code, post_code, size_add): + """Test TaskRelation with different pre_code and post_code add to set behavior. + + Here we use global variable to keep set of :class:`TaskRelation` instance and the number we expect + of the size when we add a new task relation to exists set. 
+ """ + task_relation = TaskRelation(pre_task_code=pre_code, post_task_code=post_code) + TEST_TASK_RELATION_SET.add(task_relation) + # hint python interpreter use global variable instead of local's + global TEST_TASK_RELATION_SIZE + TEST_TASK_RELATION_SIZE += size_add + assert len(TEST_TASK_RELATION_SET) == TEST_TASK_RELATION_SIZE + + +def test_task_relation_to_dict(): + """Test TaskRelation object function to_dict.""" + pre_task_code = 123 + post_task_code = 456 + expect = { + "name": "", + "preTaskCode": pre_task_code, + "postTaskCode": post_task_code, + "preTaskVersion": 1, + "postTaskVersion": 1, + "conditionType": 0, + "conditionParams": {}, + } + task_relation = TaskRelation( + pre_task_code=pre_task_code, post_task_code=post_task_code + ) + assert task_relation.get_define() == expect + + +def test_task_get_define(): + """Test Task object function get_define.""" + code = 123 + version = 1 + name = "test_task_get_define" + task_type = "test_task_get_define_type" + expect = { + "code": code, + "name": name, + "version": version, + "description": None, + "delayTime": 0, + "taskType": task_type, + "taskParams": { + "resourceList": [], + "localParams": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = Task(name=name, task_type=task_type) + assert task.get_define() == expect + + +@pytest.mark.parametrize("shift", ["<<", ">>"]) +def test_two_tasks_shift(shift: str): + """Test bit operator between tasks. + + Here we test both `>>` and `<<` bit operator. 
+ """ + upstream = testTask(name="upstream", task_type=shift) + downstream = testTask(name="downstream", task_type=shift) + if shift == "<<": + downstream << upstream + elif shift == ">>": + upstream >> downstream + else: + assert False, f"Unexpect bit operator type {shift}." + assert ( + 1 == len(upstream._downstream_task_codes) + and downstream.code in upstream._downstream_task_codes + ), "Task downstream task attributes error, downstream codes size or specific code failed." + assert ( + 1 == len(downstream._upstream_task_codes) + and upstream.code in downstream._upstream_task_codes + ), "Task upstream task attributes error, upstream codes size or upstream code failed." + + +@pytest.mark.parametrize( + "dep_expr, flag", + [ + ("task << tasks", "upstream"), + ("tasks << task", "downstream"), + ("task >> tasks", "downstream"), + ("tasks >> task", "upstream"), + ], +) +def test_tasks_list_shift(dep_expr: str, flag: str): + """Test bit operator between task and sequence of tasks. + + Here we test both `>>` and `<<` bit operator. 
+ """ + reverse_dict = { + "upstream": "downstream", + "downstream": "upstream", + } + task_type = "dep_task_and_tasks" + task = testTask(name="upstream", task_type=task_type) + tasks = [ + testTask(name="downstream1", task_type=task_type), + testTask(name="downstream2", task_type=task_type), + ] + + # Use build-in function eval to simply test case and reduce duplicate code + eval(dep_expr) + direction_attr = f"_{flag}_task_codes" + reverse_direction_attr = f"_{reverse_dict[flag]}_task_codes" + assert 2 == len(getattr(task, direction_attr)) + assert [t.code in getattr(task, direction_attr) for t in tasks] + + assert all([1 == len(getattr(t, reverse_direction_attr)) for t in tasks]) + assert all([task.code in getattr(t, reverse_direction_attr) for t in tasks]) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/example/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/example/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..49323e711d2ab74e3b4129f68724bb80a2df90be --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/example/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Init example package tests.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/example/test_example.py b/dolphinscheduler-python/pydolphinscheduler/tests/example/test_example.py new file mode 100644 index 0000000000000000000000000000000000000000..5bf897f56004eeac702523e141c5e949c96b0fd2 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/example/test_example.py @@ -0,0 +1,172 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test example.""" + +import ast +import importlib +from unittest.mock import patch + +import pytest + +from tests.testing.constants import task_without_example +from tests.testing.path import get_all_examples, get_tasks +from tests.testing.task import Task + +process_definition_name = set() + + +def import_module(script_name, script_path): + """Import and run example module in examples directory.""" + spec = importlib.util.spec_from_file_location(script_name, script_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def test_task_without_example(): + """Test task which without example. + + Avoiding add new type of tasks but without adding example describe how to use it. 
+ """ + # We use example/tutorial.py as shell task example + ignore_name = {"__init__.py", "shell.py"} + all_tasks = {task.stem for task in get_tasks(ignore_name=ignore_name)} + + have_example_tasks = set() + start = "task_" + end = "_example" + for ex in get_all_examples(): + stem = ex.stem + if stem.startswith(start) and stem.endswith(end): + task_name = stem.replace(start, "").replace(end, "") + have_example_tasks.add(task_name) + + assert all_tasks.difference(have_example_tasks) == task_without_example + + +@pytest.fixture +def setup_and_teardown_for_stuff(): + """Fixture of py.test handle setup and teardown.""" + yield + global process_definition_name + process_definition_name = set() + + +def submit_check_without_same_name(self): + """Side effect for verifying process definition name and adding it to global variable.""" + if self.name in process_definition_name: + raise ValueError( + "Example process definition should not have same name, but get duplicate name: %s", + self.name, + ) + submit_add_process_definition(self) + + +def submit_add_process_definition(self): + """Side effect for adding process definition name to global variable.""" + process_definition_name.add(self.name) + + +def test_example_basic(): + """Test example basic information. + + Which including: + * File extension name is `.py` + * All example except `tutorial.py` is end with keyword "_example" + * All example must have not empty `__doc__`. + """ + for ex in get_all_examples(): + # All files in example is python script + assert ( + ex.suffix == ".py" + ), f"We expect all examples is python script, but get {ex.name}." + + # All except tutorial and __init__ is end with keyword "_example" + if ex.stem != "tutorial" and ex.stem != "__init__": + assert ex.stem.endswith( + "_example" + ), f"We expect all examples script end with keyword '_example', but get {ex.stem}." 
+ + # All files have __doc__ + tree = ast.parse(ex.read_text()) + example_doc = ast.get_docstring(tree, clean=False) + assert ( + example_doc is not None + ), f"We expect all examples have __doc__, but {ex.name} do not." + + +@patch("pydolphinscheduler.core.process_definition.ProcessDefinition.start") +@patch( + "pydolphinscheduler.core.process_definition.ProcessDefinition.submit", + side_effect=submit_check_without_same_name, + autospec=True, +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + # Example bulk_create_example.py would create workflow dynamic by :func:`get_one_task_by_name` + # and would raise error in :func:`get_one_task_by_name` if we return constant value + # using :arg:`return_value` + side_effect=Task("test_example", "test_example").gen_code_and_version, +) +def test_example_process_definition_without_same_name( + mock_code_version, mock_submit, mock_start +): + """Test all examples file without same process definition's name. + + Our process definition would compete with others if we have same process definition name. It will make + different between actually workflow and our workflow-as-code file which make users feel strange. 
+ """ + for ex in get_all_examples(): + # We use side_effect `submit_check_without_same_name` overwrite :func:`submit` + # and check whether it have duplicate name or not + import_module(ex.name, str(ex)) + assert True + + +@patch("pydolphinscheduler.core.process_definition.ProcessDefinition.start") +@patch( + "pydolphinscheduler.core.process_definition.ProcessDefinition.submit", + side_effect=submit_add_process_definition, + autospec=True, +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + # Example bulk_create_example.py would create workflow dynamic by :func:`get_one_task_by_name` + # and would raise error in :func:`get_one_task_by_name` if we return constant value + # using :arg:`return_value` + side_effect=Task("test_example", "test_example").gen_code_and_version, +) +def test_file_name_in_process_definition(mock_code_version, mock_submit, mock_start): + """Test example file name in example definition name. + + We should not directly assert equal, because some of the examples contain + more than one process definition. + """ + global process_definition_name + for ex in get_all_examples(): + # Skip __init__ file + if ex.stem == "__init__": + continue + # Skip bulk_create_example check, cause it contain multiple workflow and + # without one named bulk_create_example + if ex.stem == "bulk_create_example": + continue + process_definition_name = set() + assert ex.stem not in process_definition_name + import_module(ex.name, str(ex)) + assert ex.stem in process_definition_name diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..095e3013e5cfe293a2098e2dff3e3b70c29e2f87 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init tasks package tests.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_condition.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_condition.py new file mode 100644 index 0000000000000000000000000000000000000000..523264034a857a3d3fb8e854315d29fd4550c4f8 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_condition.py @@ -0,0 +1,460 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task dependent.""" +from typing import List, Tuple +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.tasks.condition import ( + FAILURE, + SUCCESS, + And, + Condition, + ConditionOperator, + Or, + Status, +) +from tests.testing.task import Task + +TEST_NAME = "test-name" +TEST_PROJECT = "test-project" +TEST_PROCESS_DEFINITION = "test-process-definition" +TEST_TYPE = "test-type" +TEST_PROJECT_CODE, TEST_DEFINITION_CODE, TEST_TASK_CODE = 12345, 123456, 1234567 + +TEST_OPERATOR_LIST = ("AND", "OR") + + +@pytest.mark.parametrize( + "obj, expect", + [ + (Status, "STATUS"), + (SUCCESS, "SUCCESS"), + (FAILURE, "FAILURE"), + ], +) +def test_class_status_status_name(obj: Status, expect: str): + """Test class status and sub class property status_name.""" + assert obj.status_name() == expect + + +@pytest.mark.parametrize( + "obj, tasks", + [ + (Status, (1, 2, 3)), + (SUCCESS, (1.1, 2.2, 3.3)), + (FAILURE, (ConditionOperator(1), ConditionOperator(2), ConditionOperator(3))), + ], +) +def test_class_status_depend_item_list_no_expect_type(obj: Status, tasks: Tuple): + """Test class status and sub class raise error when assign not support type.""" + with pytest.raises( + PyDSParamException, match=".*?only accept class Task or sub class Task, but get" + ): + obj(*tasks).get_define() + + +@pytest.mark.parametrize( + "obj, tasks", + [ + (Status, [Task(str(i), TEST_TYPE) for i in range(1)]), + (Status, [Task(str(i), TEST_TYPE) for i in range(2)]), + (Status, [Task(str(i), TEST_TYPE) for i in range(3)]), + (SUCCESS, [Task(str(i), TEST_TYPE) for i in range(1)]), + (SUCCESS, [Task(str(i), TEST_TYPE) for i in range(2)]), + (SUCCESS, [Task(str(i), TEST_TYPE) for i in range(3)]), + (FAILURE, [Task(str(i), TEST_TYPE) for i in range(1)]), + (FAILURE, [Task(str(i), TEST_TYPE) for i in range(2)]), + (FAILURE, [Task(str(i), 
TEST_TYPE) for i in range(3)]), + ], +) +def test_class_status_depend_item_list(obj: Status, tasks: Tuple): + """Test class status and sub class function :func:`depend_item_list`.""" + status = obj.status_name() + expect = [ + { + "depTaskCode": i.code, + "status": status, + } + for i in tasks + ] + assert obj(*tasks).get_define() == expect + + +@pytest.mark.parametrize( + "obj, expect", + [ + (ConditionOperator, "CONDITIONOPERATOR"), + (And, "AND"), + (Or, "OR"), + ], +) +def test_condition_operator_operator_name(obj: ConditionOperator, expect: str): + """Test class ConditionOperator and sub class class function :func:`operator_name`.""" + assert obj.operator_name() == expect + + +@pytest.mark.parametrize( + "obj, expect", + [ + (ConditionOperator, "CONDITIONOPERATOR"), + (And, "AND"), + (Or, "OR"), + ], +) +def test_condition_operator_relation(obj: ConditionOperator, expect: str): + """Test class ConditionOperator and sub class class property `relation`.""" + assert obj(1).relation == expect + + +@pytest.mark.parametrize( + "obj, status_or_operator, match", + [ + ( + ConditionOperator, + [Status(Task("1", TEST_TYPE)), 1], + ".*?operator parameter support ConditionTask and ConditionOperator.*?", + ), + ( + ConditionOperator, + [ + Status(Task("1", TEST_TYPE)), + 1.0, + ], + ".*?operator parameter support ConditionTask and ConditionOperator.*?", + ), + ( + ConditionOperator, + [ + Status(Task("1", TEST_TYPE)), + ConditionOperator(And(Status(Task("1", TEST_TYPE)))), + ], + ".*?operator parameter only support same type.", + ), + ( + ConditionOperator, + [ + ConditionOperator(And(Status(Task("1", TEST_TYPE)))), + Status(Task("1", TEST_TYPE)), + ], + ".*?operator parameter only support same type.", + ), + ], +) +def test_condition_operator_set_define_attr_not_support_type( + obj, status_or_operator, match +): + """Test class ConditionOperator parameter error, including parameter not same or type not support.""" + with pytest.raises(PyDSParamException, match=match): + 
op = obj(*status_or_operator) + op.set_define_attr() + + +@pytest.mark.parametrize( + "obj, task_num", + [ + (ConditionOperator, 1), + (ConditionOperator, 2), + (ConditionOperator, 3), + (And, 1), + (And, 2), + (And, 3), + (Or, 1), + (Or, 2), + (Or, 3), + ], +) +def test_condition_operator_set_define_attr_status( + obj: ConditionOperator, task_num: int +): + """Test :func:`set_define_attr` with one or more class status.""" + attr = "depend_item_list" + + tasks = [Task(str(i), TEST_TYPE) for i in range(task_num)] + status = Status(*tasks) + + expect = [ + {"depTaskCode": task.code, "status": status.status_name()} for task in tasks + ] + + co = obj(status) + co.set_define_attr() + assert getattr(co, attr) == expect + + +@pytest.mark.parametrize( + "obj, status", + [ + (ConditionOperator, (SUCCESS, SUCCESS)), + (ConditionOperator, (FAILURE, FAILURE)), + (ConditionOperator, (SUCCESS, FAILURE)), + (ConditionOperator, (FAILURE, SUCCESS)), + (And, (SUCCESS, SUCCESS)), + (And, (FAILURE, FAILURE)), + (And, (SUCCESS, FAILURE)), + (And, (FAILURE, SUCCESS)), + (Or, (SUCCESS, SUCCESS)), + (Or, (FAILURE, FAILURE)), + (Or, (SUCCESS, FAILURE)), + (Or, (FAILURE, SUCCESS)), + ], +) +def test_condition_operator_set_define_attr_mix_status( + obj: ConditionOperator, status: List[Status] +): + """Test :func:`set_define_attr` with one or more mixed status.""" + attr = "depend_item_list" + + task = Task("test-operator", TEST_TYPE) + status_list = [] + expect = [] + for sta in status: + status_list.append(sta(task)) + expect.append({"depTaskCode": task.code, "status": sta.status_name()}) + + co = obj(*status_list) + co.set_define_attr() + assert getattr(co, attr) == expect + + +@pytest.mark.parametrize( + "obj, task_num", + [ + (ConditionOperator, 1), + (ConditionOperator, 2), + (ConditionOperator, 3), + (And, 1), + (And, 2), + (And, 3), + (Or, 1), + (Or, 2), + (Or, 3), + ], +) +def test_condition_operator_set_define_attr_operator( + obj: ConditionOperator, task_num: int +): + """Test 
:func:`set_define_attr` with one or more class condition operator.""" + attr = "depend_task_list" + + task = Task("test-operator", TEST_TYPE) + status = Status(task) + + expect = [ + { + "relation": obj.operator_name(), + "dependItemList": [ + { + "depTaskCode": task.code, + "status": status.status_name(), + } + ], + } + for _ in range(task_num) + ] + + co = obj(*[obj(status) for _ in range(task_num)]) + co.set_define_attr() + assert getattr(co, attr) == expect + + +@pytest.mark.parametrize( + "cond, sub_cond", + [ + (ConditionOperator, (And, Or)), + (ConditionOperator, (Or, And)), + (And, (And, Or)), + (And, (Or, And)), + (Or, (And, Or)), + (Or, (Or, And)), + ], +) +def test_condition_operator_set_define_attr_mix_operator( + cond: ConditionOperator, sub_cond: Tuple[ConditionOperator] +): + """Test :func:`set_define_attr` with one or more class mix condition operator.""" + attr = "depend_task_list" + + task = Task("test-operator", TEST_TYPE) + + expect = [] + sub_condition = [] + for cond in sub_cond: + status = Status(task) + sub_condition.append(cond(status)) + expect.append( + { + "relation": cond.operator_name(), + "dependItemList": [ + { + "depTaskCode": task.code, + "status": status.status_name(), + } + ], + } + ) + co = cond(*sub_condition) + co.set_define_attr() + assert getattr(co, attr) == expect + + +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(12345, 1), +) +@patch( + "pydolphinscheduler.tasks.condition.Condition.gen_code_and_version", + return_value=(123, 1), +) +def test_condition_get_define(mock_condition_code_version, mock_task_code_version): + """Test task condition :func:`get_define`.""" + common_task = Task(name="common_task", task_type="test_task_condition") + cond_operator = And( + And( + SUCCESS(common_task, common_task), + FAILURE(common_task, common_task), + ), + Or( + SUCCESS(common_task, common_task), + FAILURE(common_task, common_task), + ), + ) + + name = "test_condition_get_define" + expect = { + 
"code": 123, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "CONDITIONS", + "taskParams": { + "resourceList": [], + "localParams": [], + "dependence": { + "relation": "AND", + "dependTaskList": [ + { + "relation": "AND", + "dependItemList": [ + {"depTaskCode": common_task.code, "status": "SUCCESS"}, + {"depTaskCode": common_task.code, "status": "SUCCESS"}, + {"depTaskCode": common_task.code, "status": "FAILURE"}, + {"depTaskCode": common_task.code, "status": "FAILURE"}, + ], + }, + { + "relation": "OR", + "dependItemList": [ + {"depTaskCode": common_task.code, "status": "SUCCESS"}, + {"depTaskCode": common_task.code, "status": "SUCCESS"}, + {"depTaskCode": common_task.code, "status": "FAILURE"}, + {"depTaskCode": common_task.code, "status": "FAILURE"}, + ], + }, + ], + }, + "conditionResult": { + "successNode": [common_task.code], + "failedNode": [common_task.code], + }, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + + task = Condition( + name, condition=cond_operator, success_task=common_task, failed_task=common_task + ) + assert task.get_define() == expect + + +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_condition_set_dep_workflow(mock_task_code_version): + """Test task condition set dependence in workflow level.""" + with ProcessDefinition(name="test-condition-set-dep-workflow") as pd: + pre_task_1 = Task(name="pre_task_1", task_type=TEST_TYPE) + pre_task_2 = Task(name="pre_task_2", task_type=TEST_TYPE) + pre_task_3 = Task(name="pre_task_3", task_type=TEST_TYPE) + cond_operator = And( + And( + SUCCESS(pre_task_1, pre_task_2), + FAILURE(pre_task_3), + ), + ) + + success_branch = Task(name="success_branch", task_type=TEST_TYPE) + fail_branch = Task(name="fail_branch", 
task_type=TEST_TYPE) + + condition = Condition( + name="conditions", + condition=cond_operator, + success_task=success_branch, + failed_task=fail_branch, + ) + + # General tasks test + assert len(pd.tasks) == 6 + assert sorted(pd.task_list, key=lambda t: t.name) == sorted( + [ + pre_task_1, + pre_task_2, + pre_task_3, + success_branch, + fail_branch, + condition, + ], + key=lambda t: t.name, + ) + # Task dep test + assert success_branch._upstream_task_codes == {condition.code} + assert fail_branch._upstream_task_codes == {condition.code} + assert condition._downstream_task_codes == { + success_branch.code, + fail_branch.code, + } + + # Condition task dep after ProcessDefinition function get_define called + assert condition._upstream_task_codes == { + pre_task_1.code, + pre_task_2.code, + pre_task_3.code, + } + assert all( + [ + child._downstream_task_codes == {condition.code} + for child in [ + pre_task_1, + pre_task_2, + pre_task_3, + ] + ] + ) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_datax.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_datax.py new file mode 100644 index 0000000000000000000000000000000000000000..9473f5732121df3a46d1aa4277287131b9e1ef09 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_datax.py @@ -0,0 +1,122 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task DataX.""" + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.tasks.datax import CustomDataX, DataX + + +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "MYSQL"}), +) +def test_datax_get_define(mock_datasource): + """Test task datax function get_define.""" + code = 123 + version = 1 + name = "test_datax_get_define" + command = "select name from test_source_table_name" + datasource_name = "test_datasource" + datatarget_name = "test_datatarget" + target_table = "test_target_table_name" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "DATAX", + "taskParams": { + "customConfig": 0, + "dsType": "MYSQL", + "dataSource": 1, + "dtType": "MYSQL", + "dataTarget": 1, + "sql": command, + "targetTable": target_table, + "jobSpeedByte": 0, + "jobSpeedRecord": 1000, + "xms": 1, + "xmx": 1, + "preStatements": [], + "postStatements": [], + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = DataX(name, datasource_name, datatarget_name, command, target_table) + assert task.get_define() == 
expect + + +@pytest.mark.parametrize("json_template", ["json_template"]) +def test_custom_datax_get_define(json_template): + """Test task custom datax function get_define.""" + code = 123 + version = 1 + name = "test_custom_datax_get_define" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "DATAX", + "taskParams": { + "customConfig": 1, + "json": json_template, + "xms": 1, + "xmx": 1, + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = CustomDataX(name, json_template) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_dependent.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_dependent.py new file mode 100644 index 0000000000000000000000000000000000000000..f16e291c82313ca3673f8b781b539bd569c332a7 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_dependent.py @@ -0,0 +1,793 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task dependent.""" +import itertools +from typing import Dict, List, Optional, Tuple, Union +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.tasks.dependent import ( + And, + Dependent, + DependentDate, + DependentItem, + DependentOperator, + Or, +) + +TEST_PROJECT = "test-project" +TEST_PROCESS_DEFINITION = "test-process-definition" +TEST_TASK = "test-task" +TEST_PROJECT_CODE, TEST_DEFINITION_CODE, TEST_TASK_CODE = 12345, 123456, 1234567 + +TEST_OPERATOR_LIST = ("AND", "OR") + + +@pytest.mark.parametrize( + "dep_date, dep_cycle", + [ + # hour + (DependentDate.CURRENT_HOUR, "hour"), + (DependentDate.LAST_ONE_HOUR, "hour"), + (DependentDate.LAST_TWO_HOURS, "hour"), + (DependentDate.LAST_THREE_HOURS, "hour"), + (DependentDate.LAST_TWENTY_FOUR_HOURS, "hour"), + # day + (DependentDate.TODAY, "day"), + (DependentDate.LAST_ONE_DAYS, "day"), + (DependentDate.LAST_TWO_DAYS, "day"), + (DependentDate.LAST_THREE_DAYS, "day"), + (DependentDate.LAST_SEVEN_DAYS, "day"), + # week + (DependentDate.THIS_WEEK, "week"), + (DependentDate.LAST_WEEK, "week"), + (DependentDate.LAST_MONDAY, "week"), + (DependentDate.LAST_TUESDAY, "week"), + (DependentDate.LAST_WEDNESDAY, "week"), + (DependentDate.LAST_THURSDAY, "week"), + (DependentDate.LAST_FRIDAY, "week"), + (DependentDate.LAST_SATURDAY, "week"), + (DependentDate.LAST_SUNDAY, "week"), + # month + (DependentDate.THIS_MONTH, "month"), + (DependentDate.LAST_MONTH, "month"), + (DependentDate.LAST_MONTH_BEGIN, "month"), + 
(DependentDate.LAST_MONTH_END, "month"), + ], +) +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +def test_dependent_item_get_define(mock_task_info, dep_date, dep_cycle): + """Test dependent.DependentItem get define. + + Here we have test some cases as below. + ```py + { + "projectCode": "project code", + "definitionCode": "definition code", + "depTaskCode": "dep task code", + "cycle": "day", + "dateValue": "today" + } + ``` + """ + attr = { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": dep_date, + } + expect = { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": dep_cycle, + "dateValue": dep_date, + } + task = DependentItem(**attr) + assert expect == task.get_define() + + +def test_dependent_item_date_error(): + """Test error when pass None to dependent_date.""" + with pytest.raises( + PyDSParamException, match="Parameter dependent_date must provider.*?" 
+ ): + DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + dependent_date=None, + ) + + +@pytest.mark.parametrize( + "task_name, result", + [ + ({"dependent_task_name": TEST_TASK}, TEST_TASK), + ({}, None), + ], +) +def test_dependent_item_code_parameter(task_name: dict, result: Optional[str]): + """Test dependent item property code_parameter.""" + dependent_item = DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + **task_name, + ) + expect = (TEST_PROJECT, TEST_PROCESS_DEFINITION, result) + assert dependent_item.code_parameter == expect + + +@pytest.mark.parametrize( + "arg_list", + [ + [1, 2], + [ + DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + ), + 1, + ], + [ + And( + DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + ) + ), + 1, + ], + [ + DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + ), + And( + DependentItem( + project_name=TEST_PROJECT, + process_definition_name=TEST_PROCESS_DEFINITION, + ) + ), + ], + ], +) +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +def test_dependent_operator_set_define_error(mock_code, arg_list): + """Test dependent operator function :func:`set_define` with not support type.""" + dep_op = DependentOperator(*arg_list) + with pytest.raises(PyDSParamException, match="Dependent .*? 
operator.*?"): + dep_op.set_define_attr() + + +@pytest.mark.parametrize( + # Test dependent operator, Test dependent item parameters, expect operator define + "operators, kwargs, expect", + [ + # Test dependent operator (And | Or) with single dependent item + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + ), + [ + { + "relation": op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + ], + } + for op in TEST_OPERATOR_LIST + ], + ), + # Test dependent operator (And | Or) with two dependent item + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_WEEK, + }, + ), + [ + { + "relation": op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "week", + "dateValue": DependentDate.LAST_WEEK, + }, + ], + } + for op in TEST_OPERATOR_LIST + ], + ), + # Test dependent operator (And | Or) with multiply dependent item + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": 
TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_WEEK, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_ONE_DAYS, + }, + ), + [ + { + "relation": op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "week", + "dateValue": DependentDate.LAST_WEEK, + }, + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "day", + "dateValue": DependentDate.LAST_ONE_DAYS, + }, + ], + } + for op in TEST_OPERATOR_LIST + ], + ), + ], +) +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +def test_operator_dependent_item( + mock_code_info, + operators: Tuple[DependentOperator], + kwargs: Tuple[dict], + expect: List[Dict], +): + """Test DependentOperator(DependentItem) function get_define. + + Here we have test some cases as below, including single dependentItem and multiply dependentItem. + ```py + { + "relation": "AND", + "dependItemList": [ + { + "projectCode": "project code", + "definitionCode": "definition code", + "depTaskCode": "dep task code", + "cycle": "day", + "dateValue": "today" + }, + ... 
+ ] + } + ``` + """ + for idx, operator in enumerate(operators): + # Use variable to keep one or more dependent item to test dependent operator behavior + dependent_item_list = [] + for kwarg in kwargs: + dependent_item = DependentItem(**kwarg) + dependent_item_list.append(dependent_item) + op = operator(*dependent_item_list) + assert expect[idx] == op.get_define() + + +@pytest.mark.parametrize( + # Test dependent operator, Test dependent item parameters, expect operator define + "operators, args, expect", + [ + # Test dependent operator (And | Or) with single dependent task list + ( + (And, Or), + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + ), + ), + [ + { + "relation": par_op, + "dependTaskList": [ + { + "relation": chr_op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + ], + } + ], + } + for (par_op, chr_op) in itertools.product( + TEST_OPERATOR_LIST, TEST_OPERATOR_LIST + ) + ], + ), + # Test dependent operator (And | Or) with two dependent task list + ( + (And, Or), + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_WEEK, + }, + ), + ), + [ + { + "relation": par_op, + "dependTaskList": [ + { + "relation": chr_op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + { + "projectCode": 
TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "week", + "dateValue": DependentDate.LAST_WEEK, + }, + ], + } + ], + } + for (par_op, chr_op) in itertools.product( + TEST_OPERATOR_LIST, TEST_OPERATOR_LIST + ) + ], + ), + # Test dependent operator (And | Or) with multiply dependent task list + ( + (And, Or), + ( + (And, Or), + ( + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_WEEK, + }, + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_ONE_DAYS, + }, + ), + ), + [ + { + "relation": par_op, + "dependTaskList": [ + { + "relation": chr_op, + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "week", + "dateValue": DependentDate.LAST_WEEK, + }, + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "day", + "dateValue": DependentDate.LAST_ONE_DAYS, + }, + ], + } + ], + } + for (par_op, chr_op) in itertools.product( + TEST_OPERATOR_LIST, TEST_OPERATOR_LIST + ) + ], + ), + ], +) +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +def 
test_operator_dependent_task_list_multi_dependent_item( + mock_code_info, + operators: Tuple[DependentOperator], + args: Tuple[Union[Tuple, dict]], + expect: List[Dict], +): + """Test DependentOperator(DependentOperator(DependentItem)) single operator function get_define. + + Here we have test some cases as below. This test case only test single DependTaskList with one or + multiply dependItemList. + ```py + { + "relation": "OR", + "dependTaskList": [ + { + "relation": "AND", + "dependItemList": [ + { + "projectCode": "project code", + "definitionCode": "definition code", + "depTaskCode": "dep task code", + "cycle": "day", + "dateValue": "today" + }, + ... + ] + }, + ] + } + ``` + """ + # variable expect_idx record idx should be use to get specific expect + expect_idx = 0 + + for op_idx, operator in enumerate(operators): + dependent_operator = args[0] + dependent_item_kwargs = args[1] + + for dop_idx, dpt_op in enumerate(dependent_operator): + dependent_item_list = [] + for dpt_kwargs in dependent_item_kwargs: + dpti = DependentItem(**dpt_kwargs) + dependent_item_list.append(dpti) + child_dep_op = dpt_op(*dependent_item_list) + op = operator(child_dep_op) + assert expect[expect_idx] == op.get_define() + expect_idx += 1 + + +def get_dep_task_list(*operator): + """Return dependent task list from given operators list.""" + result = [] + for op in operator: + result.append( + { + "relation": op.operator_name(), + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "month", + "dateValue": DependentDate.LAST_MONTH_END, + }, + ], + } + ) + return result + + +@pytest.mark.parametrize( + # Test dependent operator, Test dependent item parameters, expect operator define + "operators, args, expect", + [ + # Test dependent operator (And | Or) with two dependent task list + ( + (And, Or), + ( + ((And, And), (And, Or), (Or, And), (Or, Or)), + { + "project_name": TEST_PROJECT, + 
"process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + ), + [ + { + "relation": parent_op.operator_name(), + "dependTaskList": get_dep_task_list(*child_ops), + } + for parent_op in (And, Or) + for child_ops in ((And, And), (And, Or), (Or, And), (Or, Or)) + ], + ), + # Test dependent operator (And | Or) with multiple dependent task list + ( + (And, Or), + ( + ((And, And, And), (And, And, And, And), (And, And, And, And, And)), + { + "project_name": TEST_PROJECT, + "process_definition_name": TEST_PROCESS_DEFINITION, + "dependent_task_name": TEST_TASK, + "dependent_date": DependentDate.LAST_MONTH_END, + }, + ), + [ + { + "relation": parent_op.operator_name(), + "dependTaskList": get_dep_task_list(*child_ops), + } + for parent_op in (And, Or) + for child_ops in ( + (And, And, And), + (And, And, And, And), + (And, And, And, And, And), + ) + ], + ), + ], +) +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +def test_operator_dependent_task_list_multi_dependent_list( + mock_code_info, + operators: Tuple[DependentOperator], + args: Tuple[Union[Tuple, dict]], + expect: List[Dict], +): + """Test DependentOperator(DependentOperator(DependentItem)) multiply operator function get_define. + + Here we have test some cases as below. This test case only test single DependTaskList with one or + multiply dependTaskList. + ```py + { + "relation": "OR", + "dependTaskList": [ + { + "relation": "AND", + "dependItemList": [ + { + "projectCode": "project code", + "definitionCode": "definition code", + "depTaskCode": "dep task code", + "cycle": "day", + "dateValue": "today" + } + ] + }, + ... 
+ ] + } + ``` + """ + # variable expect_idx record idx should be use to get specific expect + expect_idx = 0 + for op_idx, operator in enumerate(operators): + dependent_operator = args[0] + dependent_item_kwargs = args[1] + + for dop_idx, dpt_ops in enumerate(dependent_operator): + dependent_task_list = [ + dpt_op(DependentItem(**dependent_item_kwargs)) for dpt_op in dpt_ops + ] + op = operator(*dependent_task_list) + assert ( + expect[expect_idx] == op.get_define() + ), f"Failed with operator syntax {operator}.{dpt_ops}" + expect_idx += 1 + + +@patch( + "pydolphinscheduler.tasks.dependent.DependentItem.get_code_from_gateway", + return_value={ + "projectCode": TEST_PROJECT_CODE, + "processDefinitionCode": TEST_DEFINITION_CODE, + "taskDefinitionCode": TEST_TASK_CODE, + }, +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_dependent_get_define(mock_code_version, mock_dep_code): + """Test task dependent function get_define.""" + project_name = "test-dep-project" + process_definition_name = "test-dep-definition" + dependent_task_name = "test-dep-task" + dep_operator = And( + Or( + # test dependence with add tasks + DependentItem( + project_name=project_name, + process_definition_name=process_definition_name, + ) + ), + And( + # test dependence with specific task + DependentItem( + project_name=project_name, + process_definition_name=process_definition_name, + dependent_task_name=dependent_task_name, + ) + ), + ) + + name = "test_dependent_get_define" + expect = { + "code": 123, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "DEPENDENT", + "taskParams": { + "resourceList": [], + "localParams": [], + "dependence": { + "relation": "AND", + "dependTaskList": [ + { + "relation": "OR", + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": "0", + "cycle": "day", + "dateValue": "today", + } + ], + }, + { + 
"relation": "AND", + "dependItemList": [ + { + "projectCode": TEST_PROJECT_CODE, + "definitionCode": TEST_DEFINITION_CODE, + "depTaskCode": TEST_TASK_CODE, + "cycle": "day", + "dateValue": "today", + } + ], + }, + ], + }, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + + task = Dependent(name, dependence=dep_operator) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_flink.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_flink.py new file mode 100644 index 0000000000000000000000000000000000000000..92ae3ba91fac5fd9b73bb3ead522e06b2ee007cb --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_flink.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task Flink.""" + +from unittest.mock import patch + +from pydolphinscheduler.tasks.flink import DeployMode, Flink, FlinkVersion, ProgramType + + +@patch( + "pydolphinscheduler.core.engine.Engine.get_resource_info", + return_value=({"id": 1, "name": "test"}), +) +def test_flink_get_define(mock_resource): + """Test task flink function get_define.""" + code = 123 + version = 1 + name = "test_flink_get_define" + main_class = "org.apache.flink.test_main_class" + main_package = "test_main_package" + program_type = ProgramType.JAVA + deploy_mode = DeployMode.LOCAL + + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "FLINK", + "taskParams": { + "mainClass": main_class, + "mainJar": { + "id": 1, + }, + "programType": program_type, + "deployMode": deploy_mode, + "flinkVersion": FlinkVersion.LOW_VERSION, + "slot": 1, + "parallelism": 1, + "taskManager": 2, + "jobManagerMemory": "1G", + "taskManagerMemory": "2G", + "appName": None, + "mainArgs": None, + "others": None, + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = Flink(name, main_class, main_package, program_type, deploy_mode) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_http.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_http.py new file mode 100644 index 0000000000000000000000000000000000000000..060cdec0b0d85735a954623039759f79df3a584d --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_http.py @@ -0,0 +1,144 @@ +# 
Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task HTTP.""" + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.tasks.http import Http, HttpCheckCondition, HttpMethod + + +@pytest.mark.parametrize( + "class_name, attrs", + [ + (HttpMethod, ("GET", "POST", "HEAD", "PUT", "DELETE")), + ( + HttpCheckCondition, + ( + "STATUS_CODE_DEFAULT", + "STATUS_CODE_CUSTOM", + "BODY_CONTAINS", + "BODY_NOT_CONTAINS", + ), + ), + ], +) +def test_attr_exists(class_name, attrs): + """Test weather class HttpMethod and HttpCheckCondition contain specific attribute.""" + assert all(hasattr(class_name, attr) for attr in attrs) + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + {"url": "https://www.apache.org"}, + { + "url": "https://www.apache.org", + "httpMethod": "GET", + "httpParams": [], + "httpCheckCondition": "STATUS_CODE_DEFAULT", + "condition": None, + "connectTimeout": 60000, + "socketTimeout": 60000, + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + 
return_value=(123, 1), +) +def test_property_task_params(mock_code_version, attr, expect): + """Test task http property.""" + task = Http("test-http-task-params", **attr) + assert expect == task.task_params + + +@pytest.mark.parametrize( + "param", + [ + {"http_method": "http_method"}, + {"http_check_condition": "http_check_condition"}, + {"http_check_condition": HttpCheckCondition.STATUS_CODE_CUSTOM}, + { + "http_check_condition": HttpCheckCondition.STATUS_CODE_CUSTOM, + "condition": None, + }, + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_http_task_param_not_support_param(mock_code, param): + """Test HttpTaskParams not support parameter.""" + url = "https://www.apache.org" + with pytest.raises(PyDSParamException, match="Parameter .*?"): + Http("test-no-supprot-param", url, **param) + + +def test_http_get_define(): + """Test task HTTP function get_define.""" + code = 123 + version = 1 + name = "test_http_get_define" + url = "https://www.apache.org" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "HTTP", + "taskParams": { + "localParams": [], + "httpParams": [], + "url": url, + "httpMethod": "GET", + "httpCheckCondition": "STATUS_CODE_DEFAULT", + "condition": None, + "connectTimeout": 60000, + "socketTimeout": 60000, + "dependence": {}, + "resourceList": [], + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + http = Http(name, url) + assert http.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_map_reduce.py 
b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_map_reduce.py new file mode 100644 index 0000000000000000000000000000000000000000..dbe9e513f5c34f379dd1fcf428746d3b99b24809 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_map_reduce.py @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task MR.""" + +from unittest.mock import patch + +from pydolphinscheduler.tasks.map_reduce import MR, ProgramType + + +@patch( + "pydolphinscheduler.core.engine.Engine.get_resource_info", + return_value=({"id": 1, "name": "test"}), +) +def test_mr_get_define(mock_resource): + """Test task mr function get_define.""" + code = 123 + version = 1 + name = "test_mr_get_define" + main_class = "org.apache.mr.test_main_class" + main_package = "test_main_package" + program_type = ProgramType.JAVA + main_args = "/dolphinscheduler/resources/file.txt /output/ds" + + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "MR", + "taskParams": { + "mainClass": main_class, + "mainJar": { + "id": 1, + }, + "programType": program_type, + "appName": None, + "mainArgs": main_args, + "others": None, + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = MR(name, main_class, main_package, program_type, main_args=main_args) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_procedure.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_procedure.py new file mode 100644 index 0000000000000000000000000000000000000000..17825939555efeb6f26d937e8266a0813824fa61 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_procedure.py @@ -0,0 +1,106 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task Procedure.""" + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.tasks.procedure import Procedure + +TEST_PROCEDURE_SQL = ( + 'create procedure HelloWorld() selece "hello world"; call HelloWorld();' +) +TEST_PROCEDURE_DATASOURCE_NAME = "test_datasource" + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + { + "name": "test-procedure-task-params", + "datasource_name": TEST_PROCEDURE_DATASOURCE_NAME, + "method": TEST_PROCEDURE_SQL, + }, + { + "method": TEST_PROCEDURE_SQL, + "type": "MYSQL", + "datasource": 1, + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "MYSQL"}), +) +def test_property_task_params(mock_datasource, mock_code_version, attr, expect): + """Test task sql task property.""" + task = Procedure(**attr) + assert expect == task.task_params + + +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", 
+ return_value=({"id": 1, "type": "MYSQL"}), +) +def test_sql_get_define(mock_datasource, mock_code_version): + """Test task procedure function get_define.""" + name = "test_procedure_get_define" + expect = { + "code": 123, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "PROCEDURE", + "taskParams": { + "type": "MYSQL", + "datasource": 1, + "method": TEST_PROCEDURE_SQL, + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + task = Procedure(name, TEST_PROCEDURE_DATASOURCE_NAME, TEST_PROCEDURE_SQL) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_python.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_python.py new file mode 100644 index 0000000000000000000000000000000000000000..dbcd2986fb1b3432f9ab2ae6735df0547eec3e9b --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_python.py @@ -0,0 +1,122 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task python.""" + + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.tasks.python import Python + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + {"code": "print(1)"}, + { + "rawScript": "print(1)", + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_property_task_params(mock_code_version, attr, expect): + """Test task python property.""" + task = Python("test-python-task-params", **attr) + assert expect == task.task_params + + +@pytest.mark.parametrize( + "script_code", + [ + 123, + ("print", "hello world"), + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_python_task_not_support_code(mock_code, script_code): + """Test python task parameters.""" + name = "not_support_code_type" + with pytest.raises(PyDSParamException, match="Parameter code do not support .*?"): + task = Python(name, script_code) + task.raw_script + + +def foo(): # noqa: D103 + print("hello world.") + + +@pytest.mark.parametrize( + "name, script_code, raw", + [ + ("string_define", 'print("hello world.")', 'print("hello world.")'), + ( + "function_define", + foo, + 'def foo(): # noqa: D103\n print("hello world.")\n', + ), + ], +) +def test_python_get_define(name, script_code, raw): + """Test task python function get_define.""" + code = 123 + version = 1 + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "PYTHON", + "taskParams": { + "resourceList": [], + "localParams": [], + "rawScript": raw, + "dependence": {}, + "conditionResult": 
{"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + shell = Python(name, script_code) + assert shell.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_shell.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_shell.py new file mode 100644 index 0000000000000000000000000000000000000000..e42f6dc0fb9da1ae49781a1ed344b7f28c6ddd0c --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_shell.py @@ -0,0 +1,89 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task shell.""" + + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.tasks.shell import Shell + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + {"command": "test script"}, + { + "rawScript": "test script", + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_property_task_params(mock_code_version, attr, expect): + """Test task shell task property.""" + task = Shell("test-shell-task-params", **attr) + assert expect == task.task_params + + +def test_shell_get_define(): + """Test task shell function get_define.""" + code = 123 + version = 1 + name = "test_shell_get_define" + command = "echo test shell" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "SHELL", + "taskParams": { + "resourceList": [], + "localParams": [], + "rawScript": command, + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + shell = Shell(name, command) + assert shell.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_spark.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_spark.py new file mode 100644 index 0000000000000000000000000000000000000000..3b0672f9634513ff04400719895e2fe246a3cb30 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_spark.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation 
(ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task Spark.""" + +from unittest.mock import patch + +from pydolphinscheduler.tasks.spark import DeployMode, ProgramType, Spark, SparkVersion + + +@patch( + "pydolphinscheduler.core.engine.Engine.get_resource_info", + return_value=({"id": 1, "name": "test"}), +) +def test_spark_get_define(mock_resource): + """Test task spark function get_define.""" + code = 123 + version = 1 + name = "test_spark_get_define" + main_class = "org.apache.spark.test_main_class" + main_package = "test_main_package" + program_type = ProgramType.JAVA + deploy_mode = DeployMode.LOCAL + + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "SPARK", + "taskParams": { + "mainClass": main_class, + "mainJar": { + "id": 1, + }, + "programType": program_type, + "deployMode": deploy_mode, + "sparkVersion": SparkVersion.SPARK2, + "driverCores": 1, + "driverMemory": "512M", + "numExecutors": 2, + "executorMemory": "2G", + "executorCores": 2, + "appName": None, + "mainArgs": None, + "others": None, + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": 
"MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = Spark(name, main_class, main_package, program_type, deploy_mode) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sql.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sql.py new file mode 100644 index 0000000000000000000000000000000000000000..3f8209c23e6fd7e61df6dfdb3f0244c96844c879 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sql.py @@ -0,0 +1,149 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test Task Sql.""" + + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.tasks.sql import Sql, SqlType + + +@pytest.mark.parametrize( + "sql, sql_type", + [ + ("select 1", SqlType.SELECT), + (" select 1", SqlType.SELECT), + (" select 1 ", SqlType.SELECT), + (" select 'insert' ", SqlType.SELECT), + (" select 'insert ' ", SqlType.SELECT), + ("with tmp as (select 1) select * from tmp ", SqlType.SELECT), + ("insert into table_name(col1, col2) value (val1, val2)", SqlType.NOT_SELECT), + ( + "insert into table_name(select, col2) value ('select', val2)", + SqlType.NOT_SELECT, + ), + ("update table_name SET col1=val1 where col1=val2", SqlType.NOT_SELECT), + ("update table_name SET col1='select' where col1=val2", SqlType.NOT_SELECT), + ("delete from table_name where id < 10", SqlType.NOT_SELECT), + ("delete from table_name where id < 10", SqlType.NOT_SELECT), + ("alter table table_name add column col1 int", SqlType.NOT_SELECT), + ], +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "mock_type"}), +) +def test_get_sql_type(mock_datasource, mock_code_version, sql, sql_type): + """Test property sql_type could return correct type.""" + name = "test_get_sql_type" + datasource_name = "test_datasource" + task = Sql(name, datasource_name, sql) + assert ( + sql_type == task.sql_type + ), f"Sql {sql} expect sql type is {sql_type} but got {task.sql_type}" + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + {"datasource_name": "datasource_name", "sql": "select 1"}, + { + "sql": "select 1", + "type": "MYSQL", + "datasource": 1, + "sqlType": SqlType.SELECT, + "preStatements": [], + "postStatements": [], + "displayRows": 10, + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) 
+@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "MYSQL"}), +) +def test_property_task_params(mock_datasource, mock_code_version, attr, expect): + """Test task sql task property.""" + task = Sql("test-sql-task-params", **attr) + assert expect == task.task_params + + +@patch( + "pydolphinscheduler.core.database.Database.get_database_info", + return_value=({"id": 1, "type": "MYSQL"}), +) +def test_sql_get_define(mock_datasource): + """Test task sql function get_define.""" + code = 123 + version = 1 + name = "test_sql_get_define" + command = "select 1" + datasource_name = "test_datasource" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "SQL", + "taskParams": { + "type": "MYSQL", + "datasource": 1, + "sql": command, + "sqlType": SqlType.SELECT, + "displayRows": 10, + "preStatements": [], + "postStatements": [], + "localParams": [], + "resourceList": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + task = Sql(name, datasource_name, command) + assert task.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sub_process.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sub_process.py new file mode 100644 index 0000000000000000000000000000000000000000..7f471a1b8b258103899f80737eb5d0262738cbd4 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_sub_process.py @@ -0,0 +1,114 @@ +# Licensed to 
the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task sub_process.""" + + +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.tasks.sub_process import SubProcess + +TEST_SUB_PROCESS_DEFINITION_NAME = "sub-test-process-definition" +TEST_SUB_PROCESS_DEFINITION_CODE = "3643589832320" +TEST_PROCESS_DEFINITION_NAME = "simple-test-process-definition" + + +@pytest.mark.parametrize( + "attr, expect", + [ + ( + {"process_definition_name": TEST_SUB_PROCESS_DEFINITION_NAME}, + { + "processDefinitionCode": TEST_SUB_PROCESS_DEFINITION_CODE, + "localParams": [], + "resourceList": [], + "dependence": {}, + "waitStartTimeout": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + }, + ) + ], +) +@patch( + "pydolphinscheduler.tasks.sub_process.SubProcess.get_process_definition_info", + return_value=( + { + "id": 1, + "name": TEST_SUB_PROCESS_DEFINITION_NAME, + "code": TEST_SUB_PROCESS_DEFINITION_CODE, + } + ), +) +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_property_task_params(mock_code_version, mock_pd_info, attr, expect): + """Test task sub process property.""" + task = 
SubProcess("test-sub-process-task-params", **attr) + assert expect == task.task_params + + +@patch( + "pydolphinscheduler.tasks.sub_process.SubProcess.get_process_definition_info", + return_value=( + { + "id": 1, + "name": TEST_SUB_PROCESS_DEFINITION_NAME, + "code": TEST_SUB_PROCESS_DEFINITION_CODE, + } + ), +) +def test_sub_process_get_define(mock_process_definition): + """Test task sub_process function get_define.""" + code = 123 + version = 1 + name = "test_sub_process_get_define" + expect = { + "code": code, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "SUB_PROCESS", + "taskParams": { + "resourceList": [], + "localParams": [], + "processDefinitionCode": TEST_SUB_PROCESS_DEFINITION_CODE, + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + with patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(code, version), + ): + with ProcessDefinition(TEST_PROCESS_DEFINITION_NAME): + sub_process = SubProcess(name, TEST_SUB_PROCESS_DEFINITION_NAME) + assert sub_process.get_define() == expect diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_switch.py b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_switch.py new file mode 100644 index 0000000000000000000000000000000000000000..1f6ff5bfa2dd05a20966db7ab77d40a67a4b4e5a --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/tasks/test_switch.py @@ -0,0 +1,300 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test Task switch.""" + +from typing import Optional, Tuple +from unittest.mock import patch + +import pytest + +from pydolphinscheduler.core.process_definition import ProcessDefinition +from pydolphinscheduler.exceptions import PyDSParamException +from pydolphinscheduler.tasks.switch import ( + Branch, + Default, + Switch, + SwitchBranch, + SwitchCondition, +) +from tests.testing.task import Task + +TEST_NAME = "test-task" +TEST_TYPE = "test-type" + + +def task_switch_arg_wrapper(obj, task: Task, exp: Optional[str] = None) -> SwitchBranch: + """Wrap task switch and its subclass.""" + if obj is Default: + return obj(task) + elif obj is Branch: + return obj(exp, task) + else: + return obj(task, exp) + + +@pytest.mark.parametrize( + "obj", + [ + SwitchBranch, + Branch, + Default, + ], +) +def test_switch_branch_attr_next_node(obj: SwitchBranch): + """Test get attribute from class switch branch.""" + task = Task(name=TEST_NAME, task_type=TEST_TYPE) + switch_branch = task_switch_arg_wrapper(obj, task=task, exp="unittest") + assert switch_branch.next_node == task.code + + +@pytest.mark.parametrize( + "obj", + [ + SwitchBranch, + Default, + ], +) +def test_switch_branch_get_define_without_condition(obj: SwitchBranch): + """Test function :func:`get_define` with None value of attribute condition from class switch branch.""" + task = Task(name=TEST_NAME, task_type=TEST_TYPE) + expect = {"nextNode": 
task.code} + switch_branch = task_switch_arg_wrapper(obj, task=task) + assert switch_branch.get_define() == expect + + +@pytest.mark.parametrize( + "obj", + [ + SwitchBranch, + Branch, + ], +) +def test_switch_branch_get_define_condition(obj: SwitchBranch): + """Test function :func:`get_define` with specific attribute condition from class switch branch.""" + task = Task(name=TEST_NAME, task_type=TEST_TYPE) + exp = "${var} == 1" + expect = { + "nextNode": task.code, + "condition": exp, + } + switch_branch = task_switch_arg_wrapper(obj, task=task, exp=exp) + assert switch_branch.get_define() == expect + + +@pytest.mark.parametrize( + "args, msg", + [ + ( + (1,), + ".*?parameter only support SwitchBranch but got.*?", + ), + ( + (Default(Task(TEST_NAME, TEST_TYPE)), 2), + ".*?parameter only support SwitchBranch but got.*?", + ), + ( + (Default(Task(TEST_NAME, TEST_TYPE)), Default(Task(TEST_NAME, TEST_TYPE))), + ".*?parameter only support exactly one default branch", + ), + ( + ( + Branch(condition="unittest", task=Task(TEST_NAME, TEST_TYPE)), + Default(Task(TEST_NAME, TEST_TYPE)), + Default(Task(TEST_NAME, TEST_TYPE)), + ), + ".*?parameter only support exactly one default branch", + ), + ], +) +def test_switch_condition_set_define_attr_error(args: Tuple, msg: str): + """Test error case on :class:`SwitchCondition`.""" + switch_condition = SwitchCondition(*args) + with pytest.raises(PyDSParamException, match=msg): + switch_condition.set_define_attr() + + +def test_switch_condition_set_define_attr_default(): + """Test set :class:`Default` to attribute on :class:`SwitchCondition`.""" + task = Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition(Default(task)) + switch_condition.set_define_attr() + assert getattr(switch_condition, "next_node") == task.code + assert getattr(switch_condition, "depend_task_list") == [] + + +def test_switch_condition_set_define_attr_branch(): + """Test set :class:`Branch` to attribute on :class:`SwitchCondition`.""" + task = 
Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition( + Branch("unittest1", task), Branch("unittest2", task) + ) + expect = [ + {"condition": "unittest1", "nextNode": task.code}, + {"condition": "unittest2", "nextNode": task.code}, + ] + + switch_condition.set_define_attr() + assert getattr(switch_condition, "next_node") == "" + assert getattr(switch_condition, "depend_task_list") == expect + + +def test_switch_condition_set_define_attr_mix_branch_and_default(): + """Test set bot :class:`Branch` and :class:`Default` to attribute on :class:`SwitchCondition`.""" + task = Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition( + Branch("unittest1", task), Branch("unittest2", task), Default(task) + ) + expect = [ + {"condition": "unittest1", "nextNode": task.code}, + {"condition": "unittest2", "nextNode": task.code}, + ] + + switch_condition.set_define_attr() + assert getattr(switch_condition, "next_node") == task.code + assert getattr(switch_condition, "depend_task_list") == expect + + +def test_switch_condition_get_define_default(): + """Test function :func:`get_define` with :class:`Default` in :class:`SwitchCondition`.""" + task = Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition(Default(task)) + expect = { + "dependTaskList": [], + "nextNode": task.code, + } + assert switch_condition.get_define() == expect + + +def test_switch_condition_get_define_branch(): + """Test function :func:`get_define` with :class:`Branch` in :class:`SwitchCondition`.""" + task = Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition( + Branch("unittest1", task), Branch("unittest2", task) + ) + expect = { + "dependTaskList": [ + {"condition": "unittest1", "nextNode": task.code}, + {"condition": "unittest2", "nextNode": task.code}, + ], + "nextNode": "", + } + assert switch_condition.get_define() == expect + + +def test_switch_condition_get_define_mix_branch_and_default(): + """Test function :func:`get_define` with both :class:`Branch` and 
:class:`Default`.""" + task = Task(TEST_NAME, TEST_TYPE) + switch_condition = SwitchCondition( + Branch("unittest1", task), Branch("unittest2", task), Default(task) + ) + expect = { + "dependTaskList": [ + {"condition": "unittest1", "nextNode": task.code}, + {"condition": "unittest2", "nextNode": task.code}, + ], + "nextNode": task.code, + } + assert switch_condition.get_define() == expect + + +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_switch_get_define(mock_task_code_version): + """Test task switch :func:`get_define`.""" + task = Task(name=TEST_NAME, task_type=TEST_TYPE) + switch_condition = SwitchCondition( + Branch(condition="${var1} > 1", task=task), + Branch(condition="${var1} <= 1", task=task), + Default(task), + ) + + name = "test_switch_get_define" + expect = { + "code": 123, + "name": name, + "version": 1, + "description": None, + "delayTime": 0, + "taskType": "SWITCH", + "taskParams": { + "resourceList": [], + "localParams": [], + "dependence": {}, + "conditionResult": {"successNode": [""], "failedNode": [""]}, + "waitStartTimeout": {}, + "switchResult": { + "dependTaskList": [ + {"condition": "${var1} > 1", "nextNode": task.code}, + {"condition": "${var1} <= 1", "nextNode": task.code}, + ], + "nextNode": task.code, + }, + }, + "flag": "YES", + "taskPriority": "MEDIUM", + "workerGroup": "default", + "failRetryTimes": 0, + "failRetryInterval": 1, + "timeoutFlag": "CLOSE", + "timeoutNotifyStrategy": None, + "timeout": 0, + } + + task = Switch(name, condition=switch_condition) + assert task.get_define() == expect + + +@patch( + "pydolphinscheduler.core.task.Task.gen_code_and_version", + return_value=(123, 1), +) +def test_switch_set_dep_workflow(mock_task_code_version): + """Test task switch set dependence in workflow level.""" + with ProcessDefinition(name="test-switch-set-dep-workflow") as pd: + parent = Task(name="parent", task_type=TEST_TYPE) + switch_child_1 = Task(name="switch_child_1", 
task_type=TEST_TYPE) + switch_child_2 = Task(name="switch_child_2", task_type=TEST_TYPE) + switch_condition = SwitchCondition( + Branch(condition="${var} > 1", task=switch_child_1), + Default(task=switch_child_2), + ) + + switch = Switch(name=TEST_NAME, condition=switch_condition) + parent >> switch + # General tasks test + assert len(pd.tasks) == 4 + assert sorted(pd.task_list, key=lambda t: t.name) == sorted( + [parent, switch, switch_child_1, switch_child_2], key=lambda t: t.name + ) + # Task dep test + assert parent._downstream_task_codes == {switch.code} + assert switch._upstream_task_codes == {parent.code} + + # Switch task dep after ProcessDefinition function get_define called + assert switch._downstream_task_codes == { + switch_child_1.code, + switch_child_2.code, + } + assert all( + [ + child._upstream_task_codes == {switch.code} + for child in [switch_child_1, switch_child_2] + ] + ) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/test_docs.py b/dolphinscheduler-python/pydolphinscheduler/tests/test_docs.py new file mode 100644 index 0000000000000000000000000000000000000000..930e4f709e1e27852525a8d90d887c0f1e2925b6 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/test_docs.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test pydolphinscheduler docs.""" + +import re + +from tests.testing.constants import task_without_example +from tests.testing.path import get_doc_tasks, get_tasks + +ignore_code_file = {"__init__.py"} +ignore_doc_file = {"index.rst"} + + +def test_without_missing_task_rst(): + """Test without missing any task document by compare filename. + + Avoiding add new type of tasks but without adding document about it. + """ + code_files = {p.stem for p in get_tasks(ignore_name=ignore_code_file)} + doc_files = {p.stem for p in get_doc_tasks(ignore_name=ignore_doc_file)} + assert code_files == doc_files + + +def test_task_without_example(): + """Test task document which without example. + + Avoiding add new type of tasks but without adding example content describe how to use it. + """ + task_without_example_detected = set() + pattern = re.compile("Example\n-------") + + for doc in get_doc_tasks(ignore_name=ignore_doc_file): + search_result = pattern.search(doc.read_text()) + if not search_result: + task_without_example_detected.add(doc.stem) + assert task_without_example == task_without_example_detected + + +def test_doc_automodule_directive_name(): + """Test task document with correct name in directive automodule.""" + pattern = re.compile(".. 
automodule:: (.*)") + for doc in get_doc_tasks(ignore_name=ignore_doc_file): + match_string = pattern.search(doc.read_text()).group(1) + assert f"pydolphinscheduler.tasks.{doc.stem}" == match_string diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/test_java_gateway.py b/dolphinscheduler-python/pydolphinscheduler/tests/test_java_gateway.py new file mode 100644 index 0000000000000000000000000000000000000000..3c8831e16d78935d153551ccd07cee0c030839d4 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/test_java_gateway.py @@ -0,0 +1,52 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test pydolphinscheduler java gateway.""" + + +from py4j.java_gateway import JavaGateway, java_import + + +def test_gateway_connect(): + """Test weather client could connect java gate way or not.""" + gateway = JavaGateway() + app = gateway.entry_point + assert app.ping() == "PONG" + + +def test_jvm_simple(): + """Test use JVM build-in object and operator from java gateway.""" + gateway = JavaGateway() + smaller = gateway.jvm.java.lang.Integer.MIN_VALUE + bigger = gateway.jvm.java.lang.Integer.MAX_VALUE + assert bigger > smaller + + +def test_python_client_java_import_single(): + """Test import single class from java gateway.""" + gateway = JavaGateway() + java_import(gateway.jvm, "org.apache.dolphinscheduler.common.utils.FileUtils") + assert hasattr(gateway.jvm, "FileUtils") + + +def test_python_client_java_import_package(): + """Test import package contain multiple class from java gateway.""" + gateway = JavaGateway() + java_import(gateway.jvm, "org.apache.dolphinscheduler.common.utils.*") + # test if jvm view have some common utils + for util in ("FileUtils", "OSUtils", "DateUtils"): + assert hasattr(gateway.jvm, util) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/testing/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/testing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c8caf5b5af45eb9f0160f8d7d35c9f739b3078c2 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/testing/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Init testing package, it provider easy way for pydolphinscheduler test.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/testing/constants.py b/dolphinscheduler-python/pydolphinscheduler/tests/testing/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..155219267eac4e9f6840b31a69701e65cdec2b44 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/testing/constants.py @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Constants variables for test module.""" + +# Record some task without example in directory `example`. Some of them maybe can not write example, +# but most of them just without adding by mistake, and we should add it later. 
+task_without_example = { + "sql", + "http", + "sub_process", + "python", + "procedure", +} diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/testing/path.py b/dolphinscheduler-python/pydolphinscheduler/tests/testing/path.py new file mode 100644 index 0000000000000000000000000000000000000000..2e75be24ac97df35b490baa2cbfe97a7a2451d16 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/testing/path.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Handle path related issue in test module.""" + +from pathlib import Path +from typing import Any, Generator + +path_code_tasks = Path(__file__).parent.parent.parent.joinpath( + "src", "pydolphinscheduler", "tasks" +) +path_example = Path(__file__).parent.parent.parent.joinpath( + "src", "pydolphinscheduler", "examples" +) +path_doc_tasks = Path(__file__).parent.parent.parent.joinpath("docs", "source", "tasks") + + +def get_all_examples() -> Generator[Path, Any, None]: + """Get all examples files path in examples directory.""" + return (ex for ex in path_example.iterdir() if ex.is_file()) + + +def get_tasks(ignore_name: set = None) -> Generator[Path, Any, None]: + """Get all tasks files path in src/pydolphinscheduler/tasks directory.""" + if not ignore_name: + ignore_name = set() + return ( + ex + for ex in path_code_tasks.iterdir() + if ex.is_file() and ex.name not in ignore_name + ) + + +def get_doc_tasks(ignore_name: set = None) -> Generator[Path, Any, None]: + """Get all tasks document path in docs/source/tasks directory.""" + if not ignore_name: + ignore_name = set() + return ( + ex + for ex in path_doc_tasks.iterdir() + if ex.is_file() and ex.name not in ignore_name + ) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/testing/task.py b/dolphinscheduler-python/pydolphinscheduler/tests/testing/task.py new file mode 100644 index 0000000000000000000000000000000000000000..e0affc9f851f39d3d4dfa6dc5f8457cf5e471edd --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/testing/task.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Mock class Task for other test.""" + +import uuid + +from pydolphinscheduler.core.task import Task as SourceTask + + +class Task(SourceTask): + """Mock class :class:`pydolphinscheduler.core.task.Task` for unittest.""" + + DEFAULT_VERSION = 1 + + def gen_code_and_version(self): + """Mock java gateway code and version, convenience method for unittest.""" + return uuid.uuid1().time, self.DEFAULT_VERSION diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/utils/__init__.py b/dolphinscheduler-python/pydolphinscheduler/tests/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..119f825bc026ad4d7dcbfa6714bdc21e06291afc --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/utils/__init__.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Init tests for utils package.""" diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_date.py b/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_date.py new file mode 100644 index 0000000000000000000000000000000000000000..b9f8ce5ff36ed5a6dad65161d489c3be32cebf45 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_date.py @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Test utils.date module.""" + +from datetime import datetime + +import pytest + +from pydolphinscheduler.utils.date import FMT_STD, conv_from_str, conv_to_schedule + +curr_date = datetime.now() + + +@pytest.mark.parametrize( + "src,expect", + [ + (curr_date, curr_date.strftime(FMT_STD)), + (datetime(2021, 1, 1), "2021-01-01 00:00:00"), + (datetime(2021, 1, 1, 1), "2021-01-01 01:00:00"), + (datetime(2021, 1, 1, 1, 1), "2021-01-01 01:01:00"), + (datetime(2021, 1, 1, 1, 1, 1), "2021-01-01 01:01:01"), + (datetime(2021, 1, 1, 1, 1, 1, 1), "2021-01-01 01:01:01"), + ], +) +def test_conv_to_schedule(src: datetime, expect: str) -> None: + """Test function conv_to_schedule.""" + assert expect == conv_to_schedule(src) + + +@pytest.mark.parametrize( + "src,expect", + [ + ("2021-01-01", datetime(2021, 1, 1)), + ("2021/01/01", datetime(2021, 1, 1)), + ("20210101", datetime(2021, 1, 1)), + ("2021-01-01 01:01:01", datetime(2021, 1, 1, 1, 1, 1)), + ("2021/01/01 01:01:01", datetime(2021, 1, 1, 1, 1, 1)), + ("20210101 010101", datetime(2021, 1, 1, 1, 1, 1)), + ], +) +def test_conv_from_str_success(src: str, expect: datetime) -> None: + """Test function conv_from_str success case.""" + assert expect == conv_from_str( + src + ), f"Function conv_from_str convert {src} not expect to {expect}." + + +@pytest.mark.parametrize( + "src", + [ + "2021-01-01 010101", + "2021:01:01", + "202111", + "20210101010101", + "2021:01:01 01:01:01", + ], +) +def test_conv_from_str_not_impl(src: str) -> None: + """Test function conv_from_str fail case.""" + with pytest.raises( + NotImplementedError, match=".*? could not be convert to datetime for now." 
+ ): + conv_from_str(src) diff --git a/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_string.py b/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_string.py new file mode 100644 index 0000000000000000000000000000000000000000..2ccd206df18af2687e668dd3d0f8fee9a3aa77d5 --- /dev/null +++ b/dolphinscheduler-python/pydolphinscheduler/tests/utils/test_string.py @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Test utils.string module.""" + +import pytest + +from pydolphinscheduler.utils.string import attr2camel, class_name2camel, snake2camel + + +@pytest.mark.parametrize( + "snake, expect", + [ + ("snake_case", "snakeCase"), + ("snake_123case", "snake123Case"), + ("snake_c_a_s_e", "snakeCASE"), + ("snake__case", "snakeCase"), + ("snake_case_case", "snakeCaseCase"), + ("_snake_case", "SnakeCase"), + ("__snake_case", "SnakeCase"), + ("Snake_case", "SnakeCase"), + ], +) +def test_snake2camel(snake: str, expect: str): + """Test function snake2camel, this is a base function for utils.string.""" + assert expect == snake2camel( + snake + ), f"Test case {snake} do no return expect result {expect}." 
+ + +@pytest.mark.parametrize( + "attr, expects", + [ + # source attribute, (true expect, false expect), + ("snake_case", ("snakeCase", "snakeCase")), + ("snake_123case", ("snake123Case", "snake123Case")), + ("snake_c_a_s_e", ("snakeCASE", "snakeCASE")), + ("snake__case", ("snakeCase", "snakeCase")), + ("snake_case_case", ("snakeCaseCase", "snakeCaseCase")), + ("_snake_case", ("snakeCase", "SnakeCase")), + ("__snake_case", ("snakeCase", "SnakeCase")), + ("Snake_case", ("SnakeCase", "SnakeCase")), + ], +) +def test_attr2camel(attr: str, expects: tuple): + """Test function attr2camel.""" + for idx, expect in enumerate(expects): + include_private = idx % 2 == 0 + assert expect == attr2camel( + attr, include_private + ), f"Test case {attr} do no return expect result {expect} when include_private is {include_private}." + + +@pytest.mark.parametrize( + "class_name, expect", + [ + ("snake_case", "snakeCase"), + ("snake_123case", "snake123Case"), + ("snake_c_a_s_e", "snakeCASE"), + ("snake__case", "snakeCase"), + ("snake_case_case", "snakeCaseCase"), + ("_snake_case", "snakeCase"), + ("_Snake_case", "snakeCase"), + ("__snake_case", "snakeCase"), + ("__Snake_case", "snakeCase"), + ("Snake_case", "snakeCase"), + ], +) +def test_class_name2camel(class_name: str, expect: str): + """Test function class_name2camel.""" + assert expect == class_name2camel( + class_name + ), f"Test case {class_name} do no return expect result {expect}." 
diff --git a/dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java b/dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java new file mode 100644 index 0000000000000000000000000000000000000000..a12ff5bfd35111a928868d683bd3c2a11648760a --- /dev/null +++ b/dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/PythonGatewayServer.java @@ -0,0 +1,540 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server; + +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ExecutorService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.service.QueueService; +import org.apache.dolphinscheduler.api.service.ResourcesService; +import org.apache.dolphinscheduler.api.service.SchedulerService; +import org.apache.dolphinscheduler.api.service.TaskDefinitionService; +import org.apache.dolphinscheduler.api.service.TenantService; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.RunMode; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Queue; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import 
org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper; +import org.apache.dolphinscheduler.server.config.PythonGatewayConfig; +import org.apache.dolphinscheduler.spi.enums.ResourceType; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import javax.annotation.PostConstruct; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.FilterType; + +import py4j.GatewayServer; + +import org.apache.commons.collections.CollectionUtils; + +@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = { + @ComponentScan.Filter(type = FilterType.REGEX, pattern = { + "org.apache.dolphinscheduler.server.master.*", + "org.apache.dolphinscheduler.server.worker.*", + "org.apache.dolphinscheduler.server.monitor.*", + "org.apache.dolphinscheduler.server.log.*", + "org.apache.dolphinscheduler.alert.*" + }) +}) +public class PythonGatewayServer extends SpringBootServletInitializer { + private static final Logger logger = LoggerFactory.getLogger(PythonGatewayServer.class); + + private static final WarningType DEFAULT_WARNING_TYPE = WarningType.NONE; + private static final int DEFAULT_WARNING_GROUP_ID = 0; + private static final FailureStrategy DEFAULT_FAILURE_STRATEGY = FailureStrategy.CONTINUE; + private static final Priority DEFAULT_PRIORITY = Priority.MEDIUM; + private static final Long 
DEFAULT_ENVIRONMENT_CODE = -1L; + + private static final TaskDependType DEFAULT_TASK_DEPEND_TYPE = TaskDependType.TASK_POST; + private static final RunMode DEFAULT_RUN_MODE = RunMode.RUN_MODE_SERIAL; + private static final int DEFAULT_DRY_RUN = 0; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private TenantService tenantService; + + @Autowired + private ExecutorService executorService; + + @Autowired + private ProcessDefinitionService processDefinitionService; + + @Autowired + private TaskDefinitionService taskDefinitionService; + + @Autowired + private UsersService usersService; + + @Autowired + private QueueService queueService; + + @Autowired + private ResourcesService resourceService; + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private TaskDefinitionMapper taskDefinitionMapper; + + @Autowired + private SchedulerService schedulerService; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Autowired + private PythonGatewayConfig pythonGatewayConfig; + + // TODO replace this user to build in admin user if we make sure build in one could not be change + private final User dummyAdminUser = new User() { + { + setId(Integer.MAX_VALUE); + setUserName("dummyUser"); + setUserType(UserType.ADMIN_USER); + } + }; + + private final Queue queuePythonGateway = new Queue() { + { + setId(Integer.MAX_VALUE); + setQueueName("queuePythonGateway"); + } + }; + + public String ping() { + return "PONG"; + } + + // TODO Should we import package in python client side? 
utils package can but service can not, why + // Core api + public Map genTaskCodeList(Integer genNum) { + return taskDefinitionService.genTaskCodeList(genNum); + } + + public Map getCodeAndVersion(String projectName, String taskName) throws CodeGenerateUtils.CodeGenerateException { + Project project = projectMapper.queryByName(projectName); + Map result = new HashMap<>(); + // project do not exists, mean task not exists too, so we should directly return init value + if (project == null) { + result.put("code", CodeGenerateUtils.getInstance().genCode()); + result.put("version", 0L); + return result; + } + TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), taskName); + if (taskDefinition == null) { + result.put("code", CodeGenerateUtils.getInstance().genCode()); + result.put("version", 0L); + } else { + result.put("code", taskDefinition.getCode()); + result.put("version", (long) taskDefinition.getVersion()); + } + return result; + } + + /** + * create or update process definition. 
+ * If process definition do not exists in Project=`projectCode` would create a new one + * If process definition already exists in Project=`projectCode` would update it + * + * @param userName user name who create or update process definition + * @param projectName project name which process definition belongs to + * @param name process definition name + * @param description description + * @param globalParams global params + * @param schedule schedule for process definition, will not set schedule if null, + * and if would always fresh exists schedule if not null + * @param locations locations json object about all tasks + * @param timeout timeout for process definition working, if running time longer than timeout, + * task will mark as fail + * @param workerGroup run task in which worker group + * @param tenantCode tenantCode + * @param taskRelationJson relation json for nodes + * @param taskDefinitionJson taskDefinitionJson + * @return create result code + */ + public Long createOrUpdateProcessDefinition(String userName, + String projectName, + String name, + String description, + String globalParams, + String schedule, + String locations, + int timeout, + String workerGroup, + String tenantCode, + String taskRelationJson, + String taskDefinitionJson) { + User user = usersService.queryUser(userName); + Project project = (Project) projectService.queryByName(user, projectName).get(Constants.DATA_LIST); + long projectCode = project.getCode(); + ProcessDefinition processDefinition = getProcessDefinition(user, projectCode, name); + long processDefinitionCode; + // create or update process definition + if (processDefinition != null) { + processDefinitionCode = processDefinition.getCode(); + // make sure process definition offline which could edit + processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.OFFLINE); + Map result = processDefinitionService.updateProcessDefinition(user, projectCode, name, 
processDefinitionCode, description, globalParams, + locations, timeout, tenantCode, taskRelationJson, taskDefinitionJson); + } else { + Map result = processDefinitionService.createProcessDefinition(user, projectCode, name, description, globalParams, + locations, timeout, tenantCode, taskRelationJson, taskDefinitionJson); + processDefinition = (ProcessDefinition) result.get(Constants.DATA_LIST); + processDefinitionCode = processDefinition.getCode(); + } + + // Fresh process definition schedule + if (schedule != null) { + createOrUpdateSchedule(user, projectCode, processDefinitionCode, schedule, workerGroup); + } + processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.ONLINE); + return processDefinitionCode; + } + + /** + * get process definition + * @param user user who create or update schedule + * @param projectCode project which process definition belongs to + * @param processDefinitionName process definition name + */ + private ProcessDefinition getProcessDefinition(User user, long projectCode, String processDefinitionName) { + Map verifyProcessDefinitionExists = processDefinitionService.verifyProcessDefinitionName(user, projectCode, processDefinitionName); + Status verifyStatus = (Status) verifyProcessDefinitionExists.get(Constants.STATUS); + + ProcessDefinition processDefinition = null; + if (verifyStatus == Status.PROCESS_DEFINITION_NAME_EXIST) { + processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName); + } else if (verifyStatus != Status.SUCCESS) { + String msg = "Verify process definition exists status is invalid, neither SUCCESS or PROCESS_DEFINITION_NAME_EXIST."; + logger.error(msg); + throw new RuntimeException(msg); + } + + return processDefinition; + } + + /** + * create or update process definition schedule. 
+ * It would always use latest schedule define in workflow-as-code, and set schedule online when + * it's not null + * + * @param user user who create or update schedule + * @param projectCode project which process definition belongs to + * @param processDefinitionCode process definition code + * @param schedule schedule expression + * @param workerGroup work group + */ + private void createOrUpdateSchedule(User user, + long projectCode, + long processDefinitionCode, + String schedule, + String workerGroup) { + Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode); + // create or update schedule + int scheduleId; + if (scheduleObj == null) { + processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.ONLINE); + Map result = schedulerService.insertSchedule(user, projectCode, processDefinitionCode, schedule, DEFAULT_WARNING_TYPE, + DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE); + scheduleId = (int) result.get("scheduleId"); + } else { + scheduleId = scheduleObj.getId(); + processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinitionCode, ReleaseState.OFFLINE); + schedulerService.updateSchedule(user, projectCode, scheduleId, schedule, DEFAULT_WARNING_TYPE, + DEFAULT_WARNING_GROUP_ID, DEFAULT_FAILURE_STRATEGY, DEFAULT_PRIORITY, workerGroup, DEFAULT_ENVIRONMENT_CODE); + } + schedulerService.setScheduleState(user, projectCode, scheduleId, ReleaseState.ONLINE); + } + + public void execProcessInstance(String userName, + String projectName, + String processDefinitionName, + String cronTime, + String workerGroup, + Integer timeout + ) { + User user = usersService.queryUser(userName); + Project project = projectMapper.queryByName(projectName); + ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(project.getCode(), processDefinitionName); + + // make sure process definition online 
+ processDefinitionService.releaseProcessDefinition(user, project.getCode(), processDefinition.getCode(), ReleaseState.ONLINE); + + executorService.execProcessInstance(user, + project.getCode(), + processDefinition.getCode(), + cronTime, + null, + DEFAULT_FAILURE_STRATEGY, + null, + DEFAULT_TASK_DEPEND_TYPE, + DEFAULT_WARNING_TYPE, + DEFAULT_WARNING_GROUP_ID, + DEFAULT_RUN_MODE, + DEFAULT_PRIORITY, + workerGroup, + DEFAULT_ENVIRONMENT_CODE, + timeout, + null, + null, + DEFAULT_DRY_RUN + ); + } + + // side object + public Map createProject(String userName, String name, String desc) { + User user = usersService.queryUser(userName); + return projectService.createProject(user, name, desc); + } + + public Map createQueue(String name, String queueName) { + Result verifyQueueExists = queueService.verifyQueue(name, queueName); + if (verifyQueueExists.getCode() == 0) { + return queueService.createQueue(dummyAdminUser, name, queueName); + } else { + Map result = new HashMap<>(); + // TODO function putMsg do not work here + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } + } + + public Map createTenant(String tenantCode, String desc, String queueName) throws Exception { + if (tenantService.checkTenantExists(tenantCode)) { + Map result = new HashMap<>(); + // TODO function putMsg do not work here + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } else { + Result verifyQueueExists = queueService.verifyQueue(queueName, queueName); + if (verifyQueueExists.getCode() == 0) { + // TODO why create do not return id? 
+ queueService.createQueue(dummyAdminUser, queueName, queueName); + } + Map result = queueService.queryQueueName(queueName); + List queueList = (List) result.get(Constants.DATA_LIST); + Queue queue = queueList.get(0); + return tenantService.createTenant(dummyAdminUser, tenantCode, queue.getId(), desc); + } + } + + public void createUser(String userName, + String userPassword, + String email, + String phone, + String tenantCode, + String queue, + int state) { + User user = usersService.queryUser(userName); + if (Objects.isNull(user)) { + Map tenantResult = tenantService.queryByTenantCode(tenantCode); + Tenant tenant = (Tenant) tenantResult.get(Constants.DATA_LIST); + usersService.createUser(userName, userPassword, email, tenant.getId(), phone, queue, state); + } + } + + /** + * Get datasource by given datasource name. It return map contain datasource id, type, name. + * Useful in Python API create sql task which need datasource information. + * + * @param datasourceName user who create or update schedule + */ + public Map getDatasourceInfo(String datasourceName) { + Map result = new HashMap<>(); + List dataSourceList = dataSourceMapper.queryDataSourceByName(datasourceName); + if (dataSourceList == null || dataSourceList.isEmpty()) { + String msg = String.format("Can not find any datasource by name %s", datasourceName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } else if (dataSourceList.size() > 1) { + String msg = String.format("Get more than one datasource by name %s", datasourceName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } else { + DataSource dataSource = dataSourceList.get(0); + result.put("id", dataSource.getId()); + result.put("type", dataSource.getType().name()); + result.put("name", dataSource.getName()); + } + return result; + } + + /** + * Get processDefinition by given processDefinitionName name. It return map contain processDefinition id, name, code. 
+ * Useful in Python API create subProcess task which need processDefinition information. + * + * @param userName user who create or update schedule + * @param projectName project name which process definition belongs to + * @param processDefinitionName process definition name + */ + public Map getProcessDefinitionInfo(String userName, String projectName, String processDefinitionName) { + Map result = new HashMap<>(); + + User user = usersService.queryUser(userName); + Project project = (Project) projectService.queryByName(user, projectName).get(Constants.DATA_LIST); + long projectCode = project.getCode(); + ProcessDefinition processDefinition = getProcessDefinition(user, projectCode, processDefinitionName); + // get process definition info + if (processDefinition != null) { + // make sure process definition online + processDefinitionService.releaseProcessDefinition(user, projectCode, processDefinition.getCode(), ReleaseState.ONLINE); + result.put("id", processDefinition.getId()); + result.put("name", processDefinition.getName()); + result.put("code", processDefinition.getCode()); + } else { + String msg = String.format("Can not find valid process definition by name %s", processDefinitionName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } + + return result; + } + + /** + * Get project, process definition, task code. + * Useful in Python API create dependent task which need processDefinition information. 
+ * + * @param projectName project name which process definition belongs to + * @param processDefinitionName process definition name + * @param taskName task name + */ + public Map getDependentInfo(String projectName, String processDefinitionName, String taskName) { + Map result = new HashMap<>(); + + Project project = projectMapper.queryByName(projectName); + if (project == null) { + String msg = String.format("Can not find valid project by name %s", projectName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } + long projectCode = project.getCode(); + result.put("projectCode", projectCode); + + ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, processDefinitionName); + if (processDefinition == null) { + String msg = String.format("Can not find valid process definition by name %s", processDefinitionName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } + result.put("processDefinitionCode", processDefinition.getCode()); + + if (taskName != null) { + TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(projectCode, taskName); + result.put("taskDefinitionCode", taskDefinition.getCode()); + } + return result; + } + + /** + * Get resource by given program type and full name. It return map contain resource id, name. + * Useful in Python API create flink or spark task which need processDefinition information. 
+ * + * @param programType program type one of SCALA, JAVA and PYTHON + * @param fullName full name of the resource + */ + public Map getResourcesFileInfo(String programType, String fullName) { + Map result = new HashMap<>(); + + Map resources = resourceService.queryResourceByProgramType(dummyAdminUser, ResourceType.FILE, ProgramType.valueOf(programType)); + List resourcesComponent = (List) resources.get(Constants.DATA_LIST); + List namedResources = resourcesComponent.stream().filter(s -> fullName.equals(s.getFullName())).collect(Collectors.toList()); + if (CollectionUtils.isEmpty(namedResources)) { + String msg = String.format("Can not find valid resource by program type %s and name %s", programType, fullName); + logger.error(msg); + throw new IllegalArgumentException(msg); + } + + result.put("id", namedResources.get(0).getId()); + result.put("name", namedResources.get(0).getName()); + return result; + } + + @PostConstruct + public void run() { + GatewayServer server; + try { + InetAddress gatewayHost = InetAddress.getByName(pythonGatewayConfig.getGatewayServerAddress()); + InetAddress pythonHost = InetAddress.getByName(pythonGatewayConfig.getPythonAddress()); + server = new GatewayServer( + this, + pythonGatewayConfig.getGatewayServerPort(), + pythonGatewayConfig.getPythonPort(), + gatewayHost, + pythonHost, + pythonGatewayConfig.getConnectTimeout(), + pythonGatewayConfig.getReadTimeout(), + null + ); + GatewayServer.turnLoggingOn(); + logger.info("PythonGatewayServer started on: " + gatewayHost.toString()); + server.start(); + } catch (UnknownHostException e) { + logger.error("exception occurred while constructing PythonGatewayServer().", e); + } + } + + public static void main(String[] args) { + new SpringApplicationBuilder(PythonGatewayServer.class).run(args); + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsDatasourceParamDTO.java 
b/dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/config/PythonGatewayConfig.java similarity index 32% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsDatasourceParamDTO.java rename to dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/config/PythonGatewayConfig.java index 747d3ea274878654a7521153495711577a6e5c02..e4ab09d1ce3653be956da65511b13dd21768b60b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseHdfsDatasourceParamDTO.java +++ b/dolphinscheduler-python/src/main/java/org/apache/dolphinscheduler/server/config/PythonGatewayConfig.java @@ -15,47 +15,79 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.server.config; -public abstract class BaseHdfsDatasourceParamDTO extends BaseDataSourceParamDTO { +import org.springframework.context.annotation.PropertySource; +import org.springframework.stereotype.Component; +import org.springframework.beans.factory.annotation.Value; - protected String principal; +@Component +@PropertySource(value = "python-gateway.properties") +public class PythonGatewayConfig { - protected String javaSecurityKrb5Conf; + @Value("${gateway.server.address:0.0.0.0}") + private String gatewayServerAddress; - protected String loginUserKeytabUsername; + @Value("${gateway.server.port:25333}") + private int gatewayServerPort; - protected String loginUserKeytabPath; + @Value("${python.address:127.0.0.1}") + private String pythonAddress; - public String getPrincipal() { - return principal; + @Value("${python.port:25334}") + private int pythonPort; + + @Value("${connect.timeout:0}") + private int connectTimeout; + + @Value("${read.timeout:0}") + private int readTimeout; + + public String getGatewayServerAddress() { + return gatewayServerAddress; + } + + public void setGatewayServerAddress(String gatewayServerAddress) { + 
this.gatewayServerAddress = gatewayServerAddress; + } + + public int getGatewayServerPort() { + return gatewayServerPort; + } + + public void setGatewayServerPort(int gatewayServerPort) { + this.gatewayServerPort = gatewayServerPort; + } + + public String getPythonAddress() { + return pythonAddress; } - public void setPrincipal(String principal) { - this.principal = principal; + public void setPythonAddress(String pythonAddress) { + this.pythonAddress = pythonAddress; } - public String getLoginUserKeytabUsername() { - return loginUserKeytabUsername; + public int getPythonPort() { + return pythonPort; } - public void setLoginUserKeytabUsername(String loginUserKeytabUsername) { - this.loginUserKeytabUsername = loginUserKeytabUsername; + public void setPythonPort(int pythonPort) { + this.pythonPort = pythonPort; } - public String getLoginUserKeytabPath() { - return loginUserKeytabPath; + public int getConnectTimeout() { + return connectTimeout; } - public void setLoginUserKeytabPath(String loginUserKeytabPath) { - this.loginUserKeytabPath = loginUserKeytabPath; + public void setConnectTimeout(int connectTimeout) { + this.connectTimeout = connectTimeout; } - public String getJavaSecurityKrb5Conf() { - return javaSecurityKrb5Conf; + public int getReadTimeout() { + return readTimeout; } - public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) { - this.javaSecurityKrb5Conf = javaSecurityKrb5Conf; + public void setReadTimeout(int readTimeout) { + this.readTimeout = readTimeout; } } diff --git a/dolphinscheduler-python/src/main/resources/logback-python-gateway.xml b/dolphinscheduler-python/src/main/resources/logback-python-gateway.xml new file mode 100644 index 0000000000000000000000000000000000000000..42e612c3851a2b973534d4b0d4b9a7008bcbec80 --- /dev/null +++ b/dolphinscheduler-python/src/main/resources/logback-python-gateway.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + 
${log.base}/dolphinscheduler-python-gateway.log + + ${log.base}/dolphinscheduler-python-gateway.%d{yyyy-MM-dd_HH}.%i.log + 20 + 64MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + + diff --git a/docker/build/conf/dolphinscheduler/alert.properties.tpl b/dolphinscheduler-python/src/main/resources/python-gateway.properties similarity index 43% rename from docker/build/conf/dolphinscheduler/alert.properties.tpl rename to dolphinscheduler-python/src/main/resources/python-gateway.properties index 1ff6fe7adb92aae1898c08219efcdeb643214ff0..40d587dbe2d2e8bdcba30416396b94769e525466 100644 --- a/docker/build/conf/dolphinscheduler/alert.properties.tpl +++ b/dolphinscheduler-python/src/main/resources/python-gateway.properties @@ -15,16 +15,27 @@ # limitations under the License. # -#This configuration file configures the configuration parameters related to the AlertServer. -#These parameters are only related to the AlertServer, and it has nothing to do with the specific Alert Plugin. -#eg : max retry num. -#eg : Alert Server Listener port +# Spring boot application server port +server.port=54321 -#alert.plugin.dir config the Alert Plugin dir . AlertServer while find and load the Alert Plugin Jar from this dir when deploy and start AlertServer on the server . -alert.plugin.dir=${ALERT_PLUGIN_DIR} +# The address of Python gateway server start. Set its value to `0.0.0.0` if your Python API run in different +# between Python gateway server. It could be be specific to other address like `127.0.0.1` or `localhost` +#gateway.server.address=0.0.0.0 -#maven.local.repository=/Users/gaojun/Documents/jianguoyun/localRepository +# The port of Python gateway server start. Define which port you could connect to Python gateway server from +# Python API side. 
+#gateway.server.port=25333 -#alert.plugin.binding config the Alert Plugin need be load when development and run in IDE -#alert.plugin.binding=\ -# ./dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml +# The address of Python callback client. +#python.address=127.0.0.1 + +# The port of Python callback client. +#python.port=25334 + +# Close connection of socket server if no other request accept after x milliseconds. Define value is (0 = infinite), +# and socket server would never close even though no requests accept +#connect.timeout=0 + +# Close each active connection of socket server if python program not active after x milliseconds. Define value is +# (0 = infinite), and socket server would never close even though no requests accept +#read.timeout=0 diff --git a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryPlugin.java b/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryPlugin.java deleted file mode 100644 index 85723ada09f69af236868b40c62b9b150bdf5f75..0000000000000000000000000000000000000000 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryPlugin.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.registry.zookeeper; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.register.RegistryFactory; - -import com.google.common.collect.ImmutableList; - -/** - * zookeeper registry plugin - */ -public class ZookeeperRegistryPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getRegisterFactorys() { - return ImmutableList.of(new ZookeeperRegistryFactory()); - } -} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-api/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..d72534908a075d93a62bf90410bab7180af82e0e --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/pom.xml @@ -0,0 +1,30 @@ + + + + + + dolphinscheduler-registry + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + 4.0.0 + + dolphinscheduler-registry-api + diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionListener.java new file mode 100644 index 0000000000000000000000000000000000000000..eaebc81482a937f6a86a4268880960191a47554b --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionListener.java @@ -0,0 +1,25 @@ +/* + * Licensed to Apache 
Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.registry.api; + +@FunctionalInterface +public interface ConnectionListener { + void onUpdate(ConnectionState newState); +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionState.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionState.java new file mode 100644 index 0000000000000000000000000000000000000000..fef3bcab79d4851b4423e1e068ba4e4a19ce4fd6 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/ConnectionState.java @@ -0,0 +1,27 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.registry.api; + +public enum ConnectionState { + CONNECTED, + RECONNECTED, + SUSPENDED, + DISCONNECTED +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Event.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Event.java new file mode 100644 index 0000000000000000000000000000000000000000..6afa5a2c26e71f0cf4266af3d4880bbb05bdb73a --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Event.java @@ -0,0 +1,125 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api; + +public class Event { + private String key; + private String path; + private String data; + private Type type; + + public Event(String key, String path, String data, Type type) { + this.key = key; + this.path = path; + this.data = data; + this.type = type; + } + + public Event() { + } + + public static EventBuilder builder() { + return new EventBuilder(); + } + + public String key() { + return this.key; + } + + public String path() { + return this.path; + } + + public String data() { + return this.data; + } + + public Type type() { + return this.type; + } + + public Event key(String key) { + this.key = key; + return this; + } + + public Event path(String path) { + this.path = path; + return this; + } + + public Event data(String data) { + this.data = data; + return this; + } + + public Event type(Type type) { + this.type = type; + return this; + } + + public String toString() { + return "Event(key=" + this.key() + ", path=" + this.path() + ", data=" + this.data() + ", type=" + this.type() + ")"; + } + + public enum Type { + ADD, + REMOVE, + UPDATE + } + + public static class EventBuilder { + private String key; + private String path; + private String data; + private Type type; + + EventBuilder() { + } + + public EventBuilder key(String key) { + this.key = key; + return this; + } + + public EventBuilder path(String path) { + this.path = path; + return this; + } + + public EventBuilder data(String data) { + this.data = data; + return this; + } + + public EventBuilder type(Type type) { + this.type = type; + return this; + } + + public Event build() { + return new Event(key, path, data, type); + } + + public String toString() { + return "Event.EventBuilder(key=" + this.key + ", path=" + this.path + ", data=" + this.data + ", type=" + this.type + ")"; + } + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java 
b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java new file mode 100644 index 0000000000000000000000000000000000000000..6057a7e2e4808c0ca07ce92360fbb7375edbd4f7 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/Registry.java @@ -0,0 +1,48 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api; + +import java.io.Closeable; +import java.util.Collection; +import java.util.Map; + +public interface Registry extends Closeable { + void start(Map config); + + boolean subscribe(String path, SubscribeListener listener); + + void unsubscribe(String path); + + void addConnectionStateListener(ConnectionListener listener); + + String get(String key); + + void put(String key, String value, boolean deleteOnDisconnect); + + void delete(String key); + + Collection children(String key); + + boolean exists(String key); + + boolean acquireLock(String key); + + boolean releaseLock(String key); +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryException.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryException.java new file mode 100644 index 0000000000000000000000000000000000000000..b88fe2540faa46178cb206da079479a8f6e9657d --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryException.java @@ -0,0 +1,31 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.dolphinscheduler.registry.api; + +public final class RegistryException extends RuntimeException { + + public RegistryException(String message, Throwable cause) { + super(message, cause); + } + + public RegistryException(String message) { + super(message); + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactory.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..6903e95bbd1335c348829620b957e80dc9b5aa5a --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactory.java @@ -0,0 +1,26 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api; + +public interface RegistryFactory { + String name(); + + Registry create(); +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactoryLoader.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactoryLoader.java new file mode 100644 index 0000000000000000000000000000000000000000..45d4596e289aecd63b67538a771d2a4a60dfac37 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryFactoryLoader.java @@ -0,0 +1,35 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api; + +import static java.util.stream.Collectors.toMap; + +import java.util.Map; +import java.util.ServiceLoader; +import java.util.function.Function; +import java.util.stream.StreamSupport; + +public final class RegistryFactoryLoader { + public static Map load() { + final ServiceLoader factories = ServiceLoader.load(RegistryFactory.class); + return StreamSupport.stream(factories.spliterator(), false) + .collect(toMap(RegistryFactory::name, Function.identity())); + } +} diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/SubscribeListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/SubscribeListener.java new file mode 100644 index 0000000000000000000000000000000000000000..2432eb1e2532957ac563b5027b78df7a23bf34ec --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/SubscribeListener.java @@ -0,0 +1,24 @@ +/* + * Licensed to Apache Software Foundation (ASF) under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Apache Software Foundation (ASF) licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.dolphinscheduler.registry.api; + +public interface SubscribeListener { + void notify(Event event); +} diff --git a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml similarity index 68% rename from dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml index 5ad7ee934e82a67a85973389d96d2cf1d2f799f4..7ce261069129b217d559ba5c9ad3ba7ff9193d8a 100644 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/pom.xml @@ -15,22 +15,21 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + - dolphinscheduler-registry-plugin + dolphinscheduler-registry-plugins org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - dolphinscheduler-registry-zookeeper - - dolphinscheduler-plugin + + org.apache.dolphinscheduler + dolphinscheduler-registry-api + org.apache.zookeeper @@ -57,30 +56,10 @@ slf4j-api - org.apache.curator curator-test test - - - junit - junit - test - - - - org.jacoco - org.jacoco.agent - runtime - test - - - - - dolphinscheduler-registry-zookeeper-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java similarity index 93% rename from 
dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java index 7abc859bf3fc75a7002fc277893629ce562a958a..ffe1f0edbd510563637e6ff16c987ac9ae046c85 100644 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConfiguration.java @@ -34,9 +34,8 @@ public enum ZookeeperConfiguration { MAX_RETRIES("max.retries", 5, Integer::valueOf), - //todo - SESSION_TIMEOUT_MS("session.timeout.ms", 1000, Integer::valueOf), - CONNECTION_TIMEOUT_MS("connection.timeout.ms", 1000, Integer::valueOf), + SESSION_TIMEOUT_MS("session.timeout.ms", 30000, Integer::valueOf), + CONNECTION_TIMEOUT_MS("connection.timeout.ms", 7500, Integer::valueOf), BLOCK_UNTIL_CONNECTED_WAIT_MS("block.until.connected.wait", 600, Integer::valueOf), ; diff --git a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java similarity index 44% rename from dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java rename to 
dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java index cda98ef0dacdec004af0a27f4db507d660ea19fa..7faa3db82b70a045f22b4a6771a495f44a1b013a 100644 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java @@ -17,40 +17,41 @@ package org.apache.dolphinscheduler.plugin.registry.zookeeper; -import org.apache.dolphinscheduler.spi.register.RegistryConnectListener; -import org.apache.dolphinscheduler.spi.register.RegistryConnectState; +import org.apache.dolphinscheduler.registry.api.ConnectionListener; +import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -public class ZookeeperConnectionStateListener implements ConnectionStateListener { +public final class ZookeeperConnectionStateListener implements ConnectionStateListener { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ZookeeperConnectionStateListener.class); - private static final Logger logger = LoggerFactory.getLogger(ZookeeperConnectionStateListener.class); + private final ConnectionListener listener; - private RegistryConnectListener registryConnectListener; - - public ZookeeperConnectionStateListener(RegistryConnectListener registryConnectListener) { - this.registryConnectListener = registryConnectListener; + public ZookeeperConnectionStateListener(ConnectionListener 
listener) { + this.listener = listener; } @Override - public void stateChanged(CuratorFramework client, ConnectionState newState) { - - if (newState == ConnectionState.LOST) { - logger.error("connection lost from zookeeper"); - registryConnectListener.notify(RegistryConnectState.LOST); - } else if (newState == ConnectionState.RECONNECTED) { - logger.info("reconnected to zookeeper"); - registryConnectListener.notify(RegistryConnectState.RECONNECTED); - } else if (newState == ConnectionState.SUSPENDED) { - logger.warn("zookeeper connection SUSPENDED"); - registryConnectListener.notify(RegistryConnectState.SUSPENDED); + public void stateChanged(CuratorFramework client, + org.apache.curator.framework.state.ConnectionState newState) { + switch (newState) { + case LOST: + log.warn("Registry disconnected"); + listener.onUpdate(ConnectionState.DISCONNECTED); + break; + case RECONNECTED: + log.info("Registry reconnected"); + listener.onUpdate(ConnectionState.RECONNECTED); + break; + case SUSPENDED: + log.warn("Registry suspended"); + listener.onUpdate(ConnectionState.SUSPENDED); + break; + default: + break; } - } - } diff --git a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java similarity index 53% rename from dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java index 64b0b13d115c53aea9fe1d786e2f8f632dd6c661..89cb28006d32a957ebb3cd77b4f00841e7ea3860 100644 --- 
a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistry.java @@ -28,21 +28,19 @@ import static org.apache.dolphinscheduler.plugin.registry.zookeeper.ZookeeperCon import static java.util.concurrent.TimeUnit.MILLISECONDS; -import org.apache.dolphinscheduler.spi.register.DataChangeEvent; -import org.apache.dolphinscheduler.spi.register.ListenerManager; -import org.apache.dolphinscheduler.spi.register.Registry; -import org.apache.dolphinscheduler.spi.register.RegistryConnectListener; -import org.apache.dolphinscheduler.spi.register.RegistryException; -import org.apache.dolphinscheduler.spi.register.SubscribeListener; +import org.apache.dolphinscheduler.registry.api.ConnectionListener; +import org.apache.dolphinscheduler.registry.api.Event; +import org.apache.dolphinscheduler.registry.api.Registry; +import org.apache.dolphinscheduler.registry.api.RegistryException; +import org.apache.dolphinscheduler.registry.api.SubscribeListener; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; -import org.apache.curator.framework.api.transaction.TransactionOp; +import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCache; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.curator.framework.recipes.cache.TreeCacheListener; import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.utils.CloseableUtils; @@ -56,28 +54,18 @@ import java.util.Comparator; import 
java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import com.google.common.base.Strings; -public class ZookeeperRegistry implements Registry { +public final class ZookeeperRegistry implements Registry { private CuratorFramework client; - /** - * treeCache map - * k-subscribe key - * v-listener - */ - private Map treeCacheMap = new HashMap<>(); - - /** - * Distributed lock map - */ - private ThreadLocal> threadLocalLockMap = new ThreadLocal<>(); - - /** - * build retry policy - */ + private final Map treeCacheMap = new ConcurrentHashMap<>(); + + private static final ThreadLocal> threadLocalLockMap = new ThreadLocal<>(); + private static RetryPolicy buildRetryPolicy(Map registerData) { int baseSleepTimeMs = BASE_SLEEP_TIME.getParameterValue(registerData.get(BASE_SLEEP_TIME.getName())); int maxRetries = MAX_RETRIES.getParameterValue(registerData.get(MAX_RETRIES.getName())); @@ -85,35 +73,32 @@ public class ZookeeperRegistry implements Registry { return new ExponentialBackoffRetry(baseSleepTimeMs, maxRetries, maxSleepMs); } - /** - * build digest - */ private static void buildDigest(CuratorFrameworkFactory.Builder builder, String digest) { builder.authorization(DIGEST.getName(), digest.getBytes(StandardCharsets.UTF_8)) - .aclProvider(new ACLProvider() { - @Override - public List getDefaultAcl() { - return ZooDefs.Ids.CREATOR_ALL_ACL; - } - - @Override - public List getAclForPath(final String path) { - return ZooDefs.Ids.CREATOR_ALL_ACL; - } - }); + .aclProvider(new ACLProvider() { + @Override + public List getDefaultAcl() { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + + @Override + public List getAclForPath(final String path) { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + }); } @Override - public void init(Map registerData) { - - CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder() - .connectString(SERVERS.getParameterValue(registerData.get(SERVERS.getName()))) - 
.retryPolicy(buildRetryPolicy(registerData)) - .namespace(NAME_SPACE.getParameterValue(registerData.get(NAME_SPACE.getName()))) - .sessionTimeoutMs(SESSION_TIMEOUT_MS.getParameterValue(registerData.get(SESSION_TIMEOUT_MS.getName()))) - .connectionTimeoutMs(CONNECTION_TIMEOUT_MS.getParameterValue(registerData.get(CONNECTION_TIMEOUT_MS.getName()))); - - String digest = DIGEST.getParameterValue(registerData.get(DIGEST.getName())); + public void start(Map config) { + CuratorFrameworkFactory.Builder builder = + CuratorFrameworkFactory.builder() + .connectString(SERVERS.getParameterValue(config.get(SERVERS.getName()))) + .retryPolicy(buildRetryPolicy(config)) + .namespace(NAME_SPACE.getParameterValue(config.get(NAME_SPACE.getName()))) + .sessionTimeoutMs(SESSION_TIMEOUT_MS.getParameterValue(config.get(SESSION_TIMEOUT_MS.getName()))) + .connectionTimeoutMs(CONNECTION_TIMEOUT_MS.getParameterValue(config.get(CONNECTION_TIMEOUT_MS.getName()))); + + String digest = DIGEST.getParameterValue(config.get(DIGEST.getName())); if (!Strings.isNullOrEmpty(digest)) { buildDigest(builder, digest); } @@ -121,7 +106,7 @@ public class ZookeeperRegistry implements Registry { client.start(); try { - if (!client.blockUntilConnected(BLOCK_UNTIL_CONNECTED_WAIT_MS.getParameterValue(registerData.get(BLOCK_UNTIL_CONNECTED_WAIT_MS.getName())), MILLISECONDS)) { + if (!client.blockUntilConnected(BLOCK_UNTIL_CONNECTED_WAIT_MS.getParameterValue(config.get(BLOCK_UNTIL_CONNECTED_WAIT_MS.getName())), MILLISECONDS)) { client.close(); throw new RegistryException("zookeeper connect timeout"); } @@ -132,57 +117,26 @@ public class ZookeeperRegistry implements Registry { } @Override - public void addConnectionStateListener(RegistryConnectListener registryConnectListener) { - client.getConnectionStateListenable().addListener(new ZookeeperConnectionStateListener(registryConnectListener)); + public void addConnectionStateListener(ConnectionListener listener) { + 
client.getConnectionStateListenable().addListener(new ZookeeperConnectionStateListener(listener)); } @Override - public boolean subscribe(String path, SubscribeListener subscribeListener) { - if (null != treeCacheMap.get(path)) { - return false; - } - TreeCache treeCache = new TreeCache(client, path); - TreeCacheListener treeCacheListener = (client, event) -> { - TreeCacheEvent.Type type = event.getType(); - DataChangeEvent eventType = null; - String dataPath = null; - switch (type) { - case NODE_ADDED: - - dataPath = event.getData().getPath(); - eventType = DataChangeEvent.ADD; - break; - case NODE_UPDATED: - eventType = DataChangeEvent.UPDATE; - dataPath = event.getData().getPath(); - - break; - case NODE_REMOVED: - eventType = DataChangeEvent.REMOVE; - dataPath = event.getData().getPath(); - break; - default: - } - if (null != eventType && null != dataPath) { - ListenerManager.dataChange(path, dataPath, eventType); - } - }; - treeCache.getListenable().addListener(treeCacheListener); - treeCacheMap.put(path, treeCache); + public boolean subscribe(String path, SubscribeListener listener) { + final TreeCache treeCache = treeCacheMap.computeIfAbsent(path, $ -> new TreeCache(client, path)); + treeCache.getListenable().addListener(($, event) -> listener.notify(new EventAdaptor(event, path))); try { treeCache.start(); } catch (Exception e) { - throw new RegistryException("start zookeeper tree cache error", e); + treeCacheMap.remove(path); + throw new RegistryException("Failed to subscribe listener for key: " + path, e); } - ListenerManager.addListener(path, subscribeListener); return true; } @Override public void unsubscribe(String path) { - TreeCache treeCache = treeCacheMap.get(path); - treeCache.close(); - ListenerManager.removeListener(path); + CloseableUtils.closeQuietly(treeCacheMap.get(path)); } @Override @@ -195,12 +149,7 @@ public class ZookeeperRegistry implements Registry { } @Override - public void remove(String key) { - delete(key); - } - - @Override - 
public boolean isExisted(String key) { + public boolean exists(String key) { try { return null != client.checkExists().forPath(key); } catch (Exception e) { @@ -209,47 +158,22 @@ public class ZookeeperRegistry implements Registry { } @Override - public void persist(String key, String value) { - try { - if (isExisted(key)) { - update(key, value); - return; - } - client.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + public void put(String key, String value, boolean deleteOnDisconnect) { + final CreateMode mode = deleteOnDisconnect ? CreateMode.EPHEMERAL : CreateMode.PERSISTENT; - } catch (Exception e) { - throw new RegistryException("zookeeper persist error", e); - } - } - - @Override - public void persistEphemeral(String key, String value) { try { - if (isExisted(key)) { - update(key, value); - return; - } - client.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + client.create() + .orSetData() + .creatingParentsIfNeeded() + .withMode(mode) + .forPath(key, value.getBytes(StandardCharsets.UTF_8)); } catch (Exception e) { - throw new RegistryException("zookeeper persist ephemeral error", e); + throw new RegistryException("Failed to put registry key: " + key, e); } } @Override - public void update(String key, String value) { - try { - if (!isExisted(key)) { - return; - } - TransactionOp transactionOp = client.transactionOp(); - client.transaction().forOperations(transactionOp.check().forPath(key), transactionOp.setData().forPath(key, value.getBytes(StandardCharsets.UTF_8))); - } catch (Exception e) { - throw new RegistryException("zookeeper update error", e); - } - } - - @Override - public List getChildren(String key) { + public List children(String key) { try { List result = client.getChildren().forPath(key); result.sort(Comparator.reverseOrder()); @@ -260,23 +184,20 @@ public class ZookeeperRegistry implements Registry { 
} @Override - public boolean delete(String nodePath) { + public void delete(String nodePath) { try { client.delete() - .deletingChildrenIfNeeded() - .forPath(nodePath); - } catch (KeeperException.NoNodeException ignore) { - // the node is not exist, we can believe the node has been removed - + .deletingChildrenIfNeeded() + .forPath(nodePath); + } catch (KeeperException.NoNodeException ignored) { + // Is already deleted or does not exist } catch (Exception e) { - throw new RegistryException("zookeeper delete key error", e); + throw new RegistryException("Failed to delete registry key: " + nodePath, e); } - return true; } @Override public boolean acquireLock(String key) { - InterProcessMutex interProcessMutex = new InterProcessMutex(client, key); try { interProcessMutex.acquire(); @@ -293,7 +214,6 @@ public class ZookeeperRegistry implements Registry { throw new RegistryException("zookeeper release lock error", e); } } - } @Override @@ -313,22 +233,35 @@ public class ZookeeperRegistry implements Registry { return true; } - public CuratorFramework getClient() { - return client; - } - @Override public void close() { - treeCacheMap.forEach((key, value) -> value.close()); - waitForCacheClose(500); + treeCacheMap.values().forEach(CloseableUtils::closeQuietly); CloseableUtils.closeQuietly(client); } - private void waitForCacheClose(long millis) { - try { - Thread.sleep(millis); - } catch (final InterruptedException ex) { - Thread.currentThread().interrupt(); + static final class EventAdaptor extends Event { + public EventAdaptor(TreeCacheEvent event, String key) { + key(key); + + switch (event.getType()) { + case NODE_ADDED: + type(Type.ADD); + break; + case NODE_UPDATED: + type(Type.UPDATE); + break; + case NODE_REMOVED: + type(Type.REMOVE); + break; + default: + break; + } + + final ChildData data = event.getData(); + if (data != null) { + path(data.getPath()); + data(new String(data.getData())); + } } } } diff --git 
a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java similarity index 76% rename from dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java index 1ecf3e05b1f728072c7368263eb05c40274a0fdd..949df21beed86e2741cdd89a4249c3a3dac1037a 100644 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryFactory.java @@ -17,16 +17,16 @@ package org.apache.dolphinscheduler.plugin.registry.zookeeper; -import org.apache.dolphinscheduler.spi.register.Registry; -import org.apache.dolphinscheduler.spi.register.RegistryFactory; +import org.apache.dolphinscheduler.registry.api.Registry; +import org.apache.dolphinscheduler.registry.api.RegistryFactory; -/** - * Zookeeper registry factory - */ -public class ZookeeperRegistryFactory implements RegistryFactory { +import com.google.auto.service.AutoService; + +@AutoService(RegistryFactory.class) +public final class ZookeeperRegistryFactory implements RegistryFactory { @Override - public String getName() { + public String name() { return "zookeeper"; } diff --git 
a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java similarity index 87% rename from dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java rename to dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java index 900c7e4173a0a48d0d9f332ffca036c44c8fa9f3..8442c02c7066911d9fe3112e1a6166097152d4d9 100644 --- a/dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/test/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperRegistryTest.java @@ -17,8 +17,8 @@ package org.apache.dolphinscheduler.plugin.registry.zookeeper; -import org.apache.dolphinscheduler.spi.register.DataChangeEvent; -import org.apache.dolphinscheduler.spi.register.SubscribeListener; +import org.apache.dolphinscheduler.registry.api.Event; +import org.apache.dolphinscheduler.registry.api.SubscribeListener; import org.apache.curator.test.TestingServer; @@ -50,18 +50,18 @@ public class ZookeeperRegistryTest { server = new TestingServer(true); Map registryConfig = new HashMap<>(); registryConfig.put(ZookeeperConfiguration.SERVERS.getName(), server.getConnectString()); - registry.init(registryConfig); - registry.persist("/sub", ""); + registry.start(registryConfig); + registry.put("/sub", "", false); } @Test public void persistTest() { - 
registry.persist("/nodes/m1", ""); - registry.persist("/nodes/m2", ""); - Assert.assertEquals(Arrays.asList("m2", "m1"), registry.getChildren("/nodes")); - Assert.assertTrue(registry.isExisted("/nodes/m1")); + registry.put("/nodes/m1", "", false); + registry.put("/nodes/m2", "", false); + Assert.assertEquals(Arrays.asList("m2", "m1"), registry.children("/nodes")); + Assert.assertTrue(registry.exists("/nodes/m1")); registry.delete("/nodes/m2"); - Assert.assertFalse(registry.isExisted("/nodes/m2")); + Assert.assertFalse(registry.exists("/nodes/m2")); } @Test @@ -112,10 +112,9 @@ public class ZookeeperRegistryTest { } - class TestListener implements SubscribeListener { - + static class TestListener implements SubscribeListener { @Override - public void notify(String path, DataChangeEvent dataChangeEvent) { + public void notify(Event event) { logger.info("I'm test listener"); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..3d85e1c7a32064de9772320cad9cb13f7742cca6 --- /dev/null +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/pom.xml @@ -0,0 +1,34 @@ + + + + + + dolphinscheduler-registry + org.apache.dolphinscheduler + 2.0.10-SNAPSHOT + + dolphinscheduler-registry-plugins + 4.0.0 + pom + + + dolphinscheduler-registry-zookeeper + + diff --git a/dolphinscheduler-registry-plugin/pom.xml b/dolphinscheduler-registry/pom.xml similarity index 60% rename from dolphinscheduler-registry-plugin/pom.xml rename to dolphinscheduler-registry/pom.xml index 2f8bb0d2d1dadded1a3abc379054505e5ad7bc06..a4399ee6d40667be1dcaf5ffdea598ad8514863a 100644 --- a/dolphinscheduler-registry-plugin/pom.xml +++ b/dolphinscheduler-registry/pom.xml @@ -15,29 +15,18 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 - org.apache.dolphinscheduler - dolphinscheduler-registry-plugin + dolphinscheduler-registry pom - - - - org.apache.dolphinscheduler - dolphinscheduler-spi - provided - - - - dolphinscheduler-registry-zookeeper + dolphinscheduler-registry-api + dolphinscheduler-registry-plugins - \ No newline at end of file + diff --git a/dolphinscheduler-remote/pom.xml b/dolphinscheduler-remote/pom.xml index ada20e32f5a8f3beefac35cbace87af6c49102c8..a1dafcc2d9ed7e6d875a16a2c8ab063f7defb5a0 100644 --- a/dolphinscheduler-remote/pom.xml +++ b/dolphinscheduler-remote/pom.xml @@ -15,12 +15,11 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -28,10 +27,6 @@ dolphinscheduler-remote - - UTF-8 - - @@ -67,19 +62,6 @@ reflections - - junit - junit - test - - - - org.jacoco - org.jacoco.agent - runtime - test - - com.google.guava guava diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CacheExpireCommand.java similarity index 44% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java rename to dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CacheExpireCommand.java index 8f61fe2f9ddcc3d18f338baee23925d0fe1cf198..a32d4fce13d9e98081f852d2cb3c86f9a1497ea3 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertInfo.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CacheExpireCommand.java @@ -15,38 +15,53 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.spi.alert; +package org.apache.dolphinscheduler.remote.command; -import java.util.Map; +import org.apache.dolphinscheduler.common.enums.CacheType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.io.Serializable; /** - * AlertInfo + * db task ack request command */ -public class AlertInfo { +public class CacheExpireCommand implements Serializable { - /** - * all params this plugin need is in alertProps - */ - private Map alertParams; + private CacheType cacheType; + private String cacheKey; - /** - * the alert content - */ - private AlertData alertData; + public CacheExpireCommand() { + super(); + } + + public CacheExpireCommand(CacheType cacheType, String cacheKey) { + this.cacheType = cacheType; + this.cacheKey = cacheKey; + } - public Map getAlertParams() { - return alertParams; + public CacheType getCacheType() { + return cacheType; } - public void setAlertParams(Map alertParams) { - this.alertParams = alertParams; + public String getCacheKey() { + return cacheKey; } - public AlertData getAlertData() { - return alertData; + /** + * package command + * + * @return command + */ + public Command convert2Command() { + Command command = new Command(); + command.setType(CommandType.CACHE_EXPIRE); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + return command; } - public void setAlertData(AlertData alertData) { - this.alertData = alertData; + @Override + public String toString() { + return String.format("CacheExpireCommand{CacheType=%s, cacheKey=%s}", cacheType, cacheKey); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java index 9baa321a9e6f7254580bd0dd061f8956b0413c2a..1e9626a6dbbaf089d1df76b189f4dc85527743e1 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java +++ 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java @@ -43,6 +43,11 @@ public class Command implements Serializable { */ private CommandType type; + /** + * gen command time millis + */ + private long genCommandTimeMillis; + /** * request unique identification */ @@ -66,6 +71,14 @@ public class Command implements Serializable { this.type = type; } + public long getGenCommandTimeMillis() { + return genCommandTimeMillis; + } + + public void setGenCommandTimeMillis(long genCommandTimeMillis) { + this.genCommandTimeMillis = genCommandTimeMillis; + } + public long getOpaque() { return opaque; } @@ -115,7 +128,8 @@ public class Command implements Serializable { @Override public String toString() { - return "Command [type=" + type + ", opaque=" + opaque + ", bodyLen=" + (body == null ? 0 : body.length) + "]"; + return "Command [type=" + type + ", opaque=" + opaque + ", genCommandTimeMillis=" + genCommandTimeMillis + + ", bodyLen=" + (body == null ? 0 : body.length) + "]"; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java index 4301910101455ab3d68d7c6b655098567aa0537d..6c675189dd6a838737de7d6edc7595419856e64a 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java @@ -98,6 +98,21 @@ public enum CommandType { */ TASK_KILL_RESPONSE, + /** + * kill task response ack + */ + TASK_KILL_RESPONSE_ACK, + + /** + * task recall + */ + TASK_RECALL, + + /** + * task recall ack + */ + TASK_RECALL_ACK, + /** * HEART_BEAT */ @@ -126,7 +141,7 @@ public enum CommandType { /** * process host update */ - PROCESS_HOST_UPDATE_REQUST, + PROCESS_HOST_UPDATE_REQUEST, /** * process host update response @@ -136,5 +151,10 @@ public enum 
CommandType { /** * state event request */ - STATE_EVENT_REQUEST; + STATE_EVENT_REQUEST, + + /** + * cache expire + */ + CACHE_EXPIRE; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateCommand.java index d70124b6f21b4b5df17ae9b5edd11ced00f9cdbf..4fc752e4b08ae59c5c080c11bec69618157946c6 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateCommand.java @@ -56,7 +56,7 @@ public class HostUpdateCommand implements Serializable { */ public Command convert2Command() { Command command = new Command(); - command.setType(CommandType.PROCESS_HOST_UPDATE_REQUST); + command.setType(CommandType.PROCESS_HOST_UPDATE_REQUEST); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateResponseCommand.java index ddf4fc22352fdd2b7fa1f16944f9a9dcbfda3cb7..b44856c83da3c9d27d8c6396f3a0bf73b64be4c0 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/HostUpdateResponseCommand.java @@ -66,7 +66,7 @@ public class HostUpdateResponseCommand implements Serializable { */ public Command convert2Command() { Command command = new Command(); - command.setType(CommandType.PROCESS_HOST_UPDATE_REQUST); + command.setType(CommandType.PROCESS_HOST_UPDATE_REQUEST); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/StateEventChangeCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/StateEventChangeCommand.java index 13cade405d79d0dd1e8d2d26655bb1d1dacdcb04..49ffdaae41f6a9cdabeaed1b1e177280e25841d8 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/StateEventChangeCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/StateEventChangeCommand.java @@ -46,8 +46,7 @@ public class StateEventChangeCommand implements Serializable { public StateEventChangeCommand(int sourceProcessInstanceId, int sourceTaskInstanceId, ExecutionStatus sourceStatus, int destProcessInstanceId, - int destTaskInstanceId - ) { + int destTaskInstanceId) { this.key = String.format("%d-%d-%d-%d", sourceProcessInstanceId, sourceTaskInstanceId, diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java index 96f15ad6a295fdf92d44db17573b933ca939c821..d6826e5902f3d077d9db0b5fadaf0f3f42913759 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java @@ -121,6 +121,7 @@ public class TaskExecuteAckCommand implements Serializable { public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_ACK); + command.setGenCommandTimeMillis(System.currentTimeMillis()); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java index 5b2e33922c05d96323911c9181b79ce56871a0fc..0d5a5c95c093034c0025b74d53fe28812e52ec36 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java @@ -54,6 +54,7 @@ public class TaskExecuteRequestCommand implements Serializable { public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_REQUEST); + command.setGenCommandTimeMillis(System.currentTimeMillis()); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java index f114a3fe2ce253e0720ff83f7a9a99591ce0699b..bee027cada379930552182e09561b505588c154d 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java @@ -45,13 +45,33 @@ public class TaskExecuteResponseCommand implements Serializable { /** * process instance id */ - private int processInstanceId; + private int processInstanceId; /** * status */ private int status; + /** + * startTime + */ + private Date startTime; + + /** + * host + */ + private String host; + + /** + * logPath + */ + private String logPath; + + /** + * end time + * executePath + */ + private String executePath; /** * end time @@ -59,7 +79,6 @@ public class TaskExecuteResponseCommand implements Serializable { @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date endTime; - /** 
* processId */ @@ -123,6 +142,38 @@ public class TaskExecuteResponseCommand implements Serializable { this.appIds = appIds; } + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String getExecutePath() { + return executePath; + } + + public void setExecutePath(String executePath) { + this.executePath = executePath; + } + /** * package response command * @return command @@ -130,6 +181,7 @@ public class TaskExecuteResponseCommand implements Serializable { public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_RESPONSE); + command.setGenCommandTimeMillis(System.currentTimeMillis()); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; @@ -138,12 +190,18 @@ public class TaskExecuteResponseCommand implements Serializable { @Override public String toString() { return "TaskExecuteResponseCommand{" - + "taskInstanceId=" + taskInstanceId - + ", status=" + status - + ", endTime=" + endTime - + ", processId=" + processId - + ", appIds='" + appIds + '\'' - + '}'; + + "taskInstanceId=" + taskInstanceId + + ", processInstanceId=" + processInstanceId + + ", status=" + status + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", host=" + host + + ", logPath=" + logPath + + ", executePath=" + executePath + + ", processId=" + processId + + ", appIds='" + appIds + '\'' + + ", varPool=" + varPool + + '}'; } public int getProcessInstanceId() { diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertResult.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillAckCommand.java 
similarity index 44% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertResult.java rename to dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillAckCommand.java index 6ce5425f7f5af56d1a7c6c737a04a13309ffd1db..61775d59f48efa4ae798a69ceb322f60811614be 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertResult.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillAckCommand.java @@ -15,35 +15,57 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.spi.alert; +package org.apache.dolphinscheduler.remote.command; -public class AlertResult { +import org.apache.dolphinscheduler.common.utils.JSONUtils; - private String status; +import java.io.Serializable; - private String message; +public class TaskKillAckCommand implements Serializable { - public String getStatus() { - return status; + private int taskInstanceId; + private int status; + + public TaskKillAckCommand() { + super(); } - public void setStatus(String status) { + public TaskKillAckCommand(int status, int taskInstanceId) { this.status = status; + this.taskInstanceId = taskInstanceId; } - public String getMessage() { - return message; + public int getTaskInstanceId() { + return taskInstanceId; } - public void setMessage(String message) { - this.message = message; + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; } - public AlertResult(String status, String message) { + public int getStatus() { + return status; + } + + public void setStatus(int status) { this.status = status; - this.message = message; } - public AlertResult() { + /** + * package response command + * + * @return command + */ + public Command convert2Command() { + Command command = new Command(); + command.setType(CommandType.TASK_KILL_RESPONSE_ACK); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + 
return command; + } + + @Override + public String toString() { + return "KillTaskAckCommand{" + "taskInstanceId=" + taskInstanceId + ", status=" + status + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java index 155b31785ef654179bcf7a621cfdea4e443b1258..72c8fc5518325a716043335112d4937a9fb778a6 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java @@ -47,6 +47,7 @@ public class TaskKillRequestCommand implements Serializable { public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_KILL_REQUEST); + command.setGenCommandTimeMillis(System.currentTimeMillis()); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java index 03ad4dd6940460d566d2be02e1242fd082a3fb21..c4d1f476af5b2f30857a8947b011fcd60f3fbb6e 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java @@ -48,6 +48,11 @@ public class TaskKillResponseCommand implements Serializable { */ private int processId; + /** + * process instance id + */ + private int processInstanceId; + /** * other resource manager appId , for example : YARN etc */ @@ -85,6 +90,14 @@ public class TaskKillResponseCommand implements Serializable { this.processId = processId; } + public int 
getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + public List getAppIds() { return appIds; } @@ -101,6 +114,7 @@ public class TaskKillResponseCommand implements Serializable { public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_KILL_RESPONSE); + command.setGenCommandTimeMillis(System.currentTimeMillis()); byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; @@ -114,6 +128,7 @@ public class TaskKillResponseCommand implements Serializable { + ", status=" + status + ", processId=" + processId + ", appIds=" + appIds + + ", processInstanceId=" + processInstanceId + '}'; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceParamDTO.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallAckCommand.java similarity index 41% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceParamDTO.java rename to dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallAckCommand.java index 615c5140743f2771f2bf6570994ffe84c1013191..702df2e5558f0d99f0ab373bcf4d751042e05d6b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/oracle/OracleDatasourceParamDTO.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallAckCommand.java @@ -15,41 +15,57 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.datasource.oracle; +package org.apache.dolphinscheduler.remote.command; -import org.apache.dolphinscheduler.common.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.common.enums.DbConnectType; -import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; -public class OracleDatasourceParamDTO extends BaseDataSourceParamDTO { +import java.io.Serializable; - private DbConnectType connectType; +public class TaskRecallAckCommand implements Serializable { - public DbConnectType getConnectType() { - return connectType; + private int taskInstanceId; + private int status; + + public TaskRecallAckCommand() { + super(); } - public void setConnectType(DbConnectType connectType) { - this.connectType = connectType; + public TaskRecallAckCommand(int status, int taskInstanceId) { + this.status = status; + this.taskInstanceId = taskInstanceId; } - @Override - public String toString() { - return "OracleDatasourceParamDTO{" - + "name='" + name + '\'' - + ", note='" + note + '\'' - + ", host='" + host + '\'' - + ", port=" + port - + ", database='" + database + '\'' - + ", userName='" + userName + '\'' - + ", password='" + password + '\'' - + ", connectType=" + connectType - + ", other='" + other + '\'' - + '}'; + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + /** + * package response command + * + * @return command + */ + public Command convert2Command(long opaque) { + Command command = new Command(opaque); + command.setType(CommandType.TASK_RECALL_ACK); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + return command; } @Override - public DbType getType() { - return DbType.ORACLE; + public String 
toString() { + return "TaskRecallAckCommand{" + "taskInstanceId=" + taskInstanceId + ", status=" + status + '}'; } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertData.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallCommand.java similarity index 32% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertData.java rename to dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallCommand.java index 5e0abf299cd6bf22a46823443e09660c9f8abedd..df99315f8b8243e25b97d6a6dd0bb995916e9b56 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertData.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskRecallCommand.java @@ -15,63 +15,99 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.spi.alert; +package org.apache.dolphinscheduler.remote.command; + +import org.apache.dolphinscheduler.common.enums.Event; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.io.Serializable; /** - * AlertData + * kill task recall command */ -public class AlertData { +public class TaskRecallCommand implements Serializable { /** - * alert primary key + * taskInstanceId */ - private int id; - /** - * title - */ - private String title; + private int taskInstanceId; + /** - * content + * host */ - private String content; + private String host; + /** - * log + * process instance id */ - private String log; + private int processInstanceId; + + private Event event; + + private int status; + + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public String getHost() { + return host; + } - public int getId() { - return id; + public void setHost(String host) { + this.host = host; } - public AlertData 
setId(int id) { - this.id = id; - return this; + public int getProcessInstanceId() { + return processInstanceId; } - public String getTitle() { - return title; + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; } - public AlertData setTitle(String title) { - this.title = title; - return this; + public Event getEvent() { + return event; } - public String getContent() { - return content; + public void setEvent(Event event) { + this.event = event; } - public AlertData setContent(String content) { - this.content = content; - return this; + public int getStatus() { + return status; } - public String getLog() { - return log; + public void setStatus(int status) { + this.status = status; + } + + /** + * package request command + * + * @return command + */ + public Command convert2Command() { + Command command = new Command(); + command.setType(CommandType.TASK_RECALL); + command.setGenCommandTimeMillis(System.currentTimeMillis()); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + return command; } - public AlertData setLog(String log) { - this.log = log; - return this; + @Override + public String toString() { + return "TaskRecallCommand{" + + "taskInstanceId=" + taskInstanceId + + ", host='" + host + '\'' + + ", processInstanceId=" + processInstanceId + + ", event=" + event + + ", status=" + status + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java index 359baefae6163fc98b145d1359dd77871dbe60eb..2163e9c7d883f09c28d48589ba11c490e1f0db93 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java @@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.remote.utils; import static 
org.apache.dolphinscheduler.common.Constants.COLON; import java.io.Serializable; +import java.util.Objects; /** * server address @@ -134,4 +135,20 @@ public class Host implements Serializable { + '}'; } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Host host = (Host) o; + return port == host.port && Objects.equals(address, host.address) && Objects.equals(ip, host.ip); + } + + @Override + public int hashCode() { + return Objects.hash(address, ip, port); + } } diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java index a3f6c7b582f67ebebec431b5eaf442e4c368f148..87b25189a373bacf10f16cc5c8d2b139836540b8 100644 --- a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.remote; +import io.netty.channel.Channel; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.Ping; @@ -27,22 +28,19 @@ import org.apache.dolphinscheduler.remote.future.InvokeCallback; import org.apache.dolphinscheduler.remote.future.ResponseFuture; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Host; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicLong; - import org.junit.Assert; import org.junit.Test; -import io.netty.channel.Channel; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicLong; /** - * netty remote client test + * netty remote client test */ 
public class NettyRemotingClientTest { /** - * test send sync + * test send sync */ @Test public void testSendSync() { @@ -73,10 +71,10 @@ public class NettyRemotingClientTest { } /** - * test sned async + * test sned async */ @Test - public void testSendAsync(){ + public void testSendAsync() { NettyServerConfig serverConfig = new NettyServerConfig(); NettyRemotingServer server = new NettyRemotingServer(serverConfig); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtilsTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/command/cache/CacheExpireCommandTest.java similarity index 58% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtilsTest.java rename to dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/command/cache/CacheExpireCommandTest.java index b8b49459eaa16eb18aefa2d259b2fb20562cd4a5..23512341eed4a4972648c3f91669b26079f97eee 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtilsTest.java +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/command/cache/CacheExpireCommandTest.java @@ -15,29 +15,22 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.utils; +package org.apache.dolphinscheduler.remote.command.cache; -import org.apache.dolphinscheduler.common.model.Server; - -import java.util.List; +import org.apache.dolphinscheduler.common.enums.CacheType; +import org.apache.dolphinscheduler.remote.command.CacheExpireCommand; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; -/** - * zookeeper monitor utils test - */ -@Ignore -public class RegistryCenterUtilsTest { +public class CacheExpireCommandTest { @Test - public void testGetMasterList(){ - List masterServerList = RegistryCenterUtils.getMasterServers(); - List workerServerList = RegistryCenterUtils.getWorkerServers(); - - Assert.assertTrue(masterServerList.size() >= 0); - Assert.assertTrue(workerServerList.size() >= 0); + public void testConvert2Command() { + CacheExpireCommand cacheExpireCommand = new CacheExpireCommand(CacheType.TENANT, "1"); + Command command = cacheExpireCommand.convert2Command(); + Assert.assertEquals(CommandType.CACHE_EXPIRE, command.getType()); } - } diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index 8ba1f4cf03a6b10fe2c66634cb24d957ea7a71dc..bb6981b40fea631c0d830e4416b43e695466b906 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -16,21 +16,16 @@ ~ limitations under the License. 
--> - + 4.0.0 org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT dolphinscheduler-server dolphinscheduler-server - jar - - UTF-8 - @@ -42,6 +37,51 @@ org.apache.dolphinscheduler dolphinscheduler-spi + + org.apache.dolphinscheduler + dolphinscheduler-task-datax + + + org.apache.dolphinscheduler + dolphinscheduler-task-flink + + + org.apache.dolphinscheduler + dolphinscheduler-task-http + + + org.apache.dolphinscheduler + dolphinscheduler-task-mr + + + org.apache.dolphinscheduler + dolphinscheduler-task-pigeon + + + org.apache.dolphinscheduler + dolphinscheduler-task-procedure + + + org.apache.dolphinscheduler + dolphinscheduler-task-python + + + org.apache.dolphinscheduler + dolphinscheduler-task-shell + + + org.apache.dolphinscheduler + dolphinscheduler-task-spark + + + org.apache.dolphinscheduler + dolphinscheduler-task-sql + + + org.apache.dolphinscheduler + dolphinscheduler-task-sqoop + + org.apache.httpcomponents httpclient @@ -50,11 +90,6 @@ org.apache.httpcomponents httpcore - - junit - junit - test - com.google.guava guava @@ -74,12 +109,6 @@ org.powermock powermock-api-mockito2 test - - - org.mockito - mockito-core - - org.mockito @@ -87,65 +116,44 @@ test - org.jacoco - org.jacoco.agent - runtime + org.springframework + spring-test test - org.springframework - spring-test + org.springframework.boot + spring-boot-test test + + org.springframework.boot + spring-boot-starter-cache + + + log4j-api + org.apache.logging.log4j + + + log4j-to-slf4j + org.apache.logging.log4j + + + - - - - - - - - - - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin + maven-jar-plugin - ${java.version} - ${java.version} - ${project.build.sourceEncoding} + + config/ + *.yaml + *.xml + - diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java index 
f6e23f0b635468a6ae41027cf500193f016c0922..78a4d32c290a40c38de99223164bc32869d3fda6 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.log; +import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.remote.command.Command; @@ -31,6 +32,9 @@ import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand; import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.File; @@ -46,15 +50,11 @@ import java.util.concurrent.Executors; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import io.netty.channel.Channel; - /** * logger request process logic */ -public class LoggerRequestProcessor implements NettyRequestProcessor { +public class LoggerRequestProcessor + implements NettyRequestProcessor { private final Logger logger = LoggerFactory.getLogger(LoggerRequestProcessor.class); @@ -76,21 +76,35 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { case GET_LOG_BYTES_REQUEST: GetLogBytesRequestCommand getLogRequest = JSONUtils.parseObject( command.getBody(), GetLogBytesRequestCommand.class); - byte[] bytes = getFileContentBytes(getLogRequest.getPath()); + String path = getLogRequest.getPath(); + if (!checkPathSecurity(path)) { + throw new IllegalArgumentException("Illegal path"); + 
} + byte[] bytes = getFileContentBytes(path); GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes); channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque())); break; case VIEW_WHOLE_LOG_REQUEST: ViewLogRequestCommand viewLogRequest = JSONUtils.parseObject( command.getBody(), ViewLogRequestCommand.class); - String msg = LoggerUtils.readWholeFileContent(viewLogRequest.getPath()); + String viewLogPath = viewLogRequest.getPath(); + if (!checkPathSecurity(viewLogPath)) { + throw new IllegalArgumentException("Illegal path"); + } + String msg = LoggerUtils.readWholeFileContent(viewLogPath); ViewLogResponseCommand viewLogResponse = new ViewLogResponseCommand(msg); channel.writeAndFlush(viewLogResponse.convert2Command(command.getOpaque())); break; case ROLL_VIEW_LOG_REQUEST: RollViewLogRequestCommand rollViewLogRequest = JSONUtils.parseObject( command.getBody(), RollViewLogRequestCommand.class); - List lines = readPartFileContent(rollViewLogRequest.getPath(), + + String rollViewLogPath = rollViewLogRequest.getPath(); + if (!checkPathSecurity(rollViewLogPath)) { + throw new IllegalArgumentException("Illegal path"); + } + + List lines = readPartFileContent(rollViewLogPath, rollViewLogRequest.getSkipLineNum(), rollViewLogRequest.getLimit()); StringBuilder builder = new StringBuilder(); for (String line : lines) { @@ -104,14 +118,17 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { command.getBody(), RemoveTaskLogRequestCommand.class); String taskLogPath = removeTaskLogRequest.getPath(); - + if (!checkPathSecurity(taskLogPath)) { + throw new IllegalArgumentException("Illegal path"); + } File taskLogFile = new File(taskLogPath); Boolean status = true; try { if (taskLogFile.exists()) { status = taskLogFile.delete(); } - } catch (Exception e) { + } + catch (Exception e) { status = false; } @@ -123,6 +140,23 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { } } + private boolean 
checkPathSecurity(String path) { + String dsHome = System.getProperty("DOLPHINSCHEDULER_HOME"); + // if we run server in IDE, user.dir is the DS Home. + if (StringUtils.isBlank(dsHome)) { + dsHome = System.getProperty("user.dir"); + } + if (!StringUtils.isBlank(path)) { + if (path.startsWith(dsHome) && !path.contains("../") && path.endsWith(".log")) { + return true; + } + } + else { + logger.warn("path is null"); + } + return false; + } + public ExecutorService getExecutor() { return this.executor; } @@ -136,14 +170,15 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { */ private byte[] getFileContentBytes(String filePath) { try (InputStream in = new FileInputStream(filePath); - ByteArrayOutputStream bos = new ByteArrayOutputStream()) { + ByteArrayOutputStream bos = new ByteArrayOutputStream()) { byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) != -1) { bos.write(buf, 0, len); } return bos.toByteArray(); - } catch (IOException e) { + } + catch (IOException e) { logger.error("get file bytes error", e); } return new byte[0]; @@ -158,19 +193,20 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { * @return part file content */ private List readPartFileContent(String filePath, - int skipLine, - int limit) { + int skipLine, + int limit) { File file = new File(filePath); if (file.exists() && file.isFile()) { try (Stream stream = Files.lines(Paths.get(filePath))) { return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); - } catch (IOException e) { + } + catch (IOException e) { logger.error("read file error", e); } - } else { + } + else { logger.info("file path: {} not exists", filePath); } return Collections.emptyList(); } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java index 
f1999e641cc33a36a4ab29888b80e8ecf1df0753..c7c7761e0cee6b09c3e276252edb606861589b4c 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java @@ -17,11 +17,11 @@ package org.apache.dolphinscheduler.server.log; - import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyServerConfig; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +47,7 @@ public class LoggerServer { */ private final LoggerRequestProcessor requestProcessor; - public LoggerServer(){ + public LoggerServer() { this.serverConfig = new NettyServerConfig(); this.serverConfig.setListenPort(Constants.RPC_PORT); this.server = new NettyRemotingServer(serverConfig); @@ -72,7 +72,7 @@ public class LoggerServer { */ public void start() { this.server.start(); - logger.info("logger server started, listening on port : {}" , Constants.RPC_PORT); + logger.info("logger server started, listening on port : {}", Constants.RPC_PORT); Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java index 575571d9ac77fa8d46fe8d9e0e456f6b4b934178..230fb0bdef9046d2aba074ffbd656f851044a6dc 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java @@ -46,4 +46,4 @@ public class MasterLogFilter extends Filter { public void setLevel(String level) { this.level = Level.toLevel(level); } -} \ No newline at end of 
file +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java index 85c9e99d78d8732f4d1110d1406f30d8665c8117..8413fed57625584730e56e3dc024d445709392fa 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java @@ -17,17 +17,18 @@ package org.apache.dolphinscheduler.server.log; -import ch.qos.logback.classic.pattern.MessageConverter; -import ch.qos.logback.classic.spi.ILoggingEvent; +import static org.apache.dolphinscheduler.common.Constants.STAR; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.SensitiveLogUtils; import org.apache.commons.lang.StringUtils; import java.util.regex.Matcher; import java.util.regex.Pattern; +import ch.qos.logback.classic.pattern.MessageConverter; +import ch.qos.logback.classic.spi.ILoggingEvent; + /** * sensitive data log converter */ @@ -69,7 +70,7 @@ public class SensitiveDataConverter extends MessageConverter { * * @param logMsg original log */ - private String passwordHandler(Pattern pwdPattern, String logMsg) { + static String passwordHandler(Pattern pwdPattern, String logMsg) { Matcher matcher = pwdPattern.matcher(logMsg); @@ -79,7 +80,7 @@ public class SensitiveDataConverter extends MessageConverter { String password = matcher.group(); - String maskPassword = SensitiveLogUtils.maskDataSourcePwd(password); + String maskPassword = StringUtils.repeat(STAR, StringUtils.length(password)); matcher.appendReplacement(sb, maskPassword); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java index 029242f5348b938957bfa70fe006331eea255d69..524f8be9431854be5b9fcc5695b5302c4304af83 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java @@ -16,16 +16,23 @@ */ package org.apache.dolphinscheduler.server.log; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.sift.AbstractDiscriminator; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.spi.task.TaskConstants; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.sift.AbstractDiscriminator; /** * Task Log Discriminator */ public class TaskLogDiscriminator extends AbstractDiscriminator { + private static Logger logger = LoggerFactory.getLogger(TaskLogDiscriminator.class); + /** * key */ @@ -42,15 +49,25 @@ public class TaskLogDiscriminator extends AbstractDiscriminator { */ @Override public String getDiscriminatingValue(ILoggingEvent event) { - String loggerName = event.getLoggerName() - .split(Constants.EQUAL_SIGN)[1]; - String prefix = LoggerUtils.TASK_LOGGER_INFO_PREFIX + "-"; - if (loggerName.startsWith(prefix)) { - return loggerName.substring(prefix.length(), - loggerName.length() - 1).replace("-","/"); - } else { - return "unknown_task"; + String key = "unknown_task"; + + logger.debug("task log discriminator start, key is:{}, thread name:{},loggerName:{}", key, event.getThreadName(), event.getLoggerName()); + + if (event.getLoggerName().startsWith(TaskConstants.TASK_LOG_LOGGER_NAME)) { + String threadName = event.getThreadName(); + if (threadName.endsWith(TaskConstants.GET_OUTPUT_LOG_SERVICE)) { + threadName = 
threadName.substring(0, threadName.length() - TaskConstants.GET_OUTPUT_LOG_SERVICE.length()); + } + String part1 = threadName + .split(Constants.EQUAL_SIGN)[1]; + String prefix = LoggerUtils.TASK_LOGGER_INFO_PREFIX + "-"; + if (part1.startsWith(prefix)) { + key = part1.substring(prefix.length(), + part1.length() - 1).replace("-", "/"); + } } + logger.debug("task log discriminator end, key is:{}, thread name:{},loggerName:{}", key, event.getThreadName(), event.getLoggerName()); + return key; } @Override diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java index 9c47fb901fdf41412c74495b45fb9f15fdd71c4a..dfe95c1569a574a30442fa95e0f29bab3205e754 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java @@ -14,11 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.log; -import static org.apache.dolphinscheduler.common.utils.LoggerUtils.TASK_APPID_LOG_FORMAT; +import org.apache.dolphinscheduler.spi.task.TaskConstants; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; @@ -30,6 +32,7 @@ import ch.qos.logback.core.spi.FilterReply; */ public class TaskLogFilter extends Filter { + private static Logger logger = LoggerFactory.getLogger(TaskLogFilter.class); /** * level */ @@ -46,11 +49,13 @@ public class TaskLogFilter extends Filter { */ @Override public FilterReply decide(ILoggingEvent event) { - if (event.getThreadName().startsWith(LoggerUtils.TASK_LOGGER_THREAD_NAME) - || event.getLoggerName().startsWith(" - " + TASK_APPID_LOG_FORMAT) + FilterReply filterReply = FilterReply.DENY; + if ((event.getThreadName().startsWith(TaskConstants.TASK_LOGGER_THREAD_NAME) + && event.getLoggerName().startsWith(TaskConstants.TASK_LOG_LOGGER_NAME)) || event.getLevel().isGreaterOrEqual(level)) { - return FilterReply.ACCEPT; + filterReply = FilterReply.ACCEPT; } - return FilterReply.DENY; + logger.debug("task log filter, thread name:{},loggerName:{},filterReply:{}", event.getThreadName(), event.getLoggerName(), filterReply.name()); + return filterReply; } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java index 7b2a1cdcbf7c7a3994b0494a869930d15458583b..b4ed74678e926f9a4ba200bfaa3344100a923591 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -17,51 +17,60 @@ package 
org.apache.dolphinscheduler.server.master; +import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME; + import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.processor.CacheProcessor; import org.apache.dolphinscheduler.server.master.processor.StateEventProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskRecallProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; import org.apache.dolphinscheduler.server.master.registry.MasterRegistryClient; import org.apache.dolphinscheduler.server.master.runner.EventExecuteService; -import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; +import org.apache.dolphinscheduler.server.master.runner.FailoverExecuteThread; import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerService; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import java.util.concurrent.ConcurrentHashMap; -import javax.annotation.PostConstruct; - import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.beans.factory.annotation.Value; import org.springframework.boot.WebApplicationType; import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.cache.annotation.EnableCaching; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; +import org.springframework.context.event.EventListener; import org.springframework.transaction.annotation.EnableTransactionManagement; /** * master server */ @ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = { - @ComponentScan.Filter(type = FilterType.REGEX, pattern = { - "org.apache.dolphinscheduler.server.worker.*", - "org.apache.dolphinscheduler.server.monitor.*", - "org.apache.dolphinscheduler.server.log.*" - }) + @ComponentScan.Filter(type = FilterType.REGEX, pattern = { + "org.apache.dolphinscheduler.server.worker.*", + "org.apache.dolphinscheduler.server.monitor.*", + "org.apache.dolphinscheduler.server.log.*", + "org.apache.dolphinscheduler.alert.*" + }) }) @EnableTransactionManagement +@EnableCaching public class MasterServer implements IStoppable { - /** * logger of MasterServer */ @@ -100,6 +109,12 @@ public class MasterServer implements IStoppable { @Autowired private EventExecuteService eventExecuteService; + @Autowired + private FailoverExecuteThread failoverExecuteThread; + + @Value("${spring.datasource.driver-class-name}") + private String driverClassName; + private ConcurrentHashMap processInstanceExecMaps = new ConcurrentHashMap<>(); /** @@ -115,8 +130,10 @@ public class MasterServer implements IStoppable { /** * run master server */ - @PostConstruct - public void run() { + @EventListener + public void run(ApplicationReadyEvent ignored) { + PropertyUtils.setValue(SPRING_DATASOURCE_DRIVER_CLASS_NAME, driverClassName); + // init remoting server NettyServerConfig serverConfig = new NettyServerConfig(); 
serverConfig.setListenPort(masterConfig.getListenPort()); @@ -125,18 +142,22 @@ public class MasterServer implements IStoppable { ackProcessor.init(processInstanceExecMaps); TaskResponseProcessor taskResponseProcessor = new TaskResponseProcessor(); taskResponseProcessor.init(processInstanceExecMaps); + TaskKillResponseProcessor taskKillResponseProcessor = new TaskKillResponseProcessor(); + taskKillResponseProcessor.init(processInstanceExecMaps); StateEventProcessor stateEventProcessor = new StateEventProcessor(); stateEventProcessor.init(processInstanceExecMaps); this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, taskResponseProcessor); this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, ackProcessor); - this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_RESPONSE, new TaskKillResponseProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_RESPONSE, taskKillResponseProcessor); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_RECALL, new TaskRecallProcessor()); this.nettyRemotingServer.registerProcessor(CommandType.STATE_EVENT_REQUEST, stateEventProcessor); + this.nettyRemotingServer.registerProcessor(CommandType.CACHE_EXPIRE, new CacheProcessor()); this.nettyRemotingServer.start(); // self tolerant this.masterRegistryClient.init(this.processInstanceExecMaps); - this.masterRegistryClient.start(); this.masterRegistryClient.setRegistryStoppable(this); + this.masterRegistryClient.start(); this.eventExecuteService.init(this.processInstanceExecMaps); this.eventExecuteService.start(); @@ -145,6 +166,8 @@ public class MasterServer implements IStoppable { this.masterSchedulerService.start(); + this.failoverExecuteThread.start(); + // start QuartzExecutors // what system should do if exception try { @@ -207,8 +230,18 @@ public class MasterServer implements IStoppable { } // close spring Context and will invoke method with @PreDestroy annotation to destory beans. 
like ServerNodeManager,HostManager,TaskResponseService,CuratorZookeeperClient,etc springApplicationContext.close(); + logger.info("springApplicationContext close"); + try { + // thread sleep 60 seconds for quietly stop + Thread.sleep(60000L); + } catch (Exception e) { + logger.warn("thread sleep exception ", e); + } + // Since close will be executed in hook, so we can't use System.exit here. + Runtime.getRuntime().halt(0); } catch (Exception e) { logger.error("master server stop exception ", e); + Runtime.getRuntime().halt(1); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java deleted file mode 100644 index 1388c5b73db97f01cab6160c22086055e7b18412..0000000000000000000000000000000000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.cache; - -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; -import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; -import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; - -/** - * task instance state manager - */ -public interface TaskInstanceCacheManager { - - /** - * get taskInstance by taskInstance id - * - * @param taskInstanceId taskInstanceId - * @return taskInstance - */ - TaskInstance getByTaskInstanceId(Integer taskInstanceId); - - /** - * cache taskInstance - * - * @param taskExecutionContext taskExecutionContext - */ - void cacheTaskInstance(TaskExecutionContext taskExecutionContext); - - /** - * cache taskInstance - * - * @param taskAckCommand taskAckCommand - */ - void cacheTaskInstance(TaskExecuteAckCommand taskAckCommand); - - /** - * cache taskInstance - * - * @param taskExecuteResponseCommand taskExecuteResponseCommand - */ - void cacheTaskInstance(TaskExecuteResponseCommand taskExecuteResponseCommand); - - /** - * remove taskInstance by taskInstanceId - * @param taskInstanceId taskInstanceId - */ - void removeByTaskInstanceId(Integer taskInstanceId); -} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java deleted file mode 100644 index dd2d6eb8543c1f460fb61d0b18b96a9b2d6e585d..0000000000000000000000000000000000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.server.master.cache.impl; - -import static org.apache.dolphinscheduler.common.Constants.CACHE_REFRESH_TIME_MILLIS; - -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; -import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; -import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; - -import java.util.Map; -import java.util.Map.Entry; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.ConcurrentHashMap; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -/** - * taskInstance state manager - */ -@Component -public class TaskInstanceCacheManagerImpl implements TaskInstanceCacheManager { - - /** - * taskInstance cache - */ - private Map taskInstanceCache = new ConcurrentHashMap<>(); - - /** - * process service - */ - @Autowired - 
private ProcessService processService; - - /** - * taskInstance cache refresh timer - */ - private Timer refreshTaskInstanceTimer = null; - - @PostConstruct - public void init() { - //issue#5539 add thread to fetch task state from database in a fixed rate - this.refreshTaskInstanceTimer = new Timer(true); - refreshTaskInstanceTimer.scheduleAtFixedRate( - new RefreshTaskInstanceTimerTask(), CACHE_REFRESH_TIME_MILLIS, CACHE_REFRESH_TIME_MILLIS - ); - } - - @PreDestroy - public void close() { - this.refreshTaskInstanceTimer.cancel(); - } - - /** - * get taskInstance by taskInstance id - * - * @param taskInstanceId taskInstanceId - * @return taskInstance - */ - @Override - public TaskInstance getByTaskInstanceId(Integer taskInstanceId) { - return taskInstanceCache.computeIfAbsent(taskInstanceId, k -> processService.findTaskInstanceById(taskInstanceId)); - } - - /** - * cache taskInstance - * - * @param taskExecutionContext taskExecutionContext - */ - @Override - public void cacheTaskInstance(TaskExecutionContext taskExecutionContext) { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(taskExecutionContext.getTaskInstanceId()); - taskInstance.setName(taskExecutionContext.getTaskName()); - taskInstance.setStartTime(taskExecutionContext.getStartTime()); - taskInstance.setTaskType(taskExecutionContext.getTaskType()); - taskInstance.setExecutePath(taskExecutionContext.getExecutePath()); - taskInstanceCache.put(taskExecutionContext.getTaskInstanceId(), taskInstance); - } - - /** - * cache taskInstance - * - * @param taskAckCommand taskAckCommand - */ - @Override - public void cacheTaskInstance(TaskExecuteAckCommand taskAckCommand) { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setState(ExecutionStatus.of(taskAckCommand.getStatus())); - taskInstance.setStartTime(taskAckCommand.getStartTime()); - taskInstance.setHost(taskAckCommand.getHost()); - taskInstance.setExecutePath(taskAckCommand.getExecutePath()); - 
taskInstance.setLogPath(taskAckCommand.getLogPath()); - taskInstanceCache.put(taskAckCommand.getTaskInstanceId(), taskInstance); - } - - /** - * cache taskInstance - * - * @param taskExecuteResponseCommand taskExecuteResponseCommand - */ - @Override - public void cacheTaskInstance(TaskExecuteResponseCommand taskExecuteResponseCommand) { - TaskInstance taskInstance = getByTaskInstanceId(taskExecuteResponseCommand.getTaskInstanceId()); - taskInstance.setState(ExecutionStatus.of(taskExecuteResponseCommand.getStatus())); - taskInstance.setEndTime(taskExecuteResponseCommand.getEndTime()); - taskInstanceCache.put(taskExecuteResponseCommand.getTaskInstanceId(), taskInstance); - } - - /** - * remove taskInstance by taskInstanceId - * @param taskInstanceId taskInstanceId - */ - @Override - public void removeByTaskInstanceId(Integer taskInstanceId) { - taskInstanceCache.remove(taskInstanceId); - } - - class RefreshTaskInstanceTimerTask extends TimerTask { - @Override - public void run() { - for (Entry taskInstanceEntry : taskInstanceCache.entrySet()) { - TaskInstance taskInstance = processService.findTaskInstanceById(taskInstanceEntry.getKey()); - if (null != taskInstance && taskInstance.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) { - taskInstanceCache.computeIfPresent(taskInstanceEntry.getKey(), (k, v) -> taskInstance); - } - } - - } - } -} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java index 6c2e2a1e47fe8847dfb922c7b4222ed722b2c4ae..c075415337888f700a92b86de7baf354f2231e48 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java @@ -60,6 +60,15 @@ public class MasterConfig { @Value("${master.reserved.memory:0.3}") private 
double masterReservedMemory; + @Value("${master.failover.interval:10}") + private int failoverInterval; + + @Value("${master.kill.yarn.job.when.handle.failover:true}") + private boolean masterKillYarnJobWhenHandleFailOver; + + @Value("${master.persist.event.state.threads:10}") + private int masterPersistEventStateThreads; + public int getListenPort() { return listenPort; } @@ -150,4 +159,28 @@ public class MasterConfig { public void setStateWheelInterval(int stateWheelInterval) { this.stateWheelInterval = stateWheelInterval; } + + public int getFailoverInterval() { + return failoverInterval; + } + + public void setFailoverInterval(int failoverInterval) { + this.failoverInterval = failoverInterval; + } + + public boolean getMasterKillYarnJobWhenHandleFailOver() { + return masterKillYarnJobWhenHandleFailOver; + } + + public void setMasterKillYarnJobWhenHandleFailOver(boolean masterKillYarnJobWhenHandleFailOver) { + this.masterKillYarnJobWhenHandleFailOver = masterKillYarnJobWhenHandleFailOver; + } + + public int getMasterPersistEventStateThreads() { + return masterPersistEventStateThreads; + } + + public void setMasterPersistEventStateThreads(int masterPersistEventStateThreads) { + this.masterPersistEventStateThreads = masterPersistEventStateThreads; + } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java index 7b18e2bc718723b16dc63934f807925b45424b48..574f5db386e95a3160f34993ddd1fd9ad7ebd845 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java @@ -31,6 +31,7 @@ import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import 
org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -136,8 +137,13 @@ public class TaskPriorityQueueConsumer extends Thread { } else { result = dispatcher.dispatch(executionContext); } + if (result) { + processService.updateHostAndSubmitTimeById(taskPriority.getTaskId(), executionContext.getHost().getAddress(), new Date()); + } } catch (ExecuteException e) { - logger.error("dispatch error: {}", e.getMessage(),e); + logger.error("ExecuteException dispatch error: {}", e.getMessage(), e); + } catch (Throwable t) { + logger.error("dispatch error: {}", t, t); } return result; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java index bd53ad621ac86b9428328a8b36a68d5785ea547b..c4c85ee86922bf58e9439f23a7fc5555f11e51f2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java @@ -18,6 +18,8 @@ package org.apache.dolphinscheduler.server.master.dispatch; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; @@ -72,20 +74,16 @@ public class ExecutorDispatcher implements InitializingBean { * @throws ExecuteException if error throws ExecuteException */ public Boolean dispatch(final ExecutionContext context) throws ExecuteException { - /** - * get executor manager - */ + // get 
executor manager ExecutorManager executorManager = this.executorManagers.get(context.getExecutorType()); if(executorManager == null){ throw new ExecuteException("no ExecutorManager for type : " + context.getExecutorType()); } - /** - * host select - */ - + // host select Host host = hostManager.select(context); if (StringUtils.isEmpty(host.getAddress())) { + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); throw new ExecuteException(String.format("fail to execute : %s due to no suitable worker, " + "current task needs worker group %s to execute", context.getCommand(),context.getWorkerGroup())); @@ -93,9 +91,7 @@ public class ExecutorDispatcher implements InitializingBean { context.setHost(host); executorManager.beforeExecute(context); try { - /** - * task execute - */ + // task execute return executorManager.execute(context); } finally { executorManager.afterExecute(context); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java index 03a3672aed5a28fd49791b87d42c332f351023d8..d649b5056523dde49f7e5782886da2836fd7ddcb 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.master.dispatch.executor; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; @@ -28,6 +29,7 @@ import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import 
org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskRecallProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; import org.apache.dolphinscheduler.server.master.registry.ServerNodeManager; @@ -81,6 +83,7 @@ public class NettyExecutorManager extends AbstractExecutorManager{ this.nettyRemotingClient.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, new TaskResponseProcessor()); this.nettyRemotingClient.registerProcessor(CommandType.TASK_EXECUTE_ACK, new TaskAckProcessor()); this.nettyRemotingClient.registerProcessor(CommandType.TASK_KILL_RESPONSE, new TaskKillResponseProcessor()); + this.nettyRemotingClient.registerProcessor(CommandType.TASK_RECALL, new TaskRecallProcessor()); } /** @@ -91,30 +94,15 @@ public class NettyExecutorManager extends AbstractExecutorManager{ */ @Override public Boolean execute(ExecutionContext context) throws ExecuteException { - - /** - * all nodes - */ Set allNodes = getAllNodes(context); - - /** - * fail nodes - */ Set failNodeSet = new HashSet<>(); - - /** - * build command accord executeContext - */ Command command = context.getCommand(); - - /** - * execute task host - */ + // execute task host Host host = context.getHost(); boolean success = false; while (!success) { try { - doExecute(host,command); + doExecute(host, command); success = true; context.setHost(host); } catch (ExecuteException ex) { @@ -163,7 +151,7 @@ public class NettyExecutorManager extends AbstractExecutorManager{ } catch (Exception ex) { logger.error(String.format("send command : %s to %s error", command, host), ex); retryCount--; - ThreadUtils.sleep(100); + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } } while (retryCount >= 0 && !success); diff --git 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java index 0e84db678aac08857b7ea6e89bc00dab45928b4e..e0753b72d6a4fc1e27ac39bf448bb145b7c72aac 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java @@ -18,14 +18,14 @@ package org.apache.dolphinscheduler.server.master.dispatch.host; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.ResInfo; +import org.apache.dolphinscheduler.common.utils.HeartBeat; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWorker; import org.apache.dolphinscheduler.server.master.registry.ServerNodeManager; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; @@ -36,7 +36,7 @@ import java.util.Set; import org.springframework.beans.factory.annotation.Autowired; /** - * common host manager + * common host manager */ public abstract class CommonHostManager implements HostManager { @@ -48,6 +48,7 @@ public abstract class CommonHostManager implements HostManager { /** * select host + * * @param context context * @return host */ @@ -87,12 +88,12 @@ public abstract class CommonHostManager implements HostManager { return hostWorkers; } - protected int getWorkerHostWeightFromHeartbeat(String heartbeat) { + protected int 
getWorkerHostWeightFromHeartbeat(String heartBeatInfo) { int hostWeight = Constants.DEFAULT_WORKER_HOST_WEIGHT; - if (!StringUtils.isEmpty(heartbeat)) { - String[] parts = heartbeat.split(Constants.COMMA); - if (ResInfo.isNewHeartbeatWithWeight(parts)) { - hostWeight = Integer.parseInt(parts[10]); + if (!StringUtils.isEmpty(heartBeatInfo)) { + HeartBeat heartBeat = HeartBeat.decodeHeartBeat(heartBeatInfo); + if (heartBeat != null) { + hostWeight = heartBeat.getWorkerHostWeight(); } } return hostWeight; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java index 86ed6a8310c695fc43cf29742eec0c4633807250..fdfa064556430eae96393344225173f6fc8e8e33 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java @@ -17,22 +17,22 @@ package org.apache.dolphinscheduler.server.master.dispatch.host; +import org.apache.commons.collections.CollectionUtils; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.ResInfo; +import org.apache.dolphinscheduler.common.utils.HeartBeat; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWeight; import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWorker; import 
org.apache.dolphinscheduler.server.master.dispatch.host.assign.LowerWeightRoundRobin; +import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -40,14 +40,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** - * lower weight host manager + * lower weight host manager */ public class LowerWeightHostManager extends CommonHostManager { @@ -79,7 +73,7 @@ public class LowerWeightHostManager extends CommonHostManager { this.workerHostWeightsMap = new ConcurrentHashMap<>(); this.lock = new ReentrantLock(); this.executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("LowerWeightHostManagerExecutor")); - this.executorService.scheduleWithFixedDelay(new RefreshResourceTask(),0, 5, TimeUnit.SECONDS); + this.executorService.scheduleWithFixedDelay(new RefreshResourceTask(), 0, 1, TimeUnit.SECONDS); } @PreDestroy @@ -89,6 +83,7 @@ public class LowerWeightHostManager extends CommonHostManager { /** * select host + * * @param context context * @return host */ @@ -138,9 +133,9 @@ public class LowerWeightHostManager extends CommonHostManager { Set hostWeights = new HashSet<>(nodes.size()); for (String node : nodes) { String heartbeat = serverNodeManager.getWorkerNodeInfo(node); - HostWeight hostWeight = getHostWeight(node, workerGroup, heartbeat); - if (hostWeight != null) { - hostWeights.add(hostWeight); + 
Optional hostWeightOpt = getHostWeight(node, workerGroup, heartbeat); + if (hostWeightOpt.isPresent()) { + hostWeights.add(hostWeightOpt.get()); } } if (!hostWeights.isEmpty()) { @@ -153,23 +148,29 @@ public class LowerWeightHostManager extends CommonHostManager { } } - public HostWeight getHostWeight(String addr, String workerGroup, String heartbeat) { - if (ResInfo.isValidHeartbeatForRegistryInfo(heartbeat)) { - String[] parts = heartbeat.split(Constants.COMMA); - int status = Integer.parseInt(parts[8]); - if (status == Constants.ABNORMAL_NODE_STATUS) { - logger.warn("worker {} current cpu load average {} is too high or available memory {}G is too low", - addr, Double.parseDouble(parts[2]), Double.parseDouble(parts[3])); - return null; - } - double cpu = Double.parseDouble(parts[0]); - double memory = Double.parseDouble(parts[1]); - double loadAverage = Double.parseDouble(parts[2]); - long startTime = DateUtils.stringToDate(parts[6]).getTime(); - int weight = getWorkerHostWeightFromHeartbeat(heartbeat); - return new HostWeight(HostWorker.of(addr, weight, workerGroup), cpu, memory, loadAverage, startTime); + public Optional getHostWeight(String addr, String workerGroup, String heartBeatInfo) { + if (StringUtils.isEmpty(heartBeatInfo)) { + logger.warn("worker {} in work group {} have not received the heartbeat", addr, workerGroup); + return Optional.empty(); + } + HeartBeat heartBeat = HeartBeat.decodeHeartBeat(heartBeatInfo); + if (heartBeat == null) { + return Optional.empty(); + } + if (Constants.ABNORMAL_NODE_STATUS == heartBeat.getServerStatus()) { + logger.warn("worker {} current cpu load average {} is too high or available memory {}G is too low", + addr, heartBeat.getLoadAverage(), heartBeat.getAvailablePhysicalMemorySize()); + return Optional.empty(); + } + if (Constants.BUSY_NODE_STATUE == heartBeat.getServerStatus()) { + logger.warn("worker {} is busy, current waiting task count {} is large than worker thread count {}", + addr, 
heartBeat.getWorkerWaitingTaskCount(), heartBeat.getWorkerExecThreadCount()); + return Optional.empty(); } - return null; + return Optional.of( + new HostWeight(HostWorker.of(addr, heartBeat.getWorkerHostWeight(), workerGroup), + heartBeat.getCpuUsage(), heartBeat.getMemoryUsage(), heartBeat.getLoadAverage(), + heartBeat.getWorkerWaitingTaskCount(), heartBeat.getStartupTime())); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/AbstractSelector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/AbstractSelector.java index 087a5ff002c5aa0709bbdc46b102d5df96d85c9c..d07f62a1f4b0646c07b545bc94df5ce5eefa4be2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/AbstractSelector.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/AbstractSelector.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.master.dispatch.host.assign; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.commons.collections.CollectionUtils; import java.util.Collection; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java index 9d7855f054b78adaaddb7546b403c6611699979a..a441582235135a8f775386e9ea2fd01a68929304 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java @@ -37,10 +37,13 @@ public class HostWeight { private double currentWeight; - public HostWeight(HostWorker hostWorker, double cpu, double memory, double 
loadAverage, long startTime) { + private final int waitingTaskCount; + + public HostWeight(HostWorker hostWorker, double cpu, double memory, double loadAverage, int waitingTaskCount, long startTime) { this.hostWorker = hostWorker; this.weight = calculateWeight(cpu, memory, loadAverage, startTime); this.currentWeight = this.weight; + this.waitingTaskCount = waitingTaskCount; } public double getWeight() { @@ -63,12 +66,17 @@ public class HostWeight { return (Host)hostWorker; } + public int getWaitingTaskCount() { + return waitingTaskCount; + } + @Override public String toString() { return "HostWeight{" + "hostWorker=" + hostWorker + ", weight=" + weight + ", currentWeight=" + currentWeight + + ", waitingTaskCount=" + waitingTaskCount + '}'; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java index ea55785182911ead4b37d0e9fc3277f7e937a129..f099d81473c4fdfa0ecad2a8efbd95640b05cafa 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java @@ -18,6 +18,11 @@ package org.apache.dolphinscheduler.server.master.dispatch.host.assign; import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +import com.google.common.collect.Lists; /** * lower weight round robin @@ -35,7 +40,8 @@ public class LowerWeightRoundRobin extends AbstractSelector { double totalWeight = 0; double lowWeight = 0; HostWeight lowerNode = null; - for (HostWeight hostWeight : sources) { + List weights = canAssignTaskHost(sources); + for (HostWeight hostWeight : weights) { totalWeight += hostWeight.getWeight(); 
hostWeight.setCurrentWeight(hostWeight.getCurrentWeight() + hostWeight.getWeight()); if (lowerNode == null || lowWeight > hostWeight.getCurrentWeight()) { @@ -45,7 +51,21 @@ public class LowerWeightRoundRobin extends AbstractSelector { } lowerNode.setCurrentWeight(lowerNode.getCurrentWeight() + totalWeight); return lowerNode; + } + private List canAssignTaskHost(Collection sources) { + List zeroWaitingTask = sources.stream().filter(h -> h.getWaitingTaskCount() == 0).collect(Collectors.toList()); + if (!zeroWaitingTask.isEmpty()) { + return zeroWaitingTask; + } + HostWeight hostWeight = sources.stream().min(Comparator.comparing(HostWeight::getWaitingTaskCount)).get(); + List waitingTask = Lists.newArrayList(hostWeight); + List equalWaitingTask = sources.stream().filter(h -> !h.getHost().equals(hostWeight.getHost()) && h.getWaitingTaskCount() == hostWeight.getWaitingTaskCount()) + .collect(Collectors.toList()); + if (!equalWaitingTask.isEmpty()) { + waitingTask.addAll(equalWaitingTask); + } + return waitingTask; } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryPluginManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java similarity index 31% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryPluginManager.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java index 211795f5b9ba14df6b368bda06a7d9e6dedeb185..6db7f65d7f784e41e3691dfde093841240fcd35c 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryPluginManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java @@ -15,68 +15,59 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.spi.register; +package org.apache.dolphinscheduler.server.master.processor; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.classloader.ThreadContextClassLoader; -import org.apache.dolphinscheduler.spi.plugin.AbstractDolphinPluginManager; +import org.apache.dolphinscheduler.common.enums.CacheType; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.remote.command.CacheExpireCommand; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; + +import com.google.common.base.Preconditions; + +import io.netty.channel.Channel; /** - * The plug-in address of the registry needs to be configured. - * Multi-registries are not supported. - * When the plug-in directory contains multiple plug-ins, only the configured plug-in will be used. 
- * todo It’s not good to put it here, consider creating a separate API module for each plugin + * cache process from master/api */ -public class RegistryPluginManager extends AbstractDolphinPluginManager { +public class CacheProcessor implements NettyRequestProcessor { - private static final Logger logger = LoggerFactory.getLogger(RegistryPluginManager.class); + private final Logger logger = LoggerFactory.getLogger(CacheProcessor.class); - private RegistryFactory registryFactory; + private CacheManager cacheManager; - public static Registry registry; + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.CACHE_EXPIRE == command.getType(), String.format("invalid command type: %s", command.getType())); - private String registerPluginName; + CacheExpireCommand cacheExpireCommand = JSONUtils.parseObject(command.getBody(), CacheExpireCommand.class); - public RegistryPluginManager(String registerPluginName) { - this.registerPluginName = registerPluginName; + logger.info("received command : {}", cacheExpireCommand); + + this.cacheExpire(cacheExpireCommand); } - @Override - public void installPlugin(DolphinSchedulerPlugin dolphinSchedulerPlugin) { - for (RegistryFactory registryFactory : dolphinSchedulerPlugin.getRegisterFactorys()) { - logger.info("Registering Registry Plugin '{}'", registryFactory.getName()); - if (registerPluginName.equals(registryFactory.getName())) { - this.registryFactory = registryFactory; - loadRegistry(); - return; - } + private void cacheExpire(CacheExpireCommand cacheExpireCommand) { + if (cacheManager == null) { + cacheManager = SpringApplicationContext.getBean(CacheManager.class); } - if (null == registry) { - throw new RegistryException(String.format("not found %s registry plugin ", registerPluginName)); - } - } - /** - * load registry - */ - private void loadRegistry() { - try (ThreadContextClassLoader ignored = new 
ThreadContextClassLoader(registryFactory.getClass().getClassLoader())) { - registry = registryFactory.create(); + if (cacheExpireCommand.getCacheKey().isEmpty()) { + return; } - } - /** - * get registry - * @return registry - */ - public Registry getRegistry() { - if (null == registry) { - throw new RegistryException("not install registry"); + CacheType cacheType = cacheExpireCommand.getCacheType(); + Cache cache = cacheManager.getCache(cacheType.getCacheName()); + if (cache != null) { + cache.evict(cacheExpireCommand.getCacheKey()); + logger.info("cache evict, type:{}, key:{}", cacheType.getCacheName(), cacheExpireCommand.getCacheKey()); } - return registry; } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java index d5a8e85b5dbee585dfa77d737b2346cd56267a78..2f9a6342501e5b2d71c51607ae08e4d1d645084a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java @@ -61,8 +61,12 @@ public class StateEventProcessor implements NettyRequestProcessor { StateEventChangeCommand stateEventChangeCommand = JSONUtils.parseObject(command.getBody(), StateEventChangeCommand.class); StateEvent stateEvent = new StateEvent(); - stateEvent.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); stateEvent.setKey(stateEventChangeCommand.getKey()); + if (stateEventChangeCommand.getSourceProcessInstanceId() != stateEventChangeCommand.getDestProcessInstanceId()) { + stateEvent.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); + } else { + stateEvent.setExecutionStatus(stateEventChangeCommand.getSourceStatus()); + } stateEvent.setProcessInstanceId(stateEventChangeCommand.getDestProcessInstanceId()); 
stateEvent.setTaskInstanceId(stateEventChangeCommand.getDestTaskInstanceId()); StateEventType type = stateEvent.getTaskInstanceId() == 0 ? StateEventType.PROCESS_STATE_CHANGE : StateEventType.TASK_STATE_CHANGE; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java index 15f97c17a574708d98e4ee2cea5e25f001651634..74871a4d1a7ccd1ea365beb4452aa84fe7147040 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java @@ -24,8 +24,6 @@ import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.ChannelUtils; -import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; -import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; @@ -52,14 +50,8 @@ public class TaskAckProcessor implements NettyRequestProcessor { */ private final TaskResponseService taskResponseService; - /** - * taskInstance cache manager - */ - private final TaskInstanceCacheManager taskInstanceCacheManager; - public TaskAckProcessor() { this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); - this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); } public void 
init(ConcurrentHashMap processInstanceExecMaps) { @@ -78,8 +70,6 @@ public class TaskAckProcessor implements NettyRequestProcessor { TaskExecuteAckCommand taskAckCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteAckCommand.class); logger.info("taskAckCommand : {}", taskAckCommand); - taskInstanceCacheManager.cacheTaskInstance(taskAckCommand); - String workerAddress = ChannelUtils.toAddress(channel).getAddress(); ExecutionStatus ackStatus = ExecutionStatus.of(taskAckCommand.getStatus()); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java index 28f18fe9613acea677ad028b7b95d63fde076b59..24101108f0bbc82b0935609146aefbad847f2473 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java @@ -17,11 +17,18 @@ package org.apache.dolphinscheduler.server.master.processor; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; + +import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -37,6 +44,19 @@ public class TaskKillResponseProcessor implements NettyRequestProcessor { private final Logger logger = LoggerFactory.getLogger(TaskKillResponseProcessor.class); + /** + * process service + */ + private final TaskResponseService taskResponseService; + + public TaskKillResponseProcessor() { + this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); + } + + public void init(ConcurrentHashMap processInstanceExecMaps) { + this.taskResponseService.init(processInstanceExecMaps); + } + /** * task final result response * need master process , state persistence @@ -50,6 +70,10 @@ public class TaskKillResponseProcessor implements NettyRequestProcessor { TaskKillResponseCommand responseCommand = JSONUtils.parseObject(command.getBody(), TaskKillResponseCommand.class); logger.info("received task kill response command : {}", responseCommand); + // TaskResponseEvent + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newKillResponse(ExecutionStatus.of(responseCommand.getStatus()), + responseCommand.getTaskInstanceId(), channel, responseCommand.getProcessInstanceId()); + taskResponseService.addResponse(taskResponseEvent); } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/DolphinSchedulerPlugin.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java similarity index 30% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/DolphinSchedulerPlugin.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java index 1c5f1c515e72512a879596d3a094032013da892a..9c18e94d1c40c63ba1510040bc4ea8b0d835dc97 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/DolphinSchedulerPlugin.java +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java @@ -15,47 +15,52 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.spi; +package org.apache.dolphinscheduler.server.master.processor; -import static java.util.Collections.emptyList; - -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; -import org.apache.dolphinscheduler.spi.register.RegistryFactory; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; +import com.google.common.base.Preconditions; +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskRecallCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** - * Dolphinscheduler plugin interface - * All plugin need implements this interface. - * Each plugin needs a factory. This factory has at least two methods. - * one called AlertChannelFactory#getId(), used to return the name of the plugin implementation, - * so that the 'PluginLoad' module can find the plugin implementation class by the name in the configuration file. - * The other method is called create(Map config). This method contains at least one parameter Map config. - * Config contains custom parameters read from the plug-in configuration file. 
+ * task recall processor */ -public interface DolphinSchedulerPlugin { +public class TaskRecallProcessor implements NettyRequestProcessor { - /** - * get alert channel factory - * @return alert channel factory - */ - default Iterable getAlertChannelFactorys() { - return emptyList(); - } + private final Logger logger = LoggerFactory.getLogger(TaskRecallProcessor.class); /** - * get registry plugin factory - * @return registry factory + * process service */ - default Iterable getRegisterFactorys() { - return emptyList(); + private final TaskResponseService taskResponseService; + + public TaskRecallProcessor() { + this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); } /** - * get task plugin factory - * @return registry factory + * task ack process + * + * @param channel channel channel + * @param command command TaskExecuteAckCommand */ - default Iterable getTaskChannelFactorys() { - return emptyList(); + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_RECALL == command.getType(), String.format("invalid command type : %s", command.getType())); + TaskRecallCommand recallCommand = JSONUtils.parseObject(command.getBody(), TaskRecallCommand.class); + logger.info("taskRecallCommand: {}, opaque: {}", recallCommand, command.getOpaque()); + // TaskResponseEvent + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newRecall(ExecutionStatus.of(recallCommand.getStatus()), recallCommand.getEvent(), + recallCommand.getTaskInstanceId(), recallCommand.getProcessInstanceId(), channel, command.getOpaque()); + taskResponseService.addResponse(taskResponseEvent); } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java index 
5c6ade7fccb3377efd7eb8f103c6f722438b9ebc..0a743b52ff37b541bfe98bf88744d14ecd131b81 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java @@ -23,8 +23,6 @@ import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; -import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; @@ -51,14 +49,8 @@ public class TaskResponseProcessor implements NettyRequestProcessor { */ private final TaskResponseService taskResponseService; - /** - * taskInstance cache manager - */ - private final TaskInstanceCacheManager taskInstanceCacheManager; - public TaskResponseProcessor() { this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); - this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); } public void init(ConcurrentHashMap processInstanceExecMaps) { @@ -79,8 +71,6 @@ public class TaskResponseProcessor implements NettyRequestProcessor { TaskExecuteResponseCommand responseCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteResponseCommand.class); logger.info("received command : {}", responseCommand); - taskInstanceCacheManager.cacheTaskInstance(responseCommand); - // TaskResponseEvent TaskResponseEvent taskResponseEvent = 
TaskResponseEvent.newResult(ExecutionStatus.of(responseCommand.getStatus()), responseCommand.getEndTime(), diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java index 224a61753da6cb5a12e8c4ddc86bd86e04b35e73..da6300fcbba14d9c00cf01a7d5ac8abc84dea5e6 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java @@ -17,13 +17,13 @@ package org.apache.dolphinscheduler.server.master.processor.queue; -import com.fasterxml.jackson.annotation.JsonFormat; - import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; + import io.netty.channel.Channel; /** @@ -94,6 +94,35 @@ public class TaskResponseEvent { private Channel channel; private int processInstanceId; + + /** + * request unique identification + */ + private long opaque; + + public static TaskResponseEvent newKillResponse(ExecutionStatus state, + int taskInstanceId, + Channel channel, + int processInstanceId) { + TaskResponseEvent event = new TaskResponseEvent(); + event.setState(state); + event.setTaskInstanceId(taskInstanceId); + event.setEvent(Event.ACTION_STOP); + event.setChannel(channel); + event.setProcessInstanceId(processInstanceId); + return event; + } + + public static TaskResponseEvent newActionStop(ExecutionStatus state, + int taskInstanceId, + int processInstanceId) { + TaskResponseEvent event = new TaskResponseEvent(); + event.setState(state); + event.setTaskInstanceId(taskInstanceId); + event.setEvent(Event.ACTION_STOP); + event.setProcessInstanceId(processInstanceId); + return 
event; + } public static TaskResponseEvent newAck(ExecutionStatus state, Date startTime, @@ -137,6 +166,22 @@ public class TaskResponseEvent { return event; } + public static TaskResponseEvent newRecall(ExecutionStatus state, + Event event, + int taskInstanceId, + int processInstanceId, + Channel channel, + long opaque) { + TaskResponseEvent responseEvent = new TaskResponseEvent(); + responseEvent.setEvent(event); + responseEvent.setState(state); + responseEvent.setTaskInstanceId(taskInstanceId); + responseEvent.setProcessInstanceId(processInstanceId); + responseEvent.setChannel(channel); + responseEvent.setOpaque(opaque); + return responseEvent; + } + public String getVarPool() { return varPool; } @@ -240,4 +285,12 @@ public class TaskResponseEvent { public void setProcessInstanceId(int processInstanceId) { this.processInstanceId = processInstanceId; } + + public long getOpaque() { + return opaque; + } + + public void setOpaque(long opaque) { + this.opaque = opaque; + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponsePersistThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponsePersistThread.java new file mode 100644 index 0000000000000000000000000000000000000000..715f64a0c11a1208545ae706ec699aabd83beda9 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponsePersistThread.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor.queue; + +import org.apache.dolphinscheduler.common.enums.Event; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.StateEvent; +import org.apache.dolphinscheduler.common.enums.StateEventType; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand; +import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; +import org.apache.dolphinscheduler.remote.command.TaskKillAckCommand; +import org.apache.dolphinscheduler.remote.command.TaskRecallAckCommand; +import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; +import org.apache.dolphinscheduler.server.master.runner.task.ITaskProcessor; +import org.apache.dolphinscheduler.server.master.runner.task.TaskAction; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.channel.Channel; + +public class TaskResponsePersistThread implements Runnable { + + /** + * logger of TaskResponsePersistThread + */ + private static final Logger logger = LoggerFactory.getLogger(TaskResponsePersistThread.class); + + private final ConcurrentLinkedQueue events = new ConcurrentLinkedQueue<>(); + + private final Integer processInstanceId; + + /** + * process service + */ + private ProcessService 
processService; + + private ConcurrentHashMap processInstanceMapper; + + public TaskResponsePersistThread(ProcessService processService, + ConcurrentHashMap processInstanceMapper, + Integer processInstanceId) { + this.processService = processService; + this.processInstanceMapper = processInstanceMapper; + this.processInstanceId = processInstanceId; + } + + @Override + public void run() { + while (!this.events.isEmpty()) { + TaskResponseEvent event = this.events.peek(); + try { + boolean result = persist(event); + if (!result) { + logger.error("persist meta error, task id:{}, instance id:{}", event.getTaskInstanceId(), event.getProcessInstanceId()); + } + } catch (Exception e) { + logger.error("persist error, task id:{}, instance id:{}, error: {}", event.getTaskInstanceId(), event.getProcessInstanceId(), e); + } finally { + this.events.remove(event); + } + } + } + + /** + * persist taskResponseEvent + * + * @param taskResponseEvent taskResponseEvent + */ + private boolean persist(TaskResponseEvent taskResponseEvent) { + Event event = taskResponseEvent.getEvent(); + Channel channel = taskResponseEvent.getChannel(); + + TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); + + boolean result = true; + + switch (event) { + case ACK: + try { + if (taskInstance != null) { + ExecutionStatus status = taskInstance.getState().typeIsFinished() ? 
taskInstance.getState() : taskResponseEvent.getState(); + processService.changeTaskState(taskInstance, status, + taskResponseEvent.getStartTime(), + taskResponseEvent.getWorkerAddress(), + taskResponseEvent.getExecutePath(), + taskResponseEvent.getLogPath(), + taskResponseEvent.getTaskInstanceId()); + logger.debug("changeTaskState in ACK , changed in meta:{} ,task instance state:{}, task response event state:{}, taskInstance id:{},taskInstance host:{}", + result, taskInstance.getState(), taskResponseEvent.getState(), taskInstance.getId(), taskInstance.getHost()); + } + // if taskInstance is null (maybe deleted) . retry will be meaningless . so ack success + DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskAckCommand.convert2Command()); + logger.debug("worker ack master success, taskInstance id:{},taskInstance host:{}", taskInstance.getId(), taskInstance.getHost()); + } catch (Exception e) { + result = false; + logger.error("worker ack master error", e); + DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.FAILURE.getCode(), taskInstance == null ? 
-1 : taskInstance.getId()); + channel.writeAndFlush(taskAckCommand.convert2Command()); + } + break; + case RESULT: + try { + if (taskInstance != null) { + result = processService.changeTaskState(taskInstance, taskResponseEvent.getState(), + taskResponseEvent.getEndTime(), + taskResponseEvent.getProcessId(), + taskResponseEvent.getAppIds(), + taskResponseEvent.getTaskInstanceId(), + taskResponseEvent.getVarPool() + ); + logger.debug("changeTaskState in RESULT , changed in meta:{} task instance state:{}, task response event state:{}, taskInstance id:{},taskInstance host:{}", + result, taskInstance.getState(), taskResponseEvent.getState(), taskInstance.getId(), taskInstance.getHost()); + } + if (!result) { + DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.FAILURE.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskResponseCommand.convert2Command()); + logger.debug("worker response master failure, taskInstance id:{},taskInstance host:{}", taskInstance.getId(), taskInstance.getHost()); + } else { + // if taskInstance is null (maybe deleted) . retry will be meaningless . 
so response success + DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskResponseCommand.convert2Command()); + logger.debug("worker response master success, taskInstance id:{},taskInstance host:{}", taskInstance.getId(), taskInstance.getHost()); + } + } catch (Exception e) { + result = false; + logger.error("worker response master error", e); + DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.FAILURE.getCode(), -1); + channel.writeAndFlush(taskResponseCommand.convert2Command()); + } + break; + case ACTION_STOP: + WorkflowExecuteThread workflowExecuteThread = this.processInstanceMapper.get(taskResponseEvent.getProcessInstanceId()); + if (workflowExecuteThread != null) { + ITaskProcessor taskProcessor = workflowExecuteThread.getActiveTaskProcessorMaps().get(taskResponseEvent.getTaskInstanceId()); + if (taskProcessor != null) { + taskProcessor.persist(TaskAction.STOP); + logger.debug("ACTION_STOP: task instance id:{}, process instance id:{}", taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getProcessInstanceId()); + } + workflowExecuteThread.getActiveTaskProcessorMaps().remove(taskResponseEvent.getTaskInstanceId()); + if (workflowExecuteThread.activeTaskFinish()) { + this.processInstanceMapper.remove(taskResponseEvent.getProcessInstanceId()); + } + } + + if (channel != null) { + TaskKillAckCommand taskKillAckCommand = new TaskKillAckCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskKillAckCommand.convert2Command()); + } + break; + case WORKER_REJECT: + try { + WorkflowExecuteThread executeThread = this.processInstanceMapper.get(taskResponseEvent.getProcessInstanceId()); + if (executeThread != null) { + ITaskProcessor taskProcessor = executeThread.getActiveTaskProcessorMaps().get(taskResponseEvent.getTaskInstanceId()); + if (taskProcessor != 
null) { + taskProcessor.action(TaskAction.RESUBMIT); + logger.info("RESUBMIT: task instance id:{}, process instance id:{}", taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getProcessInstanceId()); + } + } + if (channel != null) { + TaskRecallAckCommand taskRecallAckCommand = new TaskRecallAckCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskRecallAckCommand.convert2Command(taskResponseEvent.getOpaque())); + logger.info("taskRecallAckCommand send successfully, task instance id:{}, opaque:{}", taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getOpaque()); + } + } catch (Exception e) { + result = false; + logger.error("worker reject error", e); + TaskRecallAckCommand taskRecallAckCommand = new TaskRecallAckCommand(ExecutionStatus.FAILURE.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskRecallAckCommand.convert2Command(taskResponseEvent.getOpaque())); + logger.info("taskRecallAckCommand send successfully, task instance id:{}, opaque:{}", taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getOpaque()); + } + break; + case REALLOCATE: + logger.warn("Not yet supported"); + break; + default: + throw new IllegalArgumentException("invalid event type : " + event); + } + + WorkflowExecuteThread workflowExecuteThread = this.processInstanceMapper.get(taskResponseEvent.getProcessInstanceId()); + if (workflowExecuteThread != null && taskResponseEvent.getState().typeIsFinished() + && event != Event.ACTION_STOP && !workflowExecuteThread.getProcessInstance().getState().typeIsStop()) { + StateEvent stateEvent = new StateEvent(); + stateEvent.setProcessInstanceId(taskResponseEvent.getProcessInstanceId()); + stateEvent.setTaskInstanceId(taskResponseEvent.getTaskInstanceId()); + stateEvent.setExecutionStatus(taskResponseEvent.getState()); + stateEvent.setType(StateEventType.TASK_STATE_CHANGE); + workflowExecuteThread.addStateEvent(stateEvent); + } + return result; + } + + 
public boolean addEvent(TaskResponseEvent event) { + if (event.getProcessInstanceId() != this.processInstanceId) { + logger.info("event would be abounded, task instance id:{}, process instance id:{}, this.processInstanceId:{}", + event.getTaskInstanceId(), event.getProcessInstanceId(), this.processInstanceId); + return false; + } + return this.events.add(event); + } + + public int eventSize() { + return this.events.size(); + } + + public boolean isEmpty() { + return this.events.isEmpty(); + } + + public Integer getProcessInstanceId() { + return processInstanceId; + } + + public String getKey() { + return String.valueOf(processInstanceId); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java index 27b96e14d8c11d69261a4dc2591e3601bac61b97..74c88ac588b1fe8752080b45a8bd30f333109720 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java @@ -17,22 +17,18 @@ package org.apache.dolphinscheduler.server.master.processor.queue; -import org.apache.dolphinscheduler.common.enums.Event; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.StateEvent; -import org.apache.dolphinscheduler.common.enums.StateEventType; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand; -import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; +import 
org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; import org.apache.dolphinscheduler.service.process.ProcessService; -import java.util.ArrayList; -import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; @@ -42,7 +38,11 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import io.netty.channel.Channel; +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; /** * task manager @@ -66,40 +66,69 @@ public class TaskResponseService { @Autowired private ProcessService processService; + @Autowired + private MasterConfig masterConfig; + /** * task response worker */ private Thread taskResponseWorker; - private ConcurrentHashMap processInstanceMapper; + /** + * event handler + */ + private Thread taskResponseEventHandler; + + private ConcurrentHashMap processInstanceMap; + + private final ConcurrentHashMap taskResponseEventHandlerMap = new ConcurrentHashMap<>(); + + private ListeningExecutorService listeningExecutorService; + + private ExecutorService eventExecService; + + /** + * task response mapper + */ + private final ConcurrentHashMap processTaskResponseMap = new ConcurrentHashMap<>(); - public void init(ConcurrentHashMap processInstanceMapper) { - if (this.processInstanceMapper == null) { - this.processInstanceMapper = 
processInstanceMapper; + public void init(ConcurrentHashMap processInstanceMap) { + if (this.processInstanceMap == null) { + this.processInstanceMap = processInstanceMap; } } @PostConstruct public void start() { + eventExecService = ThreadUtils.newDaemonFixedThreadExecutor("PersistEventState", masterConfig.getMasterPersistEventStateThreads()); + this.listeningExecutorService = MoreExecutors.listeningDecorator(eventExecService); this.taskResponseWorker = new TaskResponseWorker(); - this.taskResponseWorker.setName("StateEventResponseWorker"); + this.taskResponseWorker.setName("TaskResponseWorker"); this.taskResponseWorker.start(); + this.taskResponseEventHandler = new TaskResponseEventHandler(); + this.taskResponseEventHandler.setName("TaskResponseEventHandler"); + this.taskResponseEventHandler.start(); } @PreDestroy public void stop() { try { this.taskResponseWorker.interrupt(); - if (!eventQueue.isEmpty()) { - List remainEvents = new ArrayList<>(eventQueue.size()); - eventQueue.drainTo(remainEvents); - for (TaskResponseEvent event : remainEvents) { - this.persist(event); - } - } + this.taskResponseEventHandler.interrupt(); } catch (Exception e) { logger.error("stop error:", e); } + this.eventExecService.shutdown(); + long waitSec = 5; + boolean terminated = false; + try { + terminated = eventExecService.awaitTermination(waitSec, TimeUnit.SECONDS); + } catch (InterruptedException ignore) { + Thread.currentThread().interrupt(); + } + if (!terminated) { + logger.warn("TaskResponseService: eventExecService shutdown without terminated: {}s, increase await time", waitSec); + } } /** @@ -110,6 +139,7 @@ public class TaskResponseService { public void addResponse(TaskResponseEvent taskResponseEvent) { try { eventQueue.put(taskResponseEvent); + logger.debug("eventQueue size:{}", eventQueue.size()); } catch (InterruptedException e) { logger.error("put task : {} error :{}", taskResponseEvent, e); Thread.currentThread().interrupt(); @@ -123,17 +153,31 @@ public class 
TaskResponseService { @Override public void run() { - while (Stopper.isRunning()) { try { // if not task , blocking here TaskResponseEvent taskResponseEvent = eventQueue.take(); - persist(taskResponseEvent); + if (processInstanceMap.containsKey(taskResponseEvent.getProcessInstanceId()) + && !processTaskResponseMap.containsKey(taskResponseEvent.getProcessInstanceId())) { + TaskResponsePersistThread taskResponsePersistThread = new TaskResponsePersistThread( + processService, processInstanceMap, taskResponseEvent.getProcessInstanceId()); + processTaskResponseMap.put(taskResponseEvent.getProcessInstanceId(), taskResponsePersistThread); + } + TaskResponsePersistThread taskResponsePersistThread = processTaskResponseMap.get(taskResponseEvent.getProcessInstanceId()); + if (null != taskResponsePersistThread) { + if (taskResponsePersistThread.addEvent(taskResponseEvent)) { + logger.debug("submit task response persist queue success, task instance id:{},process instance id:{}, state:{} ", + taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getProcessInstanceId(), taskResponseEvent.getState()); + } else { + logger.error("submit task response persist queue error, task instance id:{},process instance id:{} ", + taskResponseEvent.getTaskInstanceId(), taskResponseEvent.getProcessInstanceId()); + } + } } catch (InterruptedException e) { Thread.currentThread().interrupt(); break; } catch (Exception e) { - logger.error("persist task error", e); + logger.error("handle task error", e); } } logger.info("StateEventResponseWorker stopped"); @@ -141,71 +185,72 @@ public class TaskResponseService { } /** - * persist taskResponseEvent - * - * @param taskResponseEvent taskResponseEvent + * event handler thread */ - private void persist(TaskResponseEvent taskResponseEvent) { - Event event = taskResponseEvent.getEvent(); - Channel channel = taskResponseEvent.getChannel(); + class TaskResponseEventHandler extends Thread { - TaskInstance taskInstance = 
processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); - switch (event) { - case ACK: + @Override + public void run() { + logger.info("event handler thread started"); + while (Stopper.isRunning()) { try { - if (taskInstance != null) { - ExecutionStatus status = taskInstance.getState().typeIsFinished() ? taskInstance.getState() : taskResponseEvent.getState(); - processService.changeTaskState(taskInstance, status, - taskResponseEvent.getStartTime(), - taskResponseEvent.getWorkerAddress(), - taskResponseEvent.getExecutePath(), - taskResponseEvent.getLogPath(), - taskResponseEvent.getTaskInstanceId()); - } - // if taskInstance is null (maybe deleted) . retry will be meaningless . so ack success - DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); - channel.writeAndFlush(taskAckCommand.convert2Command()); + eventHandler(); + + TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + break; } catch (Exception e) { - logger.error("worker ack master error", e); - DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.FAILURE.getCode(), -1); - channel.writeAndFlush(taskAckCommand.convert2Command()); + logger.error("event handler thread error", e); } - break; - case RESULT: - try { - if (taskInstance != null) { - processService.changeTaskState(taskInstance, taskResponseEvent.getState(), - taskResponseEvent.getEndTime(), - taskResponseEvent.getProcessId(), - taskResponseEvent.getAppIds(), - taskResponseEvent.getTaskInstanceId(), - taskResponseEvent.getVarPool() - ); + } + } + + private void eventHandler() { + + for (TaskResponsePersistThread taskResponsePersistThread: processTaskResponseMap.values()) { + + if (taskResponseEventHandlerMap.containsKey(taskResponsePersistThread.getKey())) { + continue; + } + if (taskResponsePersistThread.eventSize() == 0) { + if 
(!processInstanceMap.containsKey(taskResponsePersistThread.getProcessInstanceId())) { + processTaskResponseMap.remove(taskResponsePersistThread.getProcessInstanceId()); + logger.info("remove process instance: {}", taskResponsePersistThread.getProcessInstanceId()); } - // if taskInstance is null (maybe deleted) . retry will be meaningless . so response success - DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); - channel.writeAndFlush(taskResponseCommand.convert2Command()); - } catch (Exception e) { - logger.error("worker response master error", e); - DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.FAILURE.getCode(), -1); - channel.writeAndFlush(taskResponseCommand.convert2Command()); + continue; } - break; - default: - throw new IllegalArgumentException("invalid event type : " + event); - } - WorkflowExecuteThread workflowExecuteThread = this.processInstanceMapper.get(taskResponseEvent.getProcessInstanceId()); - if (workflowExecuteThread != null) { - StateEvent stateEvent = new StateEvent(); - stateEvent.setProcessInstanceId(taskResponseEvent.getProcessInstanceId()); - stateEvent.setTaskInstanceId(taskResponseEvent.getTaskInstanceId()); - stateEvent.setExecutionStatus(taskResponseEvent.getState()); - stateEvent.setType(StateEventType.TASK_STATE_CHANGE); - workflowExecuteThread.addStateEvent(stateEvent); + logger.info("already exists handler process size:{}", taskResponseEventHandlerMap.size()); + taskResponseEventHandlerMap.put(taskResponsePersistThread.getKey(), taskResponsePersistThread); + + ListenableFuture future = listeningExecutorService.submit(taskResponsePersistThread); + FutureCallback futureCallback = new FutureCallback() { + @Override + public void onSuccess(Object o) { + logger.info("handle events {} succeeded.", taskResponsePersistThread.getProcessInstanceId()); + if 
(!processInstanceMap.containsKey(taskResponsePersistThread.getProcessInstanceId())) { + processTaskResponseMap.remove(taskResponsePersistThread.getProcessInstanceId()); + logger.info("remove process instance: {}", taskResponsePersistThread.getProcessInstanceId()); + } + taskResponseEventHandlerMap.remove(taskResponsePersistThread.getKey()); + } + + @Override + public void onFailure(Throwable throwable) { + logger.error("handle events failed: {}", throwable.getMessage()); + if (!processInstanceMap.containsKey(taskResponsePersistThread.getProcessInstanceId())) { + processTaskResponseMap.remove(taskResponsePersistThread.getProcessInstanceId()); + logger.info("remove process instance: {}", taskResponsePersistThread.getProcessInstanceId()); + } + taskResponseEventHandlerMap.remove(taskResponsePersistThread.getKey()); + } + }; + Futures.addCallback(future, futureCallback, listeningExecutorService); + } } } public BlockingQueue getEventQueue() { return eventQueue; } -} +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java index 22de8e730b41e3b028584b84b091ad2e4e6db777..ed6b29f0767c8651e419ffe49a9df077382b1ad2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java @@ -33,6 +33,7 @@ import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import 
org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; import org.apache.dolphinscheduler.server.master.config.MasterConfig; @@ -42,13 +43,15 @@ import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; import org.apache.dolphinscheduler.service.registry.RegistryClient; -import org.apache.dolphinscheduler.spi.register.RegistryConnectListener; -import org.apache.dolphinscheduler.spi.register.RegistryConnectState; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang.StringUtils; +import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -80,6 +83,7 @@ public class MasterRegistryClient { @Autowired private ProcessService processService; + @Autowired private RegistryClient registryClient; /** @@ -96,46 +100,37 @@ public class MasterRegistryClient { private ConcurrentHashMap processInstanceExecMaps; /** - * master start time + * master startup time, ms */ - private String startTime; + private long startupTime; private String localNodePath; public void init(ConcurrentHashMap processInstanceExecMaps) { - this.startTime = DateUtils.dateToString(new Date()); - this.registryClient = RegistryClient.getInstance(); + this.startupTime = System.currentTimeMillis(); this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor")); this.processInstanceExecMaps = processInstanceExecMaps; } public void start() { - String nodeLock = registryClient.getMasterStartUpLockPath(); + String nodeLock = Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS; try { // create distributed lock with the root node path of the lock space 
as /dolphinscheduler/lock/failover/startup-masters registryClient.getLock(nodeLock); // master registry registry(); - String registryPath = getMasterPath(); - registryClient.handleDeadServer(registryPath, NodeType.MASTER, Constants.DELETE_OP); - - // init system node - while (!registryClient.checkNodeExists(NetUtils.getHost(), NodeType.MASTER)) { - ThreadUtils.sleep(SLEEP_TIME_MILLIS); - } - - // self tolerant - if (registryClient.getActiveMasterNum() == 1) { - removeNodePath(null, NodeType.MASTER, true); - removeNodePath(null, NodeType.WORKER, true); - } registryClient.subscribe(REGISTRY_DOLPHINSCHEDULER_NODE, new MasterRegistryDataListener()); } catch (Exception e) { logger.error("master start up exception", e); + this.registryClient.getStoppable().stop("master start up exception"); } finally { - registryClient.releaseLock(nodeLock); + try { + registryClient.releaseLock(nodeLock); + } catch (Exception e) { + logger.error("release lock error", e); + } } } @@ -144,22 +139,62 @@ public class MasterRegistryClient { } public void closeRegistry() { - unRegistry(); + // TODO unsubscribe MasterRegistryDataListener + deregister(); } /** - * remove zookeeper node path + * remove master node path * - * @param path zookeeper node path - * @param nodeType zookeeper node type + * @param path node path + * @param nodeType node type * @param failover is failover */ - public void removeNodePath(String path, NodeType nodeType, boolean failover) { + public void removeMasterNodePath(String path, NodeType nodeType, boolean failover) { logger.info("{} node deleted : {}", nodeType, path); - String failoverPath = getFailoverLockPath(nodeType); + + if (StringUtils.isEmpty(path)) { + logger.error("server down error: empty path: {}, nodeType:{}", path, nodeType); + return; + } + + String serverHost = registryClient.getHostByEventDataPath(path); + if (StringUtils.isEmpty(serverHost)) { + logger.error("server down error: unknown path: {}, nodeType:{}", path, nodeType); + return; + } + + 
String failoverPath = getFailoverLockPath(nodeType, serverHost); try { registryClient.getLock(failoverPath); + if (!registryClient.exists(path)) { + logger.info("path: {} not exists", path); + // handle dead server + registryClient.handleDeadServer(Collections.singleton(path), nodeType, Constants.ADD_OP); + } + + //failover server + if (failover) { + failoverServerWhenDown(serverHost, nodeType); + } + } catch (Exception e) { + logger.error("{} server failover failed, host:{}", nodeType, serverHost, e); + } finally { + registryClient.releaseLock(failoverPath); + } + } + + /** + * remove worker node path + * + * @param path node path + * @param nodeType node type + * @param failover is failover + */ + public void removeWorkerNodePath(String path, NodeType nodeType, boolean failover) { + logger.info("{} node deleted : {}", nodeType, path); + try { String serverHost = null; if (!StringUtils.isEmpty(path)) { serverHost = registryClient.getHostByEventDataPath(path); @@ -167,18 +202,18 @@ public class MasterRegistryClient { logger.error("server down error: unknown path: {}", path); return; } - // handle dead server - registryClient.handleDeadServer(path, nodeType, Constants.ADD_OP); + if (!registryClient.exists(path)) { + logger.info("path: {} not exists", path); + // handle dead server + registryClient.handleDeadServer(Collections.singleton(path), nodeType, Constants.ADD_OP); + } } //failover server if (failover) { failoverServerWhenDown(serverHost, nodeType); } } catch (Exception e) { - logger.error("{} server failover failed.", nodeType); - logger.error("failover exception ", e); - } finally { - registryClient.releaseLock(failoverPath); + logger.error("{} server failover failed", nodeType, e); } } @@ -194,7 +229,7 @@ public class MasterRegistryClient { failoverMaster(serverHost); break; case WORKER: - failoverWorker(serverHost, true, true); + failoverWorker(serverHost); break; default: break; @@ -207,24 +242,28 @@ public class MasterRegistryClient { * @param nodeType 
zookeeper node type * @return fail over lock path */ - private String getFailoverLockPath(NodeType nodeType) { + public String getFailoverLockPath(NodeType nodeType, String host) { switch (nodeType) { case MASTER: - return registryClient.getMasterFailoverLockPath(); + return Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS + "/" + host; case WORKER: - return registryClient.getWorkerFailoverLockPath(); + return Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS + "/" + host; default: return ""; } } /** - * task needs failover if task start before worker starts + * task needs failover if task start before server starts * * @param taskInstance task instance * @return true if task instance need fail over */ - private boolean checkTaskInstanceNeedFailover(TaskInstance taskInstance) { + private boolean checkTaskInstanceNeedFailover(List servers, TaskInstance taskInstance) { + + // first submit: host is null + // dispatch succeed: host is not null && submit_time is null + // ACK || RESULT from worker: host is not null && start_time is not null boolean taskNeedFailover = true; @@ -232,14 +271,15 @@ public class MasterRegistryClient { if (taskInstance.getHost() == null) { return false; } - - // if the worker node exists in zookeeper, we must check the task starts after the worker - if (registryClient.checkNodeExists(taskInstance.getHost(), NodeType.WORKER)) { - //if task start after worker starts, there is no need to failover the task. - if (checkTaskAfterWorkerStart(taskInstance)) { - taskNeedFailover = false; - } + // host is not null and submit time is null, master will retry + if (taskInstance.getSubmitTime() == null) { + return false; } + //if task start after server starts, there is no need to failover the task. + if (checkTaskAfterServerStart(servers, taskInstance)) { + taskNeedFailover = false; + } + return taskNeedFailover; } @@ -267,6 +307,57 @@ public class MasterRegistryClient { return false; } + /** + * check task start after the server starts. 
+ * + * @param taskInstance task instance + * @return true if task instance start time after server start date + */ + private boolean checkTaskAfterServerStart(List servers, TaskInstance taskInstance) { + if (StringUtils.isEmpty(taskInstance.getHost())) { + return false; + } + + Date taskSubmitTime = taskInstance.getSubmitTime(); + Date serverStartTime = getServerStartupTime(servers, taskInstance.getHost()); + if (taskSubmitTime != null && serverStartTime != null && taskSubmitTime.after(serverStartTime)) { + logger.info( + "The taskInstance's submitTime: {} is after the need failover server's start time: {}, the taskInstance is newly submit, it doesn't need to failover", + DateUtils.dateToString(taskSubmitTime), + DateUtils.dateToString(serverStartTime)); + return true; + } + return false; + } + + /** + * get server startup time + */ + private Date getServerStartupTime(List servers, String host) { + if (CollectionUtils.isEmpty(servers)) { + return null; + } + Date serverStartupTime = null; + for (Server server : servers) { + if (host.equals(server.getHost() + Constants.COLON + server.getPort())) { + serverStartupTime = server.getCreateTime(); + break; + } + } + return serverStartupTime; + } + + /** + * get server startup time + */ + private Date getServerStartupTime(NodeType nodeType, String host) { + if (StringUtils.isEmpty(host)) { + return null; + } + List servers = registryClient.getServerList(nodeType); + return getServerStartupTime(servers, host); + } + /** * failover worker tasks *

@@ -275,86 +366,153 @@ public class MasterRegistryClient { * 3. failover all tasks when workerHost is null * * @param workerHost worker host - * @param needCheckWorkerAlive need check worker alive */ - private void failoverWorker(String workerHost, boolean needCheckWorkerAlive, boolean checkOwner) { - logger.info("start worker[{}] failover ...", workerHost); + private void failoverWorker(String workerHost) { + + if (StringUtils.isEmpty(workerHost)) { + return; + } + + List workerServers = registryClient.getServerList(NodeType.WORKER); + + long startTime = System.currentTimeMillis(); List needFailoverTaskInstanceList = processService.queryNeedFailoverTaskInstances(workerHost); - for (TaskInstance taskInstance : needFailoverTaskInstanceList) { - if (needCheckWorkerAlive) { - if (!checkTaskInstanceNeedFailover(taskInstance)) { - continue; - } - } + Map processInstanceCacheMap = new HashMap<>(); + logger.info("start worker[{}] failover, task list size:{}", workerHost, needFailoverTaskInstanceList.size()); - ProcessInstance processInstance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); - if (workerHost == null - || !checkOwner - || processInstance.getHost().equalsIgnoreCase(workerHost)) { - // only failover the task owned myself if worker down. 
- // failover master need handle worker at the same time + for (TaskInstance taskInstance : needFailoverTaskInstanceList) { + ProcessInstance processInstance = processInstanceCacheMap.get(taskInstance.getProcessInstanceId()); + if (processInstance == null) { + processInstance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); if (processInstance == null) { - logger.error("failover error, the process {} of task {} do not exists.", + logger.error("failover task instance error, processInstance {} of taskInstance {} is null", taskInstance.getProcessInstanceId(), taskInstance.getId()); continue; } - taskInstance.setProcessInstance(processInstance); - - TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() - .buildTaskInstanceRelatedInfo(taskInstance) - .buildProcessInstanceRelatedInfo(processInstance) - .create(); - // only kill yarn job if exists , the local thread has exited - ProcessUtils.killYarnJob(taskExecutionContext); - - taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); - processService.saveTaskInstance(taskInstance); - if (!processInstanceExecMaps.containsKey(processInstance.getId())) { - return; - } - WorkflowExecuteThread workflowExecuteThreadNotify = processInstanceExecMaps.get(processInstance.getId()); - StateEvent stateEvent = new StateEvent(); - stateEvent.setTaskInstanceId(taskInstance.getId()); - stateEvent.setType(StateEventType.TASK_STATE_CHANGE); - stateEvent.setProcessInstanceId(processInstance.getId()); - stateEvent.setExecutionStatus(taskInstance.getState()); - workflowExecuteThreadNotify.addStateEvent(stateEvent); + processInstanceCacheMap.put(processInstance.getId(), processInstance); } + if (!checkTaskInstanceNeedFailover(workerServers, taskInstance)) { + continue; + } + + // only failover the task owned myself if worker down. 
+ if (!processInstance.getHost().equalsIgnoreCase(getLocalAddress())) { + continue; + } + + logger.info("failover task instance id: {}, process instance id: {}", taskInstance.getId(), taskInstance.getProcessInstanceId()); + failoverTaskInstance(processInstance, taskInstance); } - logger.info("end worker[{}] failover ...", workerHost); + logger.info("end worker[{}] failover, useTime:{}ms", workerHost, System.currentTimeMillis() - startTime); } /** - * failover master tasks + * failover master + *

+ * failover process instance and associated task instance * * @param masterHost master host */ - private void failoverMaster(String masterHost) { - logger.info("start master failover ..."); + public void failoverMaster(String masterHost) { + + if (StringUtils.isEmpty(masterHost)) { + return; + } + + Date serverStartupTime = getServerStartupTime(NodeType.MASTER, masterHost); + // servers need to contains master hosts and worker hosts, otherwise the logic task will failover fail. + List servers = registryClient.getServerList(NodeType.WORKER); + servers.addAll(registryClient.getServerList(NodeType.MASTER)); + + long startTime = System.currentTimeMillis(); List needFailoverProcessInstanceList = processService.queryNeedFailoverProcessInstances(masterHost); + logger.info("start master[{}] failover, process list size:{}", masterHost, needFailoverProcessInstanceList.size()); - logger.info("failover process list size:{} ", needFailoverProcessInstanceList.size()); - //updateProcessInstance host is null and insert into command for (ProcessInstance processInstance : needFailoverProcessInstanceList) { - logger.info("failover process instance id: {} host:{}", processInstance.getId(), processInstance.getHost()); if (Constants.NULL.equals(processInstance.getHost())) { continue; } + + List validTaskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); + for (TaskInstance taskInstance : validTaskInstanceList) { + if (Constants.NULL.equals(taskInstance.getHost())) { + continue; + } + if (taskInstance.getState().typeIsFinished()) { + continue; + } + if (!checkTaskInstanceNeedFailover(servers, taskInstance)) { + continue; + } + logger.info("failover task instance id: {}, process instance id: {}", taskInstance.getId(), taskInstance.getProcessInstanceId()); + failoverTaskInstance(processInstance, taskInstance); + } + + if (serverStartupTime != null && processInstance.getRestartTime() != null + && processInstance.getRestartTime().after(serverStartupTime)) { 
+ continue; + } + + logger.info("failover process instance id: {}", processInstance.getId()); + //updateProcessInstance host is null and insert into command processService.processNeedFailoverProcessInstances(processInstance); } - failoverWorker(masterHost, true, false); - logger.info("master failover end"); + logger.info("master[{}] failover end, useTime:{}ms", masterHost, System.currentTimeMillis() - startTime); } - public void blockAcquireMutex() { - registryClient.getLock(registryClient.getMasterLockPath()); - } + /** + * failover task instance + *

+ * 1. kill yarn job if there are yarn jobs in tasks. + * 2. change task state from running to need failover. + * 3. try to notify local master + */ + private void failoverTaskInstance(ProcessInstance processInstance, TaskInstance taskInstance) { + if (taskInstance == null) { + logger.error("failover task instance error, taskInstance is null"); + return; + } + + if (processInstance == null) { + logger.error("failover task instance error, processInstance {} of taskInstance {} is null", + taskInstance.getProcessInstanceId(), taskInstance.getId()); + return; + } - public void releaseLock() { - registryClient.releaseLock(registryClient.getMasterLockPath()); + taskInstance.setProcessInstance(processInstance); + TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(processInstance) + .create(); + taskExecutionContext.setProcessDefineCode(processInstance.getProcessDefinitionCode()); + taskExecutionContext.setProcessDefineVersion(processInstance.getProcessDefinitionVersion()); + + if (masterConfig.getMasterKillYarnJobWhenHandleFailOver() + && !(taskInstance.isSubProcess() + || taskInstance.isDependTask() + || taskInstance.isConditionsTask() + || taskInstance.isSwitchTask())) { + // only kill yarn job if exists , the local thread has exited + ProcessUtils.killYarnJob(taskExecutionContext); + } + + taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); + processService.saveTaskInstance(taskInstance); + + WorkflowExecuteThread workflowExecuteThreadNotify = processInstanceExecMaps.get(processInstance.getId()); + if (workflowExecuteThreadNotify == null) { + logger.info("workflowExecuteThreadNotify is null, just return, task id:{},process id:{}", taskInstance.getId(), processInstance.getId()); + return; + } + StateEvent stateEvent = new StateEvent(); + stateEvent.setTaskInstanceId(taskInstance.getId()); + stateEvent.setType(StateEventType.TASK_STATE_CHANGE); + 
stateEvent.setProcessInstanceId(processInstance.getId()); + stateEvent.setExecutionStatus(taskInstance.getState()); + workflowExecuteThreadNotify.addStateEvent(stateEvent); } /** @@ -364,37 +522,54 @@ public class MasterRegistryClient { String address = NetUtils.getAddr(masterConfig.getListenPort()); localNodePath = getMasterPath(); int masterHeartbeatInterval = masterConfig.getMasterHeartbeatInterval(); - HeartBeatTask heartBeatTask = new HeartBeatTask(startTime, + HeartBeatTask heartBeatTask = new HeartBeatTask(startupTime, masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory(), Sets.newHashSet(getMasterPath()), Constants.MASTER_TYPE, registryClient); - registryClient.persistEphemeral(localNodePath, heartBeatTask.heartBeatInfo()); - registryClient.addConnectionStateListener(new MasterRegistryConnectStateListener()); + // remove before persist + registryClient.remove(localNodePath); + registryClient.persistEphemeral(localNodePath, heartBeatTask.getHeartBeatInfo()); + + while (!registryClient.checkNodeExists(NetUtils.getHost(), NodeType.MASTER)) { + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + + // sleep 1s, waiting master failover remove + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + + // delete dead server + registryClient.handleDeadServer(Collections.singleton(localNodePath), NodeType.MASTER, Constants.DELETE_OP); + + registryClient.addConnectionStateListener(this::handleConnectionState); this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS); logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval); } - class MasterRegistryConnectStateListener implements RegistryConnectListener { - - @Override - public void notify(RegistryConnectState newState) { - if (RegistryConnectState.RECONNECTED == newState) { - registryClient.persistEphemeral(localNodePath, ""); - } - if (RegistryConnectState.SUSPENDED == newState) { + 
public void handleConnectionState(ConnectionState state) { + switch (state) { + case CONNECTED: + logger.debug("registry connection state is {}", state); + break; + case SUSPENDED: + logger.warn("registry connection state is {}, ready to retry connection", state); + break; + case RECONNECTED: + logger.debug("registry connection state is {}, clean the node info", state); registryClient.persistEphemeral(localNodePath, ""); - } + break; + case DISCONNECTED: + logger.warn("registry connection state is {}, ready to stop myself", state); + registryClient.getStoppable().stop("registry connection state is DISCONNECTED, stop myself"); + break; + default: } } - /** - * remove registry info - */ - public void unRegistry() { + public void deregister() { try { String address = getLocalAddress(); String localNodePath = getMasterPath(); @@ -419,8 +594,8 @@ public class MasterRegistryClient { /** * get local address */ - private String getLocalAddress() { + public String getLocalAddress() { return NetUtils.getAddr(masterConfig.getListenPort()); } -} +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java index c712ac096dce20ed81c2384d26bb7f5a58b37e68..361f09f10b2a7467a62d3d1e4632c0c19386aff3 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java @@ -22,68 +22,63 @@ import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHED import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.NodeType; +import org.apache.dolphinscheduler.registry.api.Event; +import 
org.apache.dolphinscheduler.registry.api.SubscribeListener; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.spi.register.DataChangeEvent; -import org.apache.dolphinscheduler.spi.register.SubscribeListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.base.Strings; + public class MasterRegistryDataListener implements SubscribeListener { private static final Logger logger = LoggerFactory.getLogger(MasterRegistryDataListener.class); - private MasterRegistryClient masterRegistryClient; + private final MasterRegistryClient masterRegistryClient; public MasterRegistryDataListener() { masterRegistryClient = SpringApplicationContext.getBean(MasterRegistryClient.class); } - @Override - public void notify(String path, DataChangeEvent event) { + public void notify(Event event) { + final String path = event.path(); + if (Strings.isNullOrEmpty(path)) { + return; + } //monitor master if (path.startsWith(REGISTRY_DOLPHINSCHEDULER_MASTERS + Constants.SINGLE_SLASH)) { - handleMasterEvent(event, path); + handleMasterEvent(event); } else if (path.startsWith(REGISTRY_DOLPHINSCHEDULER_WORKERS + Constants.SINGLE_SLASH)) { //monitor worker - handleWorkerEvent(event, path); + handleWorkerEvent(event); } } - /** - * monitor master - * - * @param event event - * @param path path - */ - public void handleMasterEvent(DataChangeEvent event, String path) { - switch (event) { + public void handleMasterEvent(Event event) { + final String path = event.path(); + switch (event.type()) { case ADD: logger.info("master node added : {}", path); break; case REMOVE: - masterRegistryClient.removeNodePath(path, NodeType.MASTER, true); + masterRegistryClient.removeMasterNodePath(path, NodeType.MASTER, true); break; default: break; } } - /** - * monitor worker - * - * @param event event - * @param path path - */ - public void handleWorkerEvent(DataChangeEvent event, String path) { - switch (event) { + public 
void handleWorkerEvent(Event event) { + final String path = event.path(); + switch (event.type()) { case ADD: logger.info("worker node added : {}", path); break; case REMOVE: logger.info("worker node deleted : {}", path); - masterRegistryClient.removeNodePath(path, NodeType.WORKER, true); + masterRegistryClient.removeWorkerNodePath(path, NodeType.WORKER, true); break; default: break; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java index 09f4cc243365a5038602a4631cae0007be4d62e9..075c573415a62a10f63956ee76c121ec249bd05a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java @@ -27,16 +27,18 @@ import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.registry.api.Event; +import org.apache.dolphinscheduler.registry.api.Event.Type; +import org.apache.dolphinscheduler.registry.api.SubscribeListener; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.apache.dolphinscheduler.service.queue.MasterPriorityQueue; import org.apache.dolphinscheduler.service.registry.RegistryClient; -import org.apache.dolphinscheduler.spi.register.DataChangeEvent; -import org.apache.dolphinscheduler.spi.register.SubscribeListener; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -49,6 +51,7 @@ import 
java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; import javax.annotation.PreDestroy; @@ -101,10 +104,8 @@ public class ServerNodeManager implements InitializingBean { */ private ScheduledExecutorService executorService; - /** - * zk client - */ - private RegistryClient registryClient = RegistryClient.getInstance(); + @Autowired + private RegistryClient registryClient; /** * eg : /node/worker/group/127.0.0.1:xxx @@ -153,11 +154,11 @@ public class ServerNodeManager implements InitializingBean { */ executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ServerNodeManagerExecutor")); executorService.scheduleWithFixedDelay(new WorkerNodeInfoAndGroupDbSyncTask(), 0, 10, TimeUnit.SECONDS); - /** + /* * init MasterNodeListener listener */ registryClient.subscribe(REGISTRY_DOLPHINSCHEDULER_MASTERS, new MasterDataListener()); - /** + /* * init WorkerNodeListener listener */ registryClient.subscribe(REGISTRY_DOLPHINSCHEDULER_WORKERS, new WorkerDataListener()); @@ -167,15 +168,15 @@ public class ServerNodeManager implements InitializingBean { * load nodes from zookeeper */ public void load() { - /** + /* * master nodes from zookeeper */ updateMasterNodes(); - /** + /* * worker group nodes from zookeeper */ - Set workerGroups = registryClient.getWorkerGroupDirectly(); + Collection workerGroups = registryClient.getWorkerGroupDirectly(); for (String workerGroup : workerGroups) { syncWorkerGroupNodes(workerGroup, registryClient.getWorkerGroupNodesDirectly(workerGroup)); } @@ -188,26 +189,30 @@ public class ServerNodeManager implements InitializingBean { @Override public void run() { - // sync worker node info - Map newWorkerNodeInfo = registryClient.getServerMaps(NodeType.WORKER, true); - syncWorkerNodeInfo(newWorkerNodeInfo); - - // sync worker group nodes from database - List workerGroupList 
= workerGroupMapper.queryAllWorkerGroup(); - if (CollectionUtils.isNotEmpty(workerGroupList)) { - for (WorkerGroup wg : workerGroupList) { - String workerGroup = wg.getName(); - Set nodes = new HashSet<>(); - String[] addrs = wg.getAddrList().split(Constants.COMMA); - for (String addr : addrs) { - if (newWorkerNodeInfo.containsKey(addr)) { - nodes.add(addr); + try { + // sync worker node info + Map newWorkerNodeInfo = registryClient.getServerMaps(NodeType.WORKER, true); + syncAllWorkerNodeInfo(newWorkerNodeInfo); + + // sync worker group nodes from database + List workerGroupList = workerGroupMapper.queryAllWorkerGroup(); + if (CollectionUtils.isNotEmpty(workerGroupList)) { + for (WorkerGroup wg : workerGroupList) { + String workerGroup = wg.getName(); + Set nodes = new HashSet<>(); + String[] addrs = wg.getAddrList().split(Constants.COMMA); + for (String addr : addrs) { + if (newWorkerNodeInfo.containsKey(addr)) { + nodes.add(addr); + } + } + if (!nodes.isEmpty()) { + syncWorkerGroupNodes(workerGroup, nodes); } - } - if (!nodes.isEmpty()) { - syncWorkerGroupNodes(workerGroup, nodes); } } + } catch (Exception e) { + logger.error("WorkerNodeInfoAndGroupDbSyncTask error:", e); } } } @@ -218,21 +223,32 @@ public class ServerNodeManager implements InitializingBean { class WorkerDataListener implements SubscribeListener { @Override - public void notify(String path, DataChangeEvent dataChangeEvent) { + public void notify(Event event) { + final String path = event.path(); + final Type type = event.type(); + final String data = event.data(); if (registryClient.isWorkerPath(path)) { try { - if (dataChangeEvent == DataChangeEvent.ADD) { + if (type == Type.ADD) { logger.info("worker group node : {} added.", path); String group = parseGroup(path); - Set currentNodes = registryClient.getWorkerGroupNodesDirectly(group); + Collection currentNodes = registryClient.getWorkerGroupNodesDirectly(group); logger.info("currentNodes : {}", currentNodes); syncWorkerGroupNodes(group, 
currentNodes); - } else if (dataChangeEvent == DataChangeEvent.REMOVE) { + } else if (type == Type.REMOVE) { logger.info("worker group node : {} down.", path); String group = parseGroup(path); - Set currentNodes = registryClient.getWorkerGroupNodesDirectly(group); + Collection currentNodes = registryClient.getWorkerGroupNodesDirectly(group); syncWorkerGroupNodes(group, currentNodes); alertDao.sendServerStopedAlert(1, path, "WORKER"); + } else if (type == Type.UPDATE) { + logger.debug("worker group node : {} update, data: {}", path, data); + String group = parseGroup(path); + Collection currentNodes = registryClient.getWorkerGroupNodesDirectly(group); + syncWorkerGroupNodes(group, currentNodes); + + String node = parseNode(path); + syncSingleWorkerNodeInfo(node, data); } } catch (IllegalArgumentException ex) { logger.warn(ex.getMessage()); @@ -251,21 +267,27 @@ public class ServerNodeManager implements InitializingBean { return parts[parts.length - 2]; } + private String parseNode(String path) { + String[] parts = path.split("/"); + if (parts.length < WORKER_LISTENER_CHECK_LENGTH) { + throw new IllegalArgumentException(String.format("worker group path : %s is not valid, ignore", path)); + } + return parts[parts.length - 1]; + } } - /** - * master node listener - */ class MasterDataListener implements SubscribeListener { @Override - public void notify(String path, DataChangeEvent dataChangeEvent) { + public void notify(Event event) { + final String path = event.path(); + final Type type = event.type(); if (registryClient.isMasterPath(path)) { try { - if (dataChangeEvent.equals(DataChangeEvent.ADD)) { + if (type.equals(Type.ADD)) { logger.info("master node : {} added.", path); updateMasterNodes(); } - if (dataChangeEvent.equals(DataChangeEvent.REMOVE)) { + if (type.equals(Type.REMOVE)) { logger.info("master node : {} down.", path); updateMasterNodes(); alertDao.sendServerStopedAlert(1, path, "MASTER"); @@ -280,10 +302,10 @@ public class ServerNodeManager implements 
InitializingBean { private void updateMasterNodes() { SLOT_LIST.clear(); this.masterNodes.clear(); - String nodeLock = registryClient.getMasterLockPath(); + String nodeLock = Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_MASTERS; try { registryClient.getLock(nodeLock); - Set currentNodes = registryClient.getMasterNodesDirectly(); + Collection currentNodes = registryClient.getMasterNodesDirectly(); List masterNodes = registryClient.getServerList(NodeType.MASTER); syncMasterNodes(currentNodes, masterNodes); } catch (Exception e) { @@ -313,7 +335,7 @@ public class ServerNodeManager implements InitializingBean { * * @param nodes master nodes */ - private void syncMasterNodes(Set nodes, List masterNodes) { + private void syncMasterNodes(Collection nodes, List masterNodes) { masterLock.lock(); try { this.masterNodes.addAll(nodes); @@ -338,10 +360,9 @@ public class ServerNodeManager implements InitializingBean { * @param workerGroup worker group * @param nodes worker nodes */ - private void syncWorkerGroupNodes(String workerGroup, Set nodes) { + private void syncWorkerGroupNodes(String workerGroup, Collection nodes) { workerGroupLock.lock(); try { - workerGroup = workerGroup.toLowerCase(); Set workerNodes = workerGroupNodes.getOrDefault(workerGroup, new HashSet<>()); workerNodes.clear(); workerNodes.addAll(nodes); @@ -367,10 +388,10 @@ public class ServerNodeManager implements InitializingBean { if (StringUtils.isEmpty(workerGroup)) { workerGroup = Constants.DEFAULT_WORKER_GROUP; } - workerGroup = workerGroup.toLowerCase(); Set nodes = workerGroupNodes.get(workerGroup); if (CollectionUtils.isNotEmpty(nodes)) { - return Collections.unmodifiableSet(nodes); + // avoid ConcurrentModificationException + return Collections.unmodifiableSet(nodes.stream().collect(Collectors.toSet())); } return nodes; } finally { @@ -407,7 +428,7 @@ public class ServerNodeManager implements InitializingBean { * * @param newWorkerNodeInfo new worker node info */ - private void syncWorkerNodeInfo(Map 
newWorkerNodeInfo) { + private void syncAllWorkerNodeInfo(Map newWorkerNodeInfo) { workerNodeInfoLock.lock(); try { workerNodeInfo.clear(); @@ -417,6 +438,18 @@ public class ServerNodeManager implements InitializingBean { } } + /** + * sync single worker node info + */ + private void syncSingleWorkerNodeInfo(String node, String info) { + workerNodeInfoLock.lock(); + try { + workerNodeInfo.put(node, info); + } finally { + workerNodeInfoLock.unlock(); + } + } + /** * destroy */ diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java index 335684272033bff197a02d5e8e4ad29a73ffcf49..2279cad7b16d74de04ffb4c6ef376a4b9059ad1d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.master.runner; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.StateEvent; import org.apache.dolphinscheduler.common.enums.StateEventType; import org.apache.dolphinscheduler.common.thread.Stopper; @@ -118,11 +119,12 @@ public class EventExecuteService extends Thread { for (WorkflowExecuteThread workflowExecuteThread : this.processInstanceExecMaps.values()) { if (workflowExecuteThread.eventSize() == 0 || StringUtils.isEmpty(workflowExecuteThread.getKey()) + || !workflowExecuteThread.isStart() || eventHandlerMap.containsKey(workflowExecuteThread.getKey())) { continue; } int processInstanceId = workflowExecuteThread.getProcessInstance().getId(); - logger.info("handle process instance : {} events, count:{}", 
+ logger.info("handle process instance : {} , events count:{}", processInstanceId, workflowExecuteThread.eventSize()); logger.info("already exists handler process size:{}", this.eventHandlerMap.size()); @@ -131,7 +133,7 @@ public class EventExecuteService extends Thread { FutureCallback futureCallback = new FutureCallback() { @Override public void onSuccess(Object o) { - if (workflowExecuteThread.workFlowFinish()) { + if (workflowExecuteThread.workFlowFinish() && workflowExecuteThread.activeTaskFinish()) { processInstanceExecMaps.remove(processInstanceId); notifyProcessChanged(); logger.info("process instance {} finished.", processInstanceId); @@ -145,9 +147,11 @@ public class EventExecuteService extends Thread { } private void notifyProcessChanged() { - Map fatherMaps - = processService.notifyProcessList(processInstanceId, 0); + if (Flag.NO == workflowExecuteThread.getProcessInstance().getIsSubProcess()) { + return; + } + Map fatherMaps = processService.notifyProcessList(processInstanceId); for (ProcessInstance processInstance : fatherMaps.keySet()) { String address = NetUtils.getAddr(masterConfig.getListenPort()); if (processInstance.getHost().equalsIgnoreCase(address)) { @@ -186,12 +190,13 @@ public class EventExecuteService extends Thread { StateEventChangeCommand stateEventChangeCommand = new StateEventChangeCommand( processInstanceId, 0, workflowExecuteThread.getProcessInstance().getState(), processInstance.getId(), taskInstance.getId() ); - stateEventCallbackService.sendResult(address, port, stateEventChangeCommand.convert2Command()); } @Override public void onFailure(Throwable throwable) { + logger.info("handle events {} failed.", processInstanceId); + logger.info("handle events failed.", throwable); } }; Futures.addCallback(future, futureCallback, this.listeningExecutorService); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java new file mode 100644 index 0000000000000000000000000000000000000000..770062f3f1ba14e89bc9a65d581eb191d9279e26 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.NodeType; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.registry.MasterRegistryClient; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.registry.RegistryClient; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.Iterator; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +@Service +public class FailoverExecuteThread extends Thread { + + private static final Logger logger = LoggerFactory.getLogger(FailoverExecuteThread.class); + + @Autowired + private MasterRegistryClient masterRegistryClient; + + @Autowired + private RegistryClient registryClient; + + @Autowired + private MasterConfig masterConfig; + + /** + * process service + */ + @Autowired + private ProcessService processService; + + @Override + public synchronized void start() { + super.setName("FailoverExecuteThread"); + super.start(); + } + + @Override + public void run() { + while (Stopper.isRunning()) { + logger.info("failover execute started"); + try { + List hosts = getNeedFailoverMasterServers(); + if (CollectionUtils.isEmpty(hosts)) { + continue; + } + logger.info("need failover hosts:{}", hosts); + + for (String host : hosts) { + String failoverPath = masterRegistryClient.getFailoverLockPath(NodeType.MASTER, host); + try { + registryClient.getLock(failoverPath); + masterRegistryClient.failoverMaster(host); + } catch (Exception e) { + logger.error("{} server failover failed, host:{}", NodeType.MASTER, 
host, e); + } finally { + registryClient.releaseLock(failoverPath); + } + } + } catch (Exception e) { + logger.error("failover execute error", e); + } finally { + ThreadUtils.sleep((long) Constants.SLEEP_TIME_MILLIS * masterConfig.getFailoverInterval() * 60); + } + } + } + + private List getNeedFailoverMasterServers() { + // failover myself && failover dead masters + List hosts = processService.queryNeedFailoverProcessInstanceHost(); + + Iterator iterator = hosts.iterator(); + while (iterator.hasNext()) { + String host = iterator.next(); + if (registryClient.checkNodeExists(host, NodeType.MASTER)) { + if (!host.equals(masterRegistryClient.getLocalAddress())) { + iterator.remove(); + } + } + } + return hosts; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecService.java new file mode 100644 index 0000000000000000000000000000000000000000..edb5e6611d98489d71a4750e5b4d7f5ed7cab8ac --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecService.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.runner; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; + +public class MasterExecService { + + /** + * logger of MasterExecService + */ + private static final Logger logger = LoggerFactory.getLogger(MasterExecService.class); + + /** + * master exec service + */ + private final ThreadPoolExecutor execService; + + private final ListeningExecutorService listeningExecutorService; + + /** + * start process failed map + */ + private final ConcurrentHashMap startProcessFailedMap; + + private final ConcurrentHashMap filterMap = new ConcurrentHashMap<>(); + + public MasterExecService(ConcurrentHashMap startProcessFailedMap,ThreadPoolExecutor execService) { + this.startProcessFailedMap = startProcessFailedMap; + this.execService = execService; + this.listeningExecutorService = MoreExecutors.listeningDecorator(this.execService); + } + + public void execute(WorkflowExecuteThread workflowExecuteThread) { + if (workflowExecuteThread == null + || workflowExecuteThread.getProcessInstance() == null + || workflowExecuteThread.isStart() + || filterMap.containsKey(workflowExecuteThread.getProcessInstance().getId())) { + return; + } + Integer processInstanceId = workflowExecuteThread.getProcessInstance().getId(); + filterMap.put(processInstanceId, workflowExecuteThread); + ListenableFuture future = this.listeningExecutorService.submit(workflowExecuteThread); + FutureCallback 
futureCallback = new FutureCallback() { + @Override + public void onSuccess(Object o) { + if (!workflowExecuteThread.isStart()) { + startProcessFailedMap.putIfAbsent(processInstanceId, workflowExecuteThread); + } else { + startProcessFailedMap.remove(processInstanceId); + } + filterMap.remove(processInstanceId); + } + + @Override + public void onFailure(Throwable throwable) { + logger.error("handle events {} failed", processInstanceId, throwable); + if (!workflowExecuteThread.isStart()) { + startProcessFailedMap.putIfAbsent(processInstanceId, workflowExecuteThread); + } else { + startProcessFailedMap.remove(processInstanceId); + } + filterMap.remove(processInstanceId); + } + }; + Futures.addCallback(future, futureCallback, this.listeningExecutorService); + } + + public void shutdown() { + this.execService.shutdown(); + } + + public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { + return this.execService.awaitTermination(timeout, unit); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java index faa4eb081c480292ed5dc64313b9bd8eebc8c63c..266efada1dd836c394c91e6fad7b9ad732819c60 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java @@ -23,17 +23,20 @@ import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import 
org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.server.master.registry.MasterRegistryClient; import org.apache.dolphinscheduler.server.master.registry.ServerNodeManager; import org.apache.dolphinscheduler.service.alert.ProcessAlertManager; import org.apache.dolphinscheduler.service.process.ProcessService; +import java.util.HashMap; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadPoolExecutor; @@ -55,6 +58,12 @@ public class MasterSchedulerService extends Thread { */ private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerService.class); + /** + * handle task event + */ + @Autowired + private TaskResponseService taskResponseService; + /** * dolphinscheduler database interface */ @@ -90,13 +99,37 @@ public class MasterSchedulerService extends Thread { /** * master exec service */ - private ThreadPoolExecutor masterExecService; + private MasterExecService masterExecService; + /** + * start process failed map + */ + private final ConcurrentHashMap startProcessFailedMap = new ConcurrentHashMap<>(); + /** + * process instance execution list + */ private ConcurrentHashMap processInstanceExecMaps; + /** + * process timeout check list + */ ConcurrentHashMap processTimeoutCheckList = new ConcurrentHashMap<>(); + + /** + * task time out checkout list + */ ConcurrentHashMap taskTimeoutCheckList = new ConcurrentHashMap<>(); + /** + * task retry check list + */ + ConcurrentHashMap taskRetryCheckList = new ConcurrentHashMap<>(); + + /** + * dep task check list + */ + ConcurrentHashMap 
depStateCheckList = new ConcurrentHashMap<>(); + private StateWheelExecuteThread stateWheelExecuteThread; /** @@ -104,15 +137,21 @@ public class MasterSchedulerService extends Thread { */ public void init(ConcurrentHashMap processInstanceExecMaps) { this.processInstanceExecMaps = processInstanceExecMaps; - this.masterExecService = (ThreadPoolExecutor) ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread", masterConfig.getMasterExecThreads()); + this.masterExecService = new MasterExecService(this.startProcessFailedMap, + (ThreadPoolExecutor) ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread", masterConfig.getMasterExecThreads())); NettyClientConfig clientConfig = new NettyClientConfig(); this.nettyRemotingClient = new NettyRemotingClient(clientConfig); - stateWheelExecuteThread = new StateWheelExecuteThread(processTimeoutCheckList, + stateWheelExecuteThread = new StateWheelExecuteThread( + masterExecService, + processService, + startProcessFailedMap, + processTimeoutCheckList, taskTimeoutCheckList, + taskRetryCheckList, + depStateCheckList, this.processInstanceExecMaps, masterConfig.getStateWheelInterval() * Constants.SLEEP_TIME_MILLIS); - } @Override @@ -160,28 +199,20 @@ public class MasterSchedulerService extends Thread { /** * 1. get command by slot * 2. 
donot handle command if slot is empty - * - * @throws Exception */ private void scheduleProcess() throws Exception { - int activeCount = masterExecService.getActiveCount(); // make sure to scan and delete command table in one transaction Command command = findOneCommand(); if (command != null) { logger.info("find one command: id: {}, type: {}", command.getId(), command.getCommandType()); try { - ProcessInstance processInstance = processService.handleCommand(logger, - getLocalAddress(), - this.masterConfig.getMasterExecThreads() - activeCount, command); + ProcessInstance processInstance = processService.handleCommand(logger, getLocalAddress(), command); + if (processInstance != null) { - WorkflowExecuteThread workflowExecuteThread = new WorkflowExecuteThread( - processInstance - , processService - , nettyExecutorManager - , processAlertManager - , masterConfig - , taskTimeoutCheckList); + WorkflowExecuteThread workflowExecuteThread = new WorkflowExecuteThread(processInstance, + taskResponseService, processService, nettyExecutorManager, processAlertManager, + masterConfig, taskTimeoutCheckList, taskRetryCheckList, depStateCheckList); this.processInstanceExecMaps.put(processInstance.getId(), workflowExecuteThread); if (processInstance.getTimeout() > 0) { @@ -208,22 +239,20 @@ public class MasterSchedulerService extends Thread { if (ServerNodeManager.MASTER_SIZE == 0) { return null; } + logger.debug("master size:{}",ServerNodeManager.MASTER_SIZE); List commandList = processService.findCommandPage(ServerNodeManager.MASTER_SIZE, pageNumber); if (commandList.size() == 0) { return null; } for (Command command : commandList) { int slot = ServerNodeManager.getSlot(); - if (ServerNodeManager.MASTER_SIZE != 0 - && command.getId() % ServerNodeManager.MASTER_SIZE == slot) { + if (ServerNodeManager.MASTER_SIZE != 0 && command.getId() % ServerNodeManager.MASTER_SIZE == slot) { result = command; break; } } if (result != null) { - logger.info("find command {}, slot:{} :", - 
result.getId(), - ServerNodeManager.getSlot()); + logger.info("find command {}, slot:{} :", result.getId(), ServerNodeManager.getSlot()); break; } pageNumber += 1; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java index f205e2ddcee64d3ce56fdaeb3637ed6a9db74db5..e0e5e64b6ce9a67c67fe1256192369ef665ad579 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java @@ -26,14 +26,13 @@ import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; - +import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.hadoop.util.ThreadUtil; - -import java.util.concurrent.ConcurrentHashMap; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.concurrent.ConcurrentHashMap; + /** * 1. timeout check wheel * 2. 
dependent task check wheel @@ -42,20 +41,43 @@ public class StateWheelExecuteThread extends Thread { private static final Logger logger = LoggerFactory.getLogger(StateWheelExecuteThread.class); - ConcurrentHashMap processInstanceCheckList; - ConcurrentHashMap taskInstanceCheckList; + private ProcessService processService; + private ConcurrentHashMap processInstanceTimeoutCheckList; + private ConcurrentHashMap taskInstanceTimeoutCheckList; + private ConcurrentHashMap taskInstanceRetryCheckList; private ConcurrentHashMap processInstanceExecMaps; + private ConcurrentHashMap depStateCheckList; + /** + * start process failed map + */ + private final ConcurrentHashMap startProcessFailedMap; private int stateCheckIntervalSecs; - public StateWheelExecuteThread(ConcurrentHashMap processInstances, - ConcurrentHashMap taskInstances, - ConcurrentHashMap processInstanceExecMaps, - int stateCheckIntervalSecs) { - this.processInstanceCheckList = processInstances; - this.taskInstanceCheckList = taskInstances; + /** + * master exec service + */ + private MasterExecService masterExecService; + + public StateWheelExecuteThread( + MasterExecService masterExecService, + ProcessService processService, + ConcurrentHashMap startProcessFailedMap, + ConcurrentHashMap processInstanceTimeoutCheckList, + ConcurrentHashMap taskInstanceTimeoutCheckList, + ConcurrentHashMap taskInstanceRetryCheckList, + ConcurrentHashMap depStateCheckList, + ConcurrentHashMap processInstanceExecMaps, + int stateCheckIntervalSecs) { + this.masterExecService = masterExecService; + this.processService = processService; + this.startProcessFailedMap = startProcessFailedMap; + this.processInstanceTimeoutCheckList = processInstanceTimeoutCheckList; + this.taskInstanceTimeoutCheckList = taskInstanceTimeoutCheckList; + this.taskInstanceRetryCheckList = taskInstanceRetryCheckList; this.processInstanceExecMaps = processInstanceExecMaps; this.stateCheckIntervalSecs = stateCheckIntervalSecs; + this.depStateCheckList = 
depStateCheckList; } @Override @@ -64,8 +86,11 @@ public class StateWheelExecuteThread extends Thread { logger.info("state wheel thread start"); while (Stopper.isRunning()) { try { - checkProcess(); - checkTask(); + check4StartProcessFailed(); + checkTask4Timeout(); + checkTask4Retry(); + checkProcess4Timeout(); + checkDepTask(); } catch (Exception e) { logger.error("state wheel thread check error:", e); } @@ -73,82 +98,153 @@ public class StateWheelExecuteThread extends Thread { } } - public boolean addProcess(ProcessInstance processInstance) { - this.processInstanceCheckList.put(processInstance.getId(), processInstance); - return true; + private void checkDepTask() { + if (depStateCheckList.isEmpty()) { + return; + } + for (TaskInstance taskInstance : depStateCheckList.values()) { + WorkflowExecuteThread workflowExecuteThread = processInstanceExecMaps.get(taskInstance.getProcessInstanceId()); + if (workflowExecuteThread == null) { + logger.warn("can not find workflowExecuteThread, this check event will remove, processInstanceId:{}, taskId:{}", + taskInstance.getProcessInstanceId(), taskInstance.getId()); + depStateCheckList.remove(taskInstance.getId()); + continue; + } + ProcessInstance processInstance = workflowExecuteThread.getProcessInstance(); + if (processInstance.getState() == ExecutionStatus.READY_STOP) { + depStateCheckList.remove(taskInstance.getId()); + break; + } + if (taskInstance.getState().typeIsFinished()) { + depStateCheckList.remove(taskInstance.getId()); + continue; + } + addTaskStateChangeEvent(taskInstance); + } } - public boolean addTask(TaskInstance taskInstance) { - this.taskInstanceCheckList.put(taskInstance.getId(), taskInstance); - return true; + public void addProcess4TimeoutCheck(ProcessInstance processInstance) { + this.processInstanceTimeoutCheckList.put(processInstance.getId(), processInstance); + } + + public void addTask4TimeoutCheck(TaskInstance taskInstance) { + this.taskInstanceTimeoutCheckList.put(taskInstance.getId(), 
taskInstance); + } + + public void addTask4RetryCheck(TaskInstance taskInstance) { + this.taskInstanceRetryCheckList.put(taskInstance.getId(), taskInstance); } - private void checkTask() { - if (taskInstanceCheckList.isEmpty()) { + public void checkTask4Timeout() { + if (taskInstanceTimeoutCheckList.isEmpty()) { return; } - - for (TaskInstance taskInstance : this.taskInstanceCheckList.values()) { + for (TaskInstance taskInstance : taskInstanceTimeoutCheckList.values()) { + WorkflowExecuteThread workflowExecuteThread = processInstanceExecMaps.get(taskInstance.getProcessInstanceId()); + if (workflowExecuteThread == null) { + logger.warn("can not find workflowExecuteThread, this check event will remove, processInstanceId:{}, taskId:{}", + taskInstance.getProcessInstanceId(), taskInstance.getId()); + taskInstanceTimeoutCheckList.remove(taskInstance.getId()); + continue; + } + ProcessInstance processInstance = workflowExecuteThread.getProcessInstance(); + if (processInstance.getState() == ExecutionStatus.READY_STOP) { + taskInstanceTimeoutCheckList.remove(taskInstance.getId()); + break; + } if (TimeoutFlag.OPEN == taskInstance.getTaskDefine().getTimeoutFlag()) { + if (taskInstance.getStartTime() == null) { + TaskInstance newTaskInstance = processService.findTaskInstanceById(taskInstance.getId()); + taskInstance.setStartTime(newTaskInstance.getStartTime()); + } long timeRemain = DateUtils.getRemainTime(taskInstance.getStartTime(), taskInstance.getTaskDefine().getTimeout() * Constants.SEC_2_MINUTES_TIME_UNIT); - if (0 <= timeRemain && processTimeout(taskInstance)) { - taskInstanceCheckList.remove(taskInstance.getId()); - return; + if (timeRemain < 0) { + addTaskTimeoutEvent(taskInstance); + taskInstanceTimeoutCheckList.remove(taskInstance.getId()); } } - if (taskInstance.isSubProcess() || taskInstance.isDependTask()) { - processDependCheck(taskInstance); - } } } - private void checkProcess() { - if (processInstanceCheckList.isEmpty()) { + private void checkTask4Retry() { 
+ if (taskInstanceRetryCheckList.isEmpty()) { return; } - for (ProcessInstance processInstance : this.processInstanceCheckList.values()) { - - long timeRemain = DateUtils.getRemainTime(processInstance.getStartTime(), processInstance.getTimeout() * Constants.SEC_2_MINUTES_TIME_UNIT); - if (0 <= timeRemain && processTimeout(processInstance)) { - processInstanceCheckList.remove(processInstance.getId()); + for (TaskInstance taskInstance : this.taskInstanceRetryCheckList.values()) { + WorkflowExecuteThread workflowExecuteThread = processInstanceExecMaps.get(taskInstance.getProcessInstanceId()); + if (workflowExecuteThread == null) { + logger.warn("can not find workflowExecuteThread, this check event will remove, processInstanceId:{}, taskId:{}", + taskInstance.getProcessInstanceId(), taskInstance.getId()); + taskInstanceRetryCheckList.remove(taskInstance.getId()); + continue; + } + ProcessInstance processInstance = workflowExecuteThread.getProcessInstance(); + if (processInstance.getState() == ExecutionStatus.READY_STOP) { + taskInstanceRetryCheckList.remove(taskInstance.getId()); + break; + } + if (((taskInstance.getRetryTimes() <= taskInstance.getMaxRetryTimes() && taskInstance.isDependTask()) + || (taskInstance.getState().typeIsFinished() && taskInstance.taskCanRetry())) && taskInstance.retryTaskIntervalOverTime()) { + addTaskStateChangeEvent(taskInstance); + taskInstanceRetryCheckList.remove(taskInstance.getId()); } } } - private void putEvent(StateEvent stateEvent) { - - if (!processInstanceExecMaps.containsKey(stateEvent.getProcessInstanceId())) { + private void checkProcess4Timeout() { + if (processInstanceTimeoutCheckList.isEmpty()) { return; } - WorkflowExecuteThread workflowExecuteThread = this.processInstanceExecMaps.get(stateEvent.getProcessInstanceId()); - workflowExecuteThread.addStateEvent(stateEvent); + for (ProcessInstance processInstance : this.processInstanceTimeoutCheckList.values()) { + long timeRemain = 
DateUtils.getRemainTime(processInstance.getStartTime(), processInstance.getTimeout() * Constants.SEC_2_MINUTES_TIME_UNIT); + if (timeRemain < 0) { + addProcessTimeoutEvent(processInstance); + processInstanceTimeoutCheckList.remove(processInstance.getId()); + } + } } - private boolean processDependCheck(TaskInstance taskInstance) { + private boolean addTaskStateChangeEvent(TaskInstance taskInstance) { StateEvent stateEvent = new StateEvent(); stateEvent.setType(StateEventType.TASK_STATE_CHANGE); stateEvent.setProcessInstanceId(taskInstance.getProcessInstanceId()); stateEvent.setTaskInstanceId(taskInstance.getId()); stateEvent.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); - putEvent(stateEvent); + addEvent(stateEvent); return true; } - private boolean processTimeout(TaskInstance taskInstance) { + private boolean addTaskTimeoutEvent(TaskInstance taskInstance) { StateEvent stateEvent = new StateEvent(); stateEvent.setType(StateEventType.TASK_TIMEOUT); stateEvent.setProcessInstanceId(taskInstance.getProcessInstanceId()); stateEvent.setTaskInstanceId(taskInstance.getId()); - putEvent(stateEvent); + addEvent(stateEvent); return true; } - private boolean processTimeout(ProcessInstance processInstance) { + private boolean addProcessTimeoutEvent(ProcessInstance processInstance) { StateEvent stateEvent = new StateEvent(); stateEvent.setType(StateEventType.PROCESS_TIMEOUT); stateEvent.setProcessInstanceId(processInstance.getId()); - putEvent(stateEvent); + addEvent(stateEvent); return true; } + private void addEvent(StateEvent stateEvent) { + if (!processInstanceExecMaps.containsKey(stateEvent.getProcessInstanceId())) { + return; + } + WorkflowExecuteThread workflowExecuteThread = this.processInstanceExecMaps.get(stateEvent.getProcessInstanceId()); + workflowExecuteThread.addStateEvent(stateEvent); + } + + private void check4StartProcessFailed() { + if (startProcessFailedMap.isEmpty()) { + return; + } + for (WorkflowExecuteThread workflowExecuteThread : 
this.startProcessFailedMap.values()) { + masterExecService.execute(workflowExecuteThread); + } + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThread.java index eae6abe0686c8858456e8c0f989d6069bc0ab7db..66c713fa3a3cfb11e97e928d09589843f7adecf9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThread.java @@ -20,14 +20,20 @@ package org.apache.dolphinscheduler.server.master.runner; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODES; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; +import static org.apache.dolphinscheduler.common.Constants.DRY_RUN_FLAG_NO; +import static org.apache.dolphinscheduler.common.Constants.DRY_RUN_FLAG_YES; import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT; +import static org.apache.dolphinscheduler.common.Constants.START_UP_PARAMS_PREFIX; +import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS_PREFIX; +import static org.apache.dolphinscheduler.common.enums.DataType.VARCHAR; +import static org.apache.dolphinscheduler.common.enums.Direct.IN; +import org.apache.commons.collections4.MapUtils; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DependResult; -import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; @@ -43,23 +49,27 @@ import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.Environment; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.ProcessTaskRelation; import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; +import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.remote.command.HostUpdateCommand; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import 
org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.server.master.runner.task.ITaskProcessor; import org.apache.dolphinscheduler.server.master.runner.task.TaskAction; import org.apache.dolphinscheduler.server.master.runner.task.TaskProcessorFactory; @@ -68,6 +78,7 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.dolphinscheduler.service.queue.PeerTaskInstancePriorityQueue; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; @@ -75,14 +86,15 @@ import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ExecutorService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -104,10 +116,12 @@ public class WorkflowExecuteThread implements Runnable { * runing TaskNode */ private final Map activeTaskProcessorMaps = new ConcurrentHashMap<>(); + /** - * task exec service + * handle task event */ - private final ExecutorService taskExecService; + private TaskResponseService taskResponseService; + /** * process instance */ @@ -192,9 +206,18 @@ public class WorkflowExecuteThread implements Runnable { private ConcurrentHashMap taskTimeoutCheckList; + /** + * task retry check list + */ + private ConcurrentHashMap taskRetryCheckList; + + /** + * dep task check list + */ + private ConcurrentHashMap depStateCheckList; + /** * start flag, true: start nodes submit completely - * */ private boolean isStart = false; @@ -204,31 +227,35 @@ public class WorkflowExecuteThread implements Runnable { * 
@param processInstance processInstance * @param processService processService * @param nettyExecutorManager nettyExecutorManager - * @param taskTimeoutCheckList */ - public WorkflowExecuteThread(ProcessInstance processInstance - , ProcessService processService - , NettyExecutorManager nettyExecutorManager - , ProcessAlertManager processAlertManager - , MasterConfig masterConfig - , ConcurrentHashMap taskTimeoutCheckList) { + public WorkflowExecuteThread(ProcessInstance processInstance, + TaskResponseService taskResponseService, + ProcessService processService, + NettyExecutorManager nettyExecutorManager, + ProcessAlertManager processAlertManager, + MasterConfig masterConfig, + ConcurrentHashMap taskTimeoutCheckList, + ConcurrentHashMap taskRetryCheckList, + ConcurrentHashMap depStateCheckList) { this.processService = processService; - + this.taskResponseService = taskResponseService; this.processInstance = processInstance; this.masterConfig = masterConfig; - int masterTaskExecNum = masterConfig.getMasterExecTaskNum(); - this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", - masterTaskExecNum); this.nettyExecutorManager = nettyExecutorManager; this.processAlertManager = processAlertManager; this.taskTimeoutCheckList = taskTimeoutCheckList; + this.taskRetryCheckList = taskRetryCheckList; + this.depStateCheckList = depStateCheckList; } @Override public void run() { try { - startProcess(); - handleEvents(); + if (!this.isStart()) { + startProcess(); + } else { + handleEvents(); + } } catch (Exception e) { logger.error("handler error:", e); } @@ -236,14 +263,13 @@ public class WorkflowExecuteThread implements Runnable { /** * the process start nodes are submitted completely. 
- * @return */ public boolean isStart() { return this.isStart; } private void handleEvents() { - while (this.stateEvents.size() > 0) { + while (!this.stateEvents.isEmpty()) { try { StateEvent stateEvent = this.stateEvents.peek(); @@ -318,76 +344,149 @@ public class WorkflowExecuteThread implements Runnable { } private boolean taskTimeout(StateEvent stateEvent) { - - if (taskInstanceHashMap.containsRow(stateEvent.getTaskInstanceId())) { + if (!checkTaskInstanceByStateEvent(stateEvent)) { return true; } - TaskInstance taskInstance = taskInstanceHashMap - .row(stateEvent.getTaskInstanceId()) - .values() - .iterator().next(); + // get lastest task instance into + TaskInstance taskInstance = processService.findTaskInstanceById(stateEvent.getTaskInstanceId()); + TaskDefinition taskDefinition = processService.findTaskDefinition(taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); + taskInstance.setTaskDefine(taskDefinition); if (TimeoutFlag.CLOSE == taskInstance.getTaskDefine().getTimeoutFlag()) { return true; } TaskTimeoutStrategy taskTimeoutStrategy = taskInstance.getTaskDefine().getTimeoutNotifyStrategy(); - if (TaskTimeoutStrategy.FAILED == taskTimeoutStrategy) { + if ((TaskTimeoutStrategy.FAILED == taskTimeoutStrategy || TaskTimeoutStrategy.WARNFAILED == taskTimeoutStrategy) && !taskInstance.getState().typeIsFinished()) { ITaskProcessor taskProcessor = activeTaskProcessorMaps.get(stateEvent.getTaskInstanceId()); taskProcessor.action(TaskAction.TIMEOUT); - return false; + if (taskInstance.isDependTask()) { + TaskInstance task = processService.findTaskInstanceById(taskInstance.getId()); + taskFinished(task); + } + if (TaskTimeoutStrategy.WARNFAILED == taskTimeoutStrategy) { + ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); + processAlertManager.sendTaskTimeoutAlert(processInstance, taskInstance, projectUser); + } } else { - processAlertManager.sendTaskTimeoutAlert(processInstance, taskInstance, 
taskInstance.getTaskDefine()); - return true; + ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); + processAlertManager.sendTaskTimeoutAlert(processInstance, taskInstance, projectUser); } + return true; } private boolean processTimeout() { - this.processAlertManager.sendProcessTimeoutAlert(this.processInstance, this.processDefinition); + ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); + this.processAlertManager.sendProcessTimeoutAlert(this.processInstance, projectUser); + return true; + } + + /** + * check if task instance exist by state event + */ + private boolean checkTaskInstanceByStateEvent(StateEvent stateEvent) { + if (stateEvent.getTaskInstanceId() == 0) { + logger.error("task instance id null, state event:{}", stateEvent); + return false; + } + + if (!taskInstanceHashMap.containsRow(stateEvent.getTaskInstanceId())) { + logger.error("mismatch task instance id, event:{}", stateEvent); + return false; + } return true; } private boolean taskStateChangeHandler(StateEvent stateEvent) { + if (!checkTaskInstanceByStateEvent(stateEvent)) { + return true; + } TaskInstance task = processService.findTaskInstanceById(stateEvent.getTaskInstanceId()); - if (stateEvent.getExecutionStatus().typeIsFinished()) { + if (task.getState() == null) { + logger.error("task state is null, state handler error: {}", stateEvent); + return true; + } + if (task.getState().typeIsFinished()) { + if (completeTaskList.containsKey(Long.toString(task.getTaskCode())) && completeTaskList.get(Long.toString(task.getTaskCode())).getId() == task.getId() + && task.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) { + return true; + } + if (task.getStartTime() == null) { + logger.info("Maybe TASK_EXECUTE_ACK has not been received when the task finish, will wait for one second"); + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); + } taskFinished(task); - } else if 
(activeTaskProcessorMaps.containsKey(stateEvent.getTaskInstanceId())) { + return true; + } + if (activeTaskProcessorMaps.containsKey(stateEvent.getTaskInstanceId())) { ITaskProcessor iTaskProcessor = activeTaskProcessorMaps.get(stateEvent.getTaskInstanceId()); - iTaskProcessor.run(); + iTaskProcessor.action(TaskAction.RUN); if (iTaskProcessor.taskState().typeIsFinished()) { task = processService.findTaskInstanceById(stateEvent.getTaskInstanceId()); taskFinished(task); } - } else { - logger.error("state handler error: {}", stateEvent.toString()); + return true; } + logger.error("state handler error: {}", stateEvent); return true; } private void taskFinished(TaskInstance task) { - logger.info("work flow {} task {} state:{} ", - processInstance.getId(), - task.getId(), - task.getState()); - if (task.taskCanRetry()) { + logger.info("work flow {} task {} state:{} ", processInstance.getId(), task.getId(), task.getState()); + if (task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) { + logger.info("resubmit NEED_FAULT_TOLERANCE {} task", task.getId()); + if (task.getMaxRetryTimes() == 0) { + task.setRetryTimes(task.getRetryTimes() + 1); + } addTaskToStandByList(task); + submitStandByTask(); return; } + if (task.taskCanRetry() && processInstance.getState() != ExecutionStatus.READY_STOP) { + if (task.retryTaskIntervalOverTime()) { + logger.info("failure task will be submitted: process id: {}, task instance id: {} state:{} retry times:{}/{}, interval:{}", + processInstance.getId(), task.getId(), task.getState(), task.getRetryTimes() + 1, task.getMaxRetryTimes(), task.getRetryInterval()); + submitStandByTask(); + if (task.taskCanRetry()) { + TaskInstance retryTask = processService.findTaskInstanceById(task.getId()); + if (retryTask.isDependTask()) { + retryTask.setRetryTimes(retryTask.getRetryTimes() + 1); + if (retryTask.taskCanRetry()) { + addTaskToStandByList(retryTask); + this.taskRetryCheckList.put(retryTask.getId(), retryTask); + } + } else { + 
addTaskToStandByList(retryTask); + this.taskRetryCheckList.put(retryTask.getId(), retryTask); + } + } + return; + } else { + task.setRetryTimes(task.getRetryTimes() + 1); + if (task.taskCanRetry()) { + addTaskToStandByList(task); + this.taskRetryCheckList.put(task.getId(), task); + return; + } + } + } ProcessInstance processInstance = processService.findProcessInstanceById(this.processInstance.getId()); - completeTaskList.put(task.getName(), task); + completeTaskList.put(Long.toString(task.getTaskCode()), task); activeTaskProcessorMaps.remove(task.getId()); taskTimeoutCheckList.remove(task.getId()); + taskRetryCheckList.remove(task.getId()); + depStateCheckList.remove(task.getId()); if (task.getState().typeIsSuccess()) { - processInstance.setVarPool(task.getVarPool()); + // processInstance.setVarPool(task.getVarPool()); processService.saveProcessInstance(processInstance); - submitPostNode(task.getName()); + submitPostNode(Long.toString(task.getTaskCode())); } else if (task.getState().typeIsFailure()) { if (task.isConditionsTask() - || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { - submitPostNode(task.getName()); + || DagHelper.haveConditionsAfterNode(Long.toString(task.getTaskCode()), dag)) { + submitPostNode(Long.toString(task.getTaskCode())); } else { - errorTaskList.put(task.getName(), task); + errorTaskList.put(Long.toString(task.getTaskCode()), task); if (processInstance.getFailureStrategy() == FailureStrategy.END) { killAllTasks(); } @@ -398,9 +497,8 @@ public class WorkflowExecuteThread implements Runnable { private boolean checkStateEvent(StateEvent stateEvent) { if (this.processInstance.getId() != stateEvent.getProcessInstanceId()) { - logger.error("mismatch process instance id: {}, state event:{}", - this.processInstance.getId(), - stateEvent.toString()); + logger.error("mismatch process instance id: {}, state event:{}, task instance id:{}", + this.processInstance.getId(), stateEvent, stateEvent.getTaskInstanceId()); return false; } return 
true; @@ -410,13 +508,21 @@ public class WorkflowExecuteThread implements Runnable { try { logger.info("process:{} state {} change to {}", processInstance.getId(), processInstance.getState(), stateEvent.getExecutionStatus()); processInstance = processService.findProcessInstanceById(this.processInstance.getId()); + + if (stateEvent.getExecutionStatus().typeIsCancel()) { + this.updateProcessInstanceState(stateEvent); + return true; + } + if (processComplementData()) { return true; } + if (stateEvent.getExecutionStatus().typeIsFinished()) { endProcess(); } - if (stateEvent.getExecutionStatus() == ExecutionStatus.READY_STOP) { + + if (processInstance.getState() == ExecutionStatus.READY_STOP) { killAllTasks(); } return true; @@ -426,16 +532,24 @@ public class WorkflowExecuteThread implements Runnable { return true; } - private boolean processComplementData() throws Exception { + private boolean processComplementData() { if (!needComplementProcess()) { return false; } + if (processInstance.getState().typeIsReadyCancel()) { + return false; + } + Date scheduleDate = processInstance.getScheduleTime(); if (scheduleDate == null) { scheduleDate = complementListDate.get(0); } else if (processInstance.getState().typeIsFinished()) { endProcess(); + // rerun process instance of complement didn't need create the next process complement + if (processInstance.getCommandType() == CommandType.REPEAT_RUNNING) { + return true; + } if (complementListDate.size() <= 0) { logger.info("process complement end. 
process id:{}", processInstance.getId()); return true; @@ -451,27 +565,43 @@ public class WorkflowExecuteThread implements Runnable { processInstance.getScheduleTime(), complementListDate.toString()); scheduleDate = complementListDate.get(index + 1); - //the next process complement - processInstance.setId(0); + } - processInstance.setScheduleTime(scheduleDate); + + //the next process complement + int create = this.createComplementDataCommand(scheduleDate); + if (create > 0) { + logger.info("create complement data command successfully. process id: {}", processInstance.getId()); + } + + return true; + } + + private int createComplementDataCommand(Date scheduleDate) { + Command command = new Command(); + command.setScheduleTime(scheduleDate); + command.setCommandType(CommandType.COMPLEMENT_DATA); + command.setProcessDefinitionCode(processInstance.getProcessDefinitionCode()); Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); - processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); - } - - processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); - processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); - processInstance.setStartTime(new Date()); - processInstance.setEndTime(null); - processService.saveProcessInstance(processInstance); - this.taskInstanceHashMap.clear(); - startProcess(); - return true; + } + cmdParam.replace(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.format(scheduleDate, "yyyy-MM-dd HH:mm:ss")); + command.setCommandParam(JSONUtils.toJsonString(cmdParam)); + command.setTaskDependType(processInstance.getTaskDependType()); + command.setFailureStrategy(processInstance.getFailureStrategy()); + 
command.setWarningType(processInstance.getWarningType()); + command.setWarningGroupId(processInstance.getWarningGroupId()); + command.setStartTime(new Date()); + command.setExecutorId(processInstance.getExecutorId()); + command.setUpdateTime(new Date()); + command.setProcessInstancePriority(processInstance.getProcessInstancePriority()); + command.setWorkerGroup(processInstance.getWorkerGroup()); + command.setEnvironmentCode(processInstance.getEnvironmentCode()); + command.setDryRun(processInstance.getDryRun()); + command.setProcessInstanceId(0); + command.setProcessDefinitionVersion(processInstance.getProcessDefinitionVersion()); + return processService.createCommand(command); } private boolean needComplementProcess() { @@ -502,9 +632,11 @@ public class WorkflowExecuteThread implements Runnable { if (processInstance.getState().typeIsWaitingThread()) { processService.createRecoveryWaitingThreadCommand(null, processInstance); } - List taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); - ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); - processAlertManager.sendAlertProcessInstance(processInstance, taskInstances, projectUser); + if (processAlertManager.isNeedToSendWarning(processInstance)) { + List taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); + ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); + processAlertManager.sendAlertProcessInstance(processInstance, taskInstances, projectUser); + } } /** @@ -516,22 +648,24 @@ public class WorkflowExecuteThread implements Runnable { if (this.dag != null) { return; } - processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), - processInstance.getProcessDefinitionVersion()); + processDefinition = processService.findProcessDefinition(processInstance.getProcessDefinitionCode(), 
processInstance.getProcessDefinitionVersion()); recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam()); - List taskNodeList = - processService.transformTask(processService.findRelationByCode(processDefinition.getProjectCode(), processDefinition.getCode()), Lists.newArrayList()); + List processTaskRelationList = processService.findRelationByCode(processDefinition.getCode(), processDefinition.getVersion()); + List taskDefinitionLogList = processService.getTaskDefineLogListByRelation(processTaskRelationList); + List taskNodeList = processService.transformTask(processTaskRelationList, taskDefinitionLogList); forbiddenTaskList.clear(); + taskNodeList.forEach(taskNode -> { if (taskNode.isForbidden()) { - forbiddenTaskList.put(taskNode.getName(), taskNode); + forbiddenTaskList.put(Long.toString(taskNode.getCode()), taskNode); } }); + // generate process to get DAG info - List recoveryNameList = getRecoveryNodeNameList(); + List recoveryNodeCodeList = getRecoveryNodeCodeList(); List startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); ProcessDag processDag = generateFlowDag(taskNodeList, - startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); + startNodeNameList, recoveryNodeCodeList, processInstance.getTaskDependType()); if (processDag == null) { logger.error("processDag is null"); return; @@ -553,19 +687,22 @@ public class WorkflowExecuteThread implements Runnable { List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for (TaskInstance task : taskInstanceList) { if (task.isTaskComplete()) { - completeTaskList.put(task.getName(), task); + completeTaskList.put(Long.toString(task.getTaskCode()), task); } - if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { + if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(Long.toString(task.getTaskCode()), dag)) { continue; } if (task.getState().typeIsFailure() && 
!task.taskCanRetry()) { - errorTaskList.put(task.getName(), task); + errorTaskList.put(Long.toString(task.getTaskCode()), task); } } if (processInstance.isComplementData() && complementListDate.size() == 0) { Map cmdParam = JSONUtils.toMap(processInstance.getCommandParam()); if (cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) { + // reset global params while there are start parameters + setGlobalParamIfCommanded(processDefinition, cmdParam); + Date start = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); Date end = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(processInstance.getProcessDefinitionCode()); @@ -573,6 +710,15 @@ public class WorkflowExecuteThread implements Runnable { complementListDate = CronUtils.getSelfFireDateList(start, end, schedules); logger.info(" process definition code:{} complement data: {}", processInstance.getProcessDefinitionCode(), complementListDate.toString()); + + if (complementListDate.size() > 0 && Flag.NO == processInstance.getIsSubProcess()) { + processInstance.setScheduleTime(complementListDate.get(0)); + processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); + processService.updateProcessInstance(processInstance); + } } } } @@ -591,12 +737,15 @@ public class WorkflowExecuteThread implements Runnable { && taskProcessor.getType().equalsIgnoreCase(Constants.COMMON_TASK_TYPE)) { notifyProcessHostUpdate(taskInstance); } - boolean submit = taskProcessor.submit(taskInstance, processInstance, masterConfig.getMasterTaskCommitRetryTimes(), masterConfig.getMasterTaskCommitInterval()); + taskProcessor.init(taskInstance, processInstance); + boolean submit = taskProcessor.action(TaskAction.SUBMIT); if (submit) { 
this.taskInstanceHashMap.put(taskInstance.getId(), taskInstance.getTaskCode(), taskInstance); activeTaskProcessorMaps.put(taskInstance.getId(), taskProcessor); - taskProcessor.run(); + taskProcessor.action(TaskAction.RUN); addTimeoutCheck(taskInstance); + addDepTaskCheck(taskInstance); + // addRetryCheck(taskInstance); TaskDefinition taskDefinition = processService.findTaskDefinition( taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); @@ -639,21 +788,49 @@ public class WorkflowExecuteThread implements Runnable { } private void addTimeoutCheck(TaskInstance taskInstance) { - - TaskDefinition taskDefinition = processService.findTaskDefinition( - taskInstance.getTaskCode(), - taskInstance.getTaskDefinitionVersion() - ); - taskInstance.setTaskDefine(taskDefinition); - if (TimeoutFlag.OPEN == taskDefinition.getTimeoutFlag()) { - this.taskTimeoutCheckList.put(taskInstance.getId(), taskInstance); + if (taskTimeoutCheckList.containsKey(taskInstance.getId())) { return; } + if (taskInstance.getTaskDefine() == null) { + TaskDefinition taskDefinition = processService.findTaskDefinition( + taskInstance.getTaskCode(), + taskInstance.getTaskDefinitionVersion() + ); + taskInstance.setTaskDefine(taskDefinition); + } + if (TimeoutFlag.OPEN == taskInstance.getTaskDefine().getTimeoutFlag()) { + this.taskTimeoutCheckList.put(taskInstance.getId(), taskInstance); + } if (taskInstance.isDependTask() || taskInstance.isSubProcess()) { this.taskTimeoutCheckList.put(taskInstance.getId(), taskInstance); } } + private void addDepTaskCheck(TaskInstance taskInstance) { + if (taskInstance.isDependTask()) { + if (depStateCheckList.containsKey(taskInstance.getId())) { + return; + } + this.depStateCheckList.put(taskInstance.getId(), taskInstance); + } + } + + private void addRetryCheck(TaskInstance taskInstance) { + if (taskRetryCheckList.containsKey(taskInstance.getId())) { + return; + } + if (taskInstance.getTaskDefine() == null) { + TaskDefinition taskDefinition = 
processService.findTaskDefinition( + taskInstance.getTaskCode(), + taskInstance.getTaskDefinitionVersion() + ); + taskInstance.setTaskDefine(taskDefinition); + } + if (taskInstance.getRetryTimes() <= taskInstance.getMaxRetryTimes() && taskInstance.isDependTask()) { + this.taskRetryCheckList.put(taskInstance.getId(), taskInstance); + } + } + /** * find task instance in db. * in case submit more than one same name task in the same time. @@ -703,7 +880,7 @@ public class WorkflowExecuteThread implements Runnable { taskInstance.setFlag(Flag.YES); // task dry run flag - taskInstance.setDryRun(processInstance.getDryRun()); + taskInstance.setDryRun(taskIsDryRun(processInstance, taskNode.getCode())); // task instance retry times taskInstance.setRetryTimes(0); @@ -752,12 +929,25 @@ public class WorkflowExecuteThread implements Runnable { return taskInstance; } + private int taskIsDryRun(ProcessInstance processInstance, Long taskCode) { + if (processInstance.getDryRun() == DRY_RUN_FLAG_YES) { + if (!processInstance.getHistoryCmd().startsWith(CommandType.START_CURRENT_TASK_PROCESS.name())) { + return DRY_RUN_FLAG_YES; + } + List startNodeList = parseStartNodeName(processInstance.getCommandParam()); + if (startNodeList.contains(String.valueOf(taskCode))) { + return DRY_RUN_FLAG_YES; + } + } + return DRY_RUN_FLAG_NO; + } + public void getPreVarPool(TaskInstance taskInstance, Set preTask) { Map allProperty = new HashMap<>(); Map allTaskInstance = new HashMap<>(); if (CollectionUtils.isNotEmpty(preTask)) { - for (String preTaskName : preTask) { - TaskInstance preTaskInstance = completeTaskList.get(preTaskName); + for (String preTaskCode : preTask) { + TaskInstance preTaskInstance = completeTaskList.get(preTaskCode); if (preTaskInstance == null) { continue; } @@ -777,7 +967,7 @@ public class WorkflowExecuteThread implements Runnable { private void setVarPoolValue(Map allProperty, Map allTaskInstance, TaskInstance preTaskInstance, Property thisProperty) { //for this taskInstance all 
the param in this part is IN. - thisProperty.setDirect(Direct.IN); + thisProperty.setDirect(IN); //get the pre taskInstance Property's name String proName = thisProperty.getProp(); //if the Previous nodes have the Property of same name @@ -806,8 +996,8 @@ public class WorkflowExecuteThread implements Runnable { } } - private void submitPostNode(String parentNodeName) { - Set submitTaskNodeList = DagHelper.parsePostNodes(parentNodeName, skipTaskNodeList, dag, completeTaskList); + private void submitPostNode(String parentNodeCode) { + Set submitTaskNodeList = DagHelper.parsePostNodes(parentNodeCode, skipTaskNodeList, dag, completeTaskList); List taskInstances = new ArrayList<>(); for (String taskNode : submitTaskNodeList) { TaskNode taskNodeObject = dag.getNode(taskNode); @@ -815,7 +1005,12 @@ public class WorkflowExecuteThread implements Runnable { continue; } TaskInstance task = createTaskInstance(processInstance, taskNodeObject); - taskInstances.add(task); + if (processInstance.getState().typeIsReadyPause() && (!activeTaskProcessorMaps.isEmpty() || !completeTaskList.isEmpty())) { + task.setState(ExecutionStatus.PAUSE); + completeTaskList.put(String.valueOf(task.getTaskCode()), task); + } else { + taskInstances.add(task); + } } // if previous node success , post node submit @@ -825,12 +1020,12 @@ public class WorkflowExecuteThread implements Runnable { continue; } - if (completeTaskList.containsKey(task.getName())) { - logger.info("task {} has already run success", task.getName()); + if (completeTaskList.containsKey(Long.toString(task.getTaskCode()))) { + logger.info("task {} has already run success, task id:{}", task.getName(), task.getId()); continue; } - if (task.getState().typeIsPause() || task.getState().typeIsCancel()) { - logger.info("task {} stopped, the state is {}", task.getName(), task.getState()); + if (task.getState().typeIsCancel()) { + logger.info("task {} stopped, the state is {}, task id:{}", task.getName(), task.getState(), task.getId()); } else { 
addTaskToStandByList(task); } @@ -844,41 +1039,58 @@ public class WorkflowExecuteThread implements Runnable { * * @return DependResult */ - private DependResult isTaskDepsComplete(String taskName) { + private DependResult isTaskDepsComplete(String taskCode) { Collection startNodes = dag.getBeginNode(); // if vertex,returns true directly - if (startNodes.contains(taskName)) { + if (startNodes.contains(taskCode)) { return DependResult.SUCCESS; } - TaskNode taskNode = dag.getNode(taskName); - List depNameList = taskNode.getDepList(); - for (String depsNode : depNameList) { - if (!dag.containsNode(depsNode) - || forbiddenTaskList.containsKey(depsNode) - || skipTaskNodeList.containsKey(depsNode)) { - continue; - } - // dependencies must be fully completed - if (!completeTaskList.containsKey(depsNode)) { - return DependResult.WAITING; - } - ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); - if (depTaskState.typeIsPause() || depTaskState.typeIsCancel()) { - return DependResult.NON_EXEC; - } - // ignore task state if current task is condition - if (taskNode.isConditionsTask()) { - continue; - } - if (!dependTaskSuccess(depsNode, taskName)) { - return DependResult.FAILED; + TaskNode taskNode = dag.getNode(taskCode); + List indirectDepCodeList = new ArrayList<>(); + setIndirectDepList(taskCode, indirectDepCodeList); + for (String depsNode : indirectDepCodeList) { + if (dag.containsNode(depsNode) && !skipTaskNodeList.containsKey(depsNode)) { + // dependencies must be fully completed + if (!completeTaskList.containsKey(depsNode)) { + return DependResult.WAITING; + } + ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); + if (depTaskState.typeIsCancel()) { + return DependResult.NON_EXEC; + } + // ignore task state if current task is condition + if (taskNode.isConditionsTask()) { + continue; + } + if (!dependTaskSuccess(depsNode, taskCode)) { + return DependResult.FAILED; + } } } - logger.info("taskName: {} completeDependTaskList: 
{}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); + logger.info("taskCode: {} completeDependTaskList: {}", taskCode, Arrays.toString(completeTaskList.keySet().toArray())); return DependResult.SUCCESS; } + /** + * This function is specially used to handle the dependency situation where the parent node is a prohibited node. + * When the parent node is a forbidden node, the dependency relationship should continue to be traced + * + * @param taskCode taskCode + * @param indirectDepCodeList All indirectly dependent nodes + */ + private void setIndirectDepList(String taskCode, List indirectDepCodeList) { + TaskNode taskNode = dag.getNode(taskCode); + List depCodeList = taskNode.getDepList(); + for (String depsNode : depCodeList) { + if (forbiddenTaskList.containsKey(depsNode)) { + setIndirectDepList(depsNode, indirectDepCodeList); + } else { + indirectDepCodeList.add(depsNode); + } + } + } + /** * depend node is completed, but here need check the condition task branch is the next node */ @@ -938,14 +1150,30 @@ public class WorkflowExecuteThread implements Runnable { * @return Boolean whether has failed task */ private boolean hasFailedTask() { - - if (this.taskFailedSubmit) { + if (taskFailedSubmit) { return true; } - if (this.errorTaskList.size() > 0) { - return true; + if (errorTaskList.size() > 0) { + for (Map.Entry errorTaskMap : errorTaskList.entrySet()) { + TaskInstance taskInstance = processService.findTaskInstanceById(errorTaskMap.getValue().getId()); + if (taskInstance == null || taskInstance.getState().typeIsSuccess()) { + errorTaskList.remove(errorTaskMap.getKey()); + } + } + if (errorTaskList.size() > 0) { + return true; + } + } else { + if (processInstance.getCommandType() == CommandType.RECOVER_TOLERANCE_FAULT_PROCESS + || processInstance.getCommandType() == CommandType.RECOVER_SUSPENDED_PROCESS) { + List failureTaskIds = processService.findLastTaskIdByStateList(processInstance.getId(), + Lists.newArrayList(ExecutionStatus.FAILURE, 
ExecutionStatus.NEED_FAULT_TOLERANCE, ExecutionStatus.KILL)); + if (!failureTaskIds.isEmpty()) { + return true; + } + } } - return this.dependFailedTask.size() > 0; + return dependFailedTask.size() > 0; } /** @@ -959,7 +1187,7 @@ public class WorkflowExecuteThread implements Runnable { return true; } if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { - return readyToSubmitTaskQueue.size() == 0 || activeTaskProcessorMaps.size() == 0; + return readyToSubmitTaskQueue.size() == 0 && activeTaskProcessorMaps.size() == 0; } } return false; @@ -989,9 +1217,7 @@ public class WorkflowExecuteThread implements Runnable { } List pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); - if (CollectionUtils.isNotEmpty(pauseList) - || !isComplementEnd() - || readyToSubmitTaskQueue.size() > 0) { + if (CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd()) { return ExecutionStatus.PAUSE; } else { return ExecutionStatus.SUCCESS; @@ -1001,7 +1227,6 @@ public class WorkflowExecuteThread implements Runnable { /** * generate the latest process instance status by the tasks state * - * @param instance * @return process instance execution status */ private ExecutionStatus getProcessInstanceState(ProcessInstance instance) { @@ -1011,10 +1236,6 @@ public class WorkflowExecuteThread implements Runnable { // active task and retry task exists return runningState(state); } - // process failure - if (processFailed()) { - return ExecutionStatus.FAILURE; - } // waiting thread if (hasWaitingThreadTask()) { @@ -1030,8 +1251,10 @@ public class WorkflowExecuteThread implements Runnable { if (state == ExecutionStatus.READY_STOP) { List stopList = getCompleteTaskByState(ExecutionStatus.STOP); List killList = getCompleteTaskByState(ExecutionStatus.KILL); + List failList = getCompleteTaskByState(ExecutionStatus.FAILURE); if (CollectionUtils.isNotEmpty(stopList) || CollectionUtils.isNotEmpty(killList) + || CollectionUtils.isNotEmpty(failList) || !isComplementEnd()) { return 
ExecutionStatus.STOP; } else { @@ -1039,6 +1262,11 @@ public class WorkflowExecuteThread implements Runnable { } } + // process failure + if (processFailed()) { + return ExecutionStatus.FAILURE; + } + // success if (state == ExecutionStatus.RUNNING_EXECUTION) { List killTasks = getCompleteTaskByState(ExecutionStatus.KILL); @@ -1102,6 +1330,22 @@ public class WorkflowExecuteThread implements Runnable { } } + /** + * stateEvent's execution status as process instance state + */ + private void updateProcessInstanceState(StateEvent stateEvent) { + ExecutionStatus state = stateEvent.getExecutionStatus(); + if (processInstance.getState() != state) { + logger.info("work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", + processInstance.getId(), processInstance.getName(), processInstance.getState(), state, processInstance.getCommandType()); + processInstance.setState(state); + if (state.typeIsFinished()) { + processInstance.setEndTime(new Date()); + } + processService.updateProcessInstance(processInstance); + } + } + /** * get task dependency result * @@ -1109,7 +1353,7 @@ public class WorkflowExecuteThread implements Runnable { * @return DependResult */ private DependResult getDependResultForTask(TaskInstance taskInstance) { - return isTaskDepsComplete(taskInstance.getName()); + return isTaskDepsComplete(Long.toString(taskInstance.getTaskCode())); } /** @@ -1118,11 +1362,35 @@ public class WorkflowExecuteThread implements Runnable { * @param taskInstance task instance */ private void addTaskToStandByList(TaskInstance taskInstance) { - logger.info("add task to stand by list: {}", taskInstance.getName()); try { + if (readyToSubmitTaskQueue.contains(taskInstance)) { + logger.warn("task was found in ready submit queue, task code:{}", taskInstance.getTaskCode()); + return; + } + // need to check if the tasks with same task code is active + boolean active = false; + Map taskInstanceMap = taskInstanceHashMap.column(taskInstance.getTaskCode()); + 
if (taskInstanceMap != null && taskInstanceMap.size() > 0) { + for (Entry entry : taskInstanceMap.entrySet()) { + Integer taskInstanceId = entry.getKey(); + if (activeTaskProcessorMaps.containsKey(taskInstanceId)) { + TaskInstance latestTaskInstance = processService.findTaskInstanceById(taskInstanceId); + if (latestTaskInstance != null && !latestTaskInstance.getState().typeIsFailure()) { + active = true; + break; + } + } + } + } + if (active) { + logger.warn("task was found in active task list, task code:{}", taskInstance.getTaskCode()); + return; + } + logger.info("add task to stand by list, task name:{}, task id:{}, task code:{}", + taskInstance.getName(), taskInstance.getId(), taskInstance.getTaskCode()); readyToSubmitTaskQueue.put(taskInstance); } catch (Exception e) { - logger.error("add task instance to readyToSubmitTaskQueue error, taskName: {}", taskInstance.getName(), e); + logger.error("add task instance to readyToSubmitTaskQueue, taskName:{}, task id:{}", taskInstance.getName(), taskInstance.getId(), e); } } @@ -1158,12 +1426,25 @@ public class WorkflowExecuteThread implements Runnable { return false; } + private void addProcessStopEvent(ProcessInstance processInstance) { + StateEvent stateEvent = new StateEvent(); + stateEvent.setType(StateEventType.PROCESS_STATE_CHANGE); + stateEvent.setProcessInstanceId(processInstance.getId()); + stateEvent.setExecutionStatus(ExecutionStatus.STOP); + this.addStateEvent(stateEvent); + } + /** * close the on going tasks */ private void killAllTasks() { logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(), activeTaskProcessorMaps.size()); + + if (readyToSubmitTaskQueue.size() > 0) { + readyToSubmitTaskQueue.clear(); + } + for (int taskId : activeTaskProcessorMaps.keySet()) { TaskInstance taskInstance = processService.findTaskInstanceById(taskId); if (taskInstance == null || taskInstance.getState().typeIsFinished()) { @@ -1172,42 +1453,33 @@ public class WorkflowExecuteThread implements 
Runnable { ITaskProcessor taskProcessor = activeTaskProcessorMaps.get(taskId); taskProcessor.action(TaskAction.STOP); if (taskProcessor.taskState().typeIsFinished()) { - StateEvent stateEvent = new StateEvent(); - stateEvent.setType(StateEventType.TASK_STATE_CHANGE); - stateEvent.setProcessInstanceId(this.processInstance.getId()); - stateEvent.setTaskInstanceId(taskInstance.getId()); - stateEvent.setExecutionStatus(taskProcessor.taskState()); - this.addStateEvent(stateEvent); + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newActionStop( + taskProcessor.taskState(), + taskInstance.getId(), + this.processInstance.getId()); + taskResponseService.addResponse(taskResponseEvent); } + this.taskRetryCheckList.remove(taskId); + this.depStateCheckList.remove(taskId); } - + this.addProcessStopEvent(processInstance); } public boolean workFlowFinish() { return this.processInstance.getState().typeIsFinished(); } - /** - * whether the retry interval is timed out - * - * @param taskInstance task instance - * @return Boolean - */ - private boolean retryTaskIntervalOverTime(TaskInstance taskInstance) { - if (taskInstance.getState() != ExecutionStatus.FAILURE) { + public boolean activeTaskFinish() { + if (activeTaskProcessorMaps.isEmpty()) { return true; } - if (taskInstance.getId() == 0 - || - taskInstance.getMaxRetryTimes() == 0 - || - taskInstance.getRetryInterval() == 0) { - return true; + List taskInstanceList = processService.findTaskInstanceListByIds(activeTaskProcessorMaps.keySet()); + for (TaskInstance taskInstance : taskInstanceList) { + if (!taskInstance.getState().typeIsFinished()) { + return false; + } } - Date now = new Date(); - long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); - // task retry does not over time, return false - return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval; + return true; } /** @@ -1216,6 +1488,7 @@ public class WorkflowExecuteThread implements Runnable { private void 
submitStandByTask() { try { int length = readyToSubmitTaskQueue.size(); + List skipSubmitInstances = new ArrayList<>(); for (int i = 0; i < length; i++) { TaskInstance task = readyToSubmitTaskQueue.peek(); if (task == null) { @@ -1226,32 +1499,50 @@ public class WorkflowExecuteThread implements Runnable { TaskInstance retryTask = processService.findTaskInstanceById(task.getId()); if (retryTask != null && retryTask.getState().equals(ExecutionStatus.FORCED_SUCCESS)) { task.setState(retryTask.getState()); - logger.info("task: {} has been forced success, put it into complete task list and stop retrying", task.getName()); + logger.info("task name: {} has been forced success, put it into complete task list and stop retrying, task id:{}", task.getName(), task.getId()); removeTaskFromStandbyList(task); - completeTaskList.put(task.getName(), task); - submitPostNode(task.getName()); + completeTaskList.put(Long.toString(task.getTaskCode()), task); + submitPostNode(Long.toString(task.getTaskCode())); continue; } + if (retryTask != null && retryTask.getState() == ExecutionStatus.FAILURE && retryTask.getMaxRetryTimes() !=0 && retryTask.getRetryInterval() != 0) { + long failedTimeInterval = DateUtils.differSec(new Date(), retryTask.getEndTime()); + if ((long) retryTask.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT > failedTimeInterval) { + logger.info("task name: {} retry waiting has not exceeded the interval time, and skip submission this time, task id:{}", task.getName(), task.getId()); + readyToSubmitTaskQueue.remove(task); + skipSubmitInstances.add(task); + continue; + } + } } //init varPool only this task is the first time running if (task.isFirstRun()) { //get pre task ,get all the task varPool to this task - Set preTask = dag.getPreviousNodes(task.getName()); + Set preTask = new HashSet<>(); + preTask.addAll(dag.getPreviousNodes(Long.toString(task.getTaskCode()))); + TaskNode taskNode = dag.getNode(Long.toString(task.getTaskCode())); + if (null != taskNode && null != 
taskNode.getDepList() && !taskNode.getDepList().isEmpty()) { + logger.debug("in submitStandByTask: taskCode:{}, taskType: {}, preTasks: {}, depList:{}", + task.getTaskCode(), taskNode.getType(), taskNode.getPreTasks(), taskNode.getDepList()); + preTask.addAll(taskNode.getDepList()); + } getPreVarPool(task, preTask); } DependResult dependResult = getDependResultForTask(task); if (DependResult.SUCCESS == dependResult) { - if (retryTaskIntervalOverTime(task)) { - TaskInstance taskInstance = submitTaskExec(task); - if (taskInstance == null) { - this.taskFailedSubmit = true; - } else { - removeTaskFromStandbyList(task); + int originalId = task.getId(); + TaskInstance taskInstance = submitTaskExec(task); + if (taskInstance == null) { + this.taskFailedSubmit = true; + } else { + removeTaskFromStandbyList(task); + if (taskInstance.getId() != originalId) { + activeTaskProcessorMaps.remove(originalId); } } } else if (DependResult.FAILED == dependResult) { // if the dependency fails, the current node is not submitted and the state changes to failure. 
- dependFailedTask.put(task.getName(), task); + dependFailedTask.put(Long.toString(task.getTaskCode()), task); removeTaskFromStandbyList(task); logger.info("task {},id:{} depend result : {}", task.getName(), task.getId(), dependResult); } else if (DependResult.NON_EXEC == dependResult) { @@ -1260,6 +1551,10 @@ public class WorkflowExecuteThread implements Runnable { logger.info("remove task {},id:{} , because depend result : {}", task.getName(), task.getId(), dependResult); } } + for (TaskInstance task : skipSubmitInstances) { + readyToSubmitTaskQueue.put(task); + } + skipSubmitInstances.clear(); } catch (Exception e) { logger.error("submit standby task error", e); } @@ -1324,26 +1619,26 @@ public class WorkflowExecuteThread implements Runnable { if (paramMap == null) { return startNodeNameList; } - if (paramMap.containsKey(CMD_PARAM_START_NODE_NAMES)) { - startNodeNameList = Arrays.asList(paramMap.get(CMD_PARAM_START_NODE_NAMES).split(Constants.COMMA)); + if (paramMap.containsKey(CMD_PARAM_START_NODES)) { + startNodeNameList = Arrays.asList(paramMap.get(CMD_PARAM_START_NODES).split(Constants.COMMA)); } return startNodeNameList; } /** - * generate start node name list from parsing command param; + * generate start node code list from parsing command param; * if "StartNodeIdList" exists in command param, return StartNodeIdList * - * @return recovery node name list + * @return recovery node code list */ - private List getRecoveryNodeNameList() { - List recoveryNodeNameList = new ArrayList<>(); + private List getRecoveryNodeCodeList() { + List recoveryNodeCodeList = new ArrayList<>(); if (CollectionUtils.isNotEmpty(recoverNodeIdList)) { for (TaskInstance task : recoverNodeIdList) { - recoveryNodeNameList.add(task.getName()); + recoveryNodeCodeList.add(Long.toString(task.getTaskCode())); } } - return recoveryNodeNameList; + return recoveryNodeCodeList; } /** @@ -1351,15 +1646,51 @@ public class WorkflowExecuteThread implements Runnable { * * @param totalTaskNodeList 
total task node list * @param startNodeNameList start node name list - * @param recoveryNodeNameList recovery node name list + * @param recoveryNodeCodeList recovery node code list * @param depNodeType depend node type * @return ProcessDag process dag * @throws Exception exception */ public ProcessDag generateFlowDag(List totalTaskNodeList, List startNodeNameList, - List recoveryNodeNameList, + List recoveryNodeCodeList, TaskDependType depNodeType) throws Exception { - return DagHelper.generateFlowDag(totalTaskNodeList, startNodeNameList, recoveryNodeNameList, depNodeType); + return DagHelper.generateFlowDag(totalTaskNodeList, startNodeNameList, recoveryNodeCodeList, depNodeType); + } + + public Map getActiveTaskProcessorMaps() { + return activeTaskProcessorMaps; + } + + private void setGlobalParamIfCommanded(ProcessDefinition processDefinition, Map cmdParam) { + // get start params from command param + Map startParamMap = new HashMap<>(); + if (cmdParam.containsKey(Constants.CMD_PARAM_START_PARAMS)) { + String startParamJson = cmdParam.get(Constants.CMD_PARAM_START_PARAMS); + startParamMap = JSONUtils.toMap(startParamJson); + } + Map fatherParamMap = new HashMap<>(); + if (cmdParam.containsKey(Constants.CMD_PARAM_FATHER_PARAMS)) { + String fatherParamJson = cmdParam.get(Constants.CMD_PARAM_FATHER_PARAMS); + fatherParamMap = JSONUtils.toMap(fatherParamJson); + } + startParamMap.putAll(fatherParamMap); + Map globalMap = processDefinition.getGlobalParamMap(); + List globalParamList = processDefinition.getGlobalParamList(); + if (MapUtils.isNotEmpty(startParamMap) && globalMap != null) { + Map tempGlobalMap = new HashMap<>(); + // add prefix for global params + for (Map.Entry param : globalMap.entrySet()) { + tempGlobalMap.put(GLOBAL_PARAMS_PREFIX+ param.getKey(), param.getValue()); + } + globalParamList.forEach(property -> property.setProp(GLOBAL_PARAMS_PREFIX + property.getProp())); + // set start param into global params, add prefix for startup params + for (Entry 
startParam : startParamMap.entrySet()) { + String tmpStartParamKey = START_UP_PARAMS_PREFIX + startParam.getKey(); + tempGlobalMap.put(tmpStartParamKey, startParam.getValue()); + globalParamList.add(new Property(tmpStartParamKey, IN, VARCHAR, startParam.getValue())); + } + processDefinition.setGlobalParamMap(tempGlobalMap); + } } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java index e3a6c07ae42d38e237c687016e738cdd2e3f1d2f..fef13c53cf65653c1e5aa79d0f0738a8034bc161 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.server.master.runner.task; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.SqoopJobType; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.UdfType; @@ -30,9 +29,9 @@ import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.EnumUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; import 
org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; @@ -41,15 +40,19 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.ProcedureTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SQLTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.UdfFuncRequest; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.HashMap; @@ -62,9 +65,12 @@ import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.base.Enums; +import com.google.common.base.Strings; + public abstract class BaseTaskProcessor implements ITaskProcessor { - protected Logger logger = LoggerFactory.getLogger(getClass()); + protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); protected boolean killed = false; @@ -74,36 +80,85 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { protected TaskInstance taskInstance = null; - 
protected ProcessInstance processInstance; + protected ProcessInstance processInstance = null; + + protected int maxRetryTimes; + + protected int commitInterval; protected ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); + protected MasterConfig masterConfig = SpringApplicationContext.getBean(MasterConfig.class); + + protected String threadLoggerInfoName; + + @Override + public void init(TaskInstance taskInstance, ProcessInstance processInstance) { + if (processService == null) { + processService = SpringApplicationContext.getBean(ProcessService.class); + } + if (masterConfig == null) { + masterConfig = SpringApplicationContext.getBean(MasterConfig.class); + } + this.taskInstance = taskInstance; + this.processInstance = processInstance; + this.maxRetryTimes = masterConfig.getMasterTaskCommitRetryTimes(); + this.commitInterval = masterConfig.getMasterTaskCommitInterval(); + } + /** - * pause task, common tasks donot need this. + * persist task * * @return */ + protected abstract boolean persistTask(TaskAction taskAction); + + /** + * pause task, common tasks donot need this. 
+ */ protected abstract boolean pauseTask(); /** * kill task, all tasks need to realize this function - * - * @return */ protected abstract boolean killTask(); /** * task timeout process - * @return */ protected abstract boolean taskTimeout(); + /** + * persist + * + * @return + */ @Override - public void run() { + public boolean persist(TaskAction taskAction) { + return persistTask(taskAction); } + /* + * submit task + */ + protected abstract boolean submitTask(); + + /* + * resubmit task + */ + protected abstract boolean resubmitTask(); + + /** + * run task + */ + protected abstract boolean runTask(); + @Override public boolean action(TaskAction taskAction) { + String threadName = Thread.currentThread().getName(); + if (StringUtils.isNotEmpty(threadLoggerInfoName)) { + Thread.currentThread().setName(String.format(TaskConstants.TASK_LOGGER_THREAD_NAME_FORMAT, threadLoggerInfoName)); + } switch (taskAction) { case STOP: @@ -112,13 +167,33 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { return pause(); case TIMEOUT: return timeout(); + case SUBMIT: + return submit(); + case RUN: + return run(); + case RESUBMIT: + return resubmit(); default: logger.error("unknown task action: {}", taskAction.toString()); - } + + // reset thread name + Thread.currentThread().setName(threadName); return false; } + protected boolean resubmit() { + return resubmitTask(); + } + + protected boolean submit() { + return submitTask(); + } + + protected boolean run() { + return runTask(); + } + protected boolean timeout() { if (timeout) { return true; @@ -127,9 +202,6 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { return timeout; } - /** - * @return - */ protected boolean pause() { if (paused) { return true; @@ -151,6 +223,18 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { return null; } + /** + * set master task running logger. 
+ */ + public void setTaskExecutionLogger() { + threadLoggerInfoName = LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion(), + taskInstance.getProcessInstanceId(), + taskInstance.getId()); + Thread.currentThread().setName(String.format(TaskConstants.TASK_LOGGER_THREAD_NAME_FORMAT, threadLoggerInfoName)); + } + /** * get TaskExecutionContext * @@ -296,8 +380,10 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { DataSource datasource = processService.findDataSourceById(datasourceId); sqlTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); + sqlTaskExecutionContext.setDefaultFS(HadoopUtils.getInstance().getDefaultFS()); + // whether udf type - boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) + boolean udfTypeFlag = Enums.getIfPresent(UdfType.class, Strings.nullToEmpty(sqlParameters.getType())).isPresent() && !StringUtils.isEmpty(sqlParameters.getUdfs()); if (udfTypeFlag) { @@ -368,4 +454,4 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { return resourcesMap; } -} \ No newline at end of file +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java index 4296b85fa4a63b431ef974ef06aaf961ae49cfa9..e7e3a978d7e2dfc6017913726e57f2c59300df3d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java @@ -20,11 +20,11 @@ package org.apache.dolphinscheduler.server.master.runner.task; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; 
import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; import org.apache.dolphinscheduler.remote.utils.Host; -import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; @@ -34,6 +34,7 @@ import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.apache.commons.lang.StringUtils; @@ -48,30 +49,42 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class CommonTaskProcessor extends BaseTaskProcessor { + private static final Logger logger = LoggerFactory.getLogger(CommonTaskProcessor.class); + @Autowired private TaskPriorityQueue taskUpdateQueue; @Autowired - MasterConfig masterConfig; + NettyExecutorManager nettyExecutorManager = SpringApplicationContext.getBean(NettyExecutorManager.class); - @Autowired - NettyExecutorManager nettyExecutorManager; + @Override + public boolean submitTask() { + this.taskInstance = processService.submitTask(taskInstance, maxRetryTimes, commitInterval); - /** - * logger of MasterBaseTaskExecThread - */ - protected Logger logger = LoggerFactory.getLogger(getClass()); + if (this.taskInstance == null) { + return false; + } + setTaskExecutionLogger(); + return dispatchTask(taskInstance, processInstance); + } @Override 
- public boolean submit(TaskInstance task, ProcessInstance processInstance, int maxRetryTimes, int commitInterval) { - this.processInstance = processInstance; - this.taskInstance = processService.submitTask(task, maxRetryTimes, commitInterval); - + protected boolean resubmitTask() { if (this.taskInstance == null) { return false; } - dispatchTask(taskInstance, processInstance); - return true; + setTaskExecutionLogger(); + return dispatchTask(taskInstance, processInstance); + } + + @Override + public void setTaskExecutionLogger() { + threadLoggerInfoName = LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion(), + taskInstance.getProcessInstanceId(), + taskInstance.getId()); + Thread.currentThread().setName(String.format(TaskConstants.MASTER_COMMON_TASK_LOGGER_THREAD_NAME_FORMAT, threadLoggerInfoName)); } @Override @@ -80,7 +93,8 @@ public class CommonTaskProcessor extends BaseTaskProcessor { } @Override - public void run() { + public boolean runTask() { + return true; } @Override @@ -88,6 +102,24 @@ public class CommonTaskProcessor extends BaseTaskProcessor { return true; } + @Override + protected boolean persistTask(TaskAction taskAction) { + switch (taskAction) { + case STOP: + if (taskInstance.getState().typeIsSuccess() || taskInstance.getState().typeIsFailure()) { + return true; + } + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setEndTime(new Date()); + processService.updateTaskInstance(taskInstance); + return true; + default: + logger.error("unknown task action: {}", taskAction.toString()); + + } + return false; + } + /** * common task cannot be paused */ @@ -117,7 +149,9 @@ public class CommonTaskProcessor extends BaseTaskProcessor { logger.info("submit task, but the status of the task {} is already running or delayed.", taskInstance.getName()); return true; } - logger.info("task ready to submit: {}", taskInstance); + if (logger.isDebugEnabled()) 
{ + logger.debug("task ready to submit: {}", taskInstance.getName()); + } TaskPriority taskPriority = new TaskPriority(processInstance.getProcessInstancePriority().getCode(), processInstance.getId(), taskInstance.getProcessInstancePriority().getCode(), @@ -127,7 +161,8 @@ public class CommonTaskProcessor extends BaseTaskProcessor { taskPriority.setTaskExecutionContext(taskExecutionContext); taskUpdateQueue.put(taskPriority); - logger.info(String.format("master submit success, task : %s", taskInstance.getName())); + logger.info("master submit success, task id:{}, task name:{}, process id:{}", + taskInstance.getId(), taskInstance.getName(), taskInstance.getProcessInstanceId()); return true; } catch (Exception e) { logger.error("submit task Exception: ", e); @@ -154,7 +189,6 @@ public class CommonTaskProcessor extends BaseTaskProcessor { if (StringUtils.isBlank(taskInstance.getHost())) { taskInstance.setState(ExecutionStatus.KILL); taskInstance.setEndTime(new Date()); - processService.updateTaskInstance(taskInstance); return true; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java index 80d8eba1c0b2224e5de6e1e5fbc758c30f03129e..41dd0b06497a7f7a56a889ec81dd4009547033c0 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java @@ -26,14 +26,10 @@ import org.apache.dolphinscheduler.common.model.DependentItem; import org.apache.dolphinscheduler.common.model.DependentTaskModel; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import org.apache.dolphinscheduler.common.utils.DependentUtils; -import 
org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.LogUtils; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.util.ArrayList; import java.util.Date; @@ -41,8 +37,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.LoggerFactory; - /** * condition task processor */ @@ -53,8 +47,6 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { */ private DependentParameters dependentParameters; - ProcessInstance processInstance; - /** * condition result */ @@ -63,16 +55,13 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { /** * complete task map */ - private Map completeTaskList = new ConcurrentHashMap<>(); - - MasterConfig masterConfig = SpringApplicationContext.getBean(MasterConfig.class); + private Map completeTaskList = new ConcurrentHashMap<>(); private TaskDefinition taskDefinition; @Override - public boolean submit(TaskInstance task, ProcessInstance processInstance, int masterTaskCommitRetryTimes, int masterTaskCommitInterval) { - this.processInstance = processInstance; - this.taskInstance = processService.submitTask(task, masterTaskCommitRetryTimes, masterTaskCommitInterval); + public boolean submitTask() { + this.taskInstance = processService.submitTask(taskInstance, maxRetryTimes, commitInterval); if (this.taskInstance == null) { return false; @@ -81,16 +70,14 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion() ); - logger = 
LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - processInstance.getProcessDefinitionCode(), - processInstance.getProcessDefinitionVersion(), - taskInstance.getProcessInstanceId(), - taskInstance.getId())); - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, processService.formatTaskAppId(this.taskInstance)); - Thread.currentThread().setName(threadLoggerInfoName); + setTaskExecutionLogger(); initTaskParameters(); logger.info("dependent task start"); - endTask(); + return true; + } + + @Override + protected boolean resubmitTask() { return true; } @@ -100,12 +87,14 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { } @Override - public void run() { + public boolean runTask() { if (conditionResult.equals(DependResult.WAITING)) { setConditionResult(); + endTask(); } else { endTask(); } + return true; } @Override @@ -130,6 +119,23 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { return true; } + @Override + protected boolean persistTask(TaskAction taskAction) { + switch (taskAction) { + case STOP: + if (taskInstance.getState().typeIsSuccess() || taskInstance.getState().typeIsFailure()) { + return true; + } + this.taskInstance.setState(ExecutionStatus.KILL); + this.taskInstance.setEndTime(new Date()); + processService.saveTaskInstance(taskInstance); + return true; + default: + logger.error("unknown task action: {}", taskAction.toString()); + } + return false; + } + @Override protected boolean killTask() { this.taskInstance.setState(ExecutionStatus.KILL); @@ -159,7 +165,7 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { List taskInstances = processService.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); for (TaskInstance task : taskInstances) { - completeTaskList.putIfAbsent(task.getName(), task.getState()); + completeTaskList.putIfAbsent(task.getTaskCode(), task.getState()); } List modelResultList = new ArrayList<>(); @@ -181,18 +187,18 @@ 
public class ConditionTaskProcessor extends BaseTaskProcessor { private DependResult getDependResultForItem(DependentItem item) { DependResult dependResult = DependResult.SUCCESS; - if (!completeTaskList.containsKey(item.getDepTasks())) { - logger.info("depend item: {} have not completed yet.", item.getDepTasks()); + if (!completeTaskList.containsKey(item.getDepTaskCode())) { + logger.info("depend item: {} have not completed yet.", item.getDepTaskCode()); dependResult = DependResult.FAILED; return dependResult; } - ExecutionStatus executionStatus = completeTaskList.get(item.getDepTasks()); + ExecutionStatus executionStatus = completeTaskList.get(item.getDepTaskCode()); if (executionStatus != item.getStatus()) { - logger.info("depend item : {} expect status: {}, actual status: {}", item.getDepTasks(), item.getStatus(), executionStatus); + logger.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), item.getStatus(), executionStatus); dependResult = DependResult.FAILED; } logger.info("dependent item complete {} {},{}", - Constants.DEPENDENT_SPLIT, item.getDepTasks(), dependResult); + Constants.DEPENDENT_SPLIT, item.getDepTaskCode(), dependResult); return dependResult; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java index b26e6411187a1f8e9b958a462ef17b4a08e3540f..16e330d0004e140230c7b44a2a518517534e9b69 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java @@ -27,14 +27,9 @@ import org.apache.dolphinscheduler.common.model.DependentTaskModel; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import 
org.apache.dolphinscheduler.common.utils.DependentUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.DependentExecute; import org.apache.dolphinscheduler.server.utils.LogUtils; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; import java.util.Date; @@ -70,25 +65,19 @@ public class DependentTaskProcessor extends BaseTaskProcessor { DependResult result; - ProcessInstance processInstance; TaskDefinition taskDefinition; - MasterConfig masterConfig = SpringApplicationContext.getBean(MasterConfig.class); - boolean allDependentItemFinished; @Override - public boolean submit(TaskInstance task, ProcessInstance processInstance, int masterTaskCommitRetryTimes, int masterTaskCommitInterval) { - this.processInstance = processInstance; - this.taskInstance = task; - this.taskInstance = processService.submitTask(task, masterTaskCommitRetryTimes, masterTaskCommitInterval); + public boolean submitTask() { + this.taskInstance = processService.submitTask(taskInstance, maxRetryTimes, commitInterval); if (this.taskInstance == null) { return false; } - taskDefinition = processService.findTaskDefinition( - taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion() - ); + taskDefinition = processService.findTaskDefinition(taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); + setTaskExecutionLogger(); taskInstance.setLogPath(LogUtils.getTaskLogPath(processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion(), taskInstance.getProcessInstanceId(), @@ -101,13 +90,18 @@ public class DependentTaskProcessor 
extends BaseTaskProcessor { return true; } + @Override + protected boolean resubmitTask() { + return true; + } + @Override public ExecutionStatus taskState() { return this.taskInstance.getState(); } @Override - public void run() { + public boolean runTask() { if (!allDependentItemFinished) { allDependentItemFinished = allDependentTaskFinish(); } @@ -115,6 +109,7 @@ public class DependentTaskProcessor extends BaseTaskProcessor { getTaskDependResult(); endTask(); } + return true; } @Override @@ -155,6 +150,24 @@ public class DependentTaskProcessor extends BaseTaskProcessor { return true; } + @Override + protected boolean persistTask(TaskAction taskAction) { + switch (taskAction) { + case STOP: + if (taskInstance.getState().typeIsSuccess() || taskInstance.getState().typeIsFailure()) { + return true; + } + this.taskInstance.setState(ExecutionStatus.KILL); + this.taskInstance.setEndTime(new Date()); + processService.saveTaskInstance(taskInstance); + return true; + default: + logger.error("unknown task action: {}", taskAction.toString()); + + } + return false; + } + @Override protected boolean killTask() { this.taskInstance.setState(ExecutionStatus.KILL); @@ -201,9 +214,6 @@ public class DependentTaskProcessor extends BaseTaskProcessor { return result; } - /** - * - */ private void endTask() { ExecutionStatus status; status = (result == DependResult.SUCCESS) ? 
ExecutionStatus.SUCCESS : ExecutionStatus.FAILURE; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ITaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ITaskProcessor.java index b68dc221a9f22a1b414e96c1109daaac8b17c2f4..41fb0d0acd93050b99fcdf42439ab131168fd0d7 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ITaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ITaskProcessor.java @@ -26,14 +26,14 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; */ public interface ITaskProcessor { - void run(); + boolean persist(TaskAction taskAction); + + void init(TaskInstance taskInstance, ProcessInstance processInstance); boolean action(TaskAction taskAction); String getType(); - boolean submit(TaskInstance taskInstance, ProcessInstance processInstance, int masterTaskCommitRetryTimes, int masterTaskCommitInterval); - ExecutionStatus taskState(); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java index 7a4be5830cb1107631b5744f35d8edb00e6c5470..280ef0fe278c5a9caedfd81da5c3bfd8cf7ce7d9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java @@ -17,24 +17,45 @@ package org.apache.dolphinscheduler.server.master.runner.task; +import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; + +import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import 
org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.StateEventChangeCommand; +import org.apache.dolphinscheduler.remote.processor.StateEventCallbackService; +import org.apache.dolphinscheduler.server.utils.LogUtils; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; + +import org.apache.commons.lang3.StringUtils; +import java.util.Comparator; import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.core.type.TypeReference; /** * */ public class SubTaskProcessor extends BaseTaskProcessor { - private ProcessInstance processInstance; - private ProcessInstance subProcessInstance = null; private TaskDefinition taskDefinition; @@ -43,18 +64,26 @@ public class SubTaskProcessor extends BaseTaskProcessor { */ private final Lock runLock = new ReentrantLock(); + private StateEventCallbackService stateEventCallbackService = SpringApplicationContext.getBean(StateEventCallbackService.class); + @Override - public boolean submit(TaskInstance task, ProcessInstance processInstance, int masterTaskCommitRetryTimes, int masterTaskCommitInterval) { - this.processInstance = 
processInstance; - taskDefinition = processService.findTaskDefinition( - task.getTaskCode(), task.getTaskDefinitionVersion() - ); - this.taskInstance = processService.submitTask(task, masterTaskCommitRetryTimes, masterTaskCommitInterval); + public boolean submitTask() { + taskDefinition = processService.findTaskDefinition(taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); + this.taskInstance = processService.submitTask(taskInstance, maxRetryTimes, commitInterval); if (this.taskInstance == null) { return false; } + setTaskExecutionLogger(); + taskInstance.setLogPath(LogUtils.getTaskLogPath(processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion(), taskInstance.getProcessInstanceId(), taskInstance.getId())); + + return true; + } + + @Override + protected boolean resubmitTask() { return true; } @@ -64,43 +93,35 @@ public class SubTaskProcessor extends BaseTaskProcessor { } @Override - public void run() { + public boolean runTask() { try { this.runLock.lock(); if (setSubWorkFlow()) { updateTaskState(); } } catch (Exception e) { - logger.error("work flow {} sub task {} exceptions", - this.processInstance.getId(), - this.taskInstance.getId(), - e); + logger.error("work flow {} sub task {} exceptions", this.processInstance.getId(), this.taskInstance.getId(), e); } finally { this.runLock.unlock(); } + return true; } @Override protected boolean taskTimeout() { - TaskTimeoutStrategy taskTimeoutStrategy = - taskDefinition.getTimeoutNotifyStrategy(); - if (TaskTimeoutStrategy.FAILED != taskTimeoutStrategy - && TaskTimeoutStrategy.WARNFAILED != taskTimeoutStrategy) { + TaskTimeoutStrategy taskTimeoutStrategy = taskDefinition.getTimeoutNotifyStrategy(); + if (TaskTimeoutStrategy.FAILED != taskTimeoutStrategy && TaskTimeoutStrategy.WARNFAILED != taskTimeoutStrategy) { return true; } - logger.info("sub process task {} timeout, strategy {} ", - taskInstance.getId(), taskTimeoutStrategy.getDescp()); + logger.info("sub process task 
{} timeout, strategy {} ", taskInstance.getId(), taskTimeoutStrategy.getDescp()); killTask(); return true; } private void updateTaskState() { subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); - logger.info("work flow {} task {}, sub work flow: {} state: {}", - this.processInstance.getId(), - this.taskInstance.getId(), - subProcessInstance.getId(), - subProcessInstance.getState().getDescp()); + logger.info("work flow {} task {}, sub work flow: {} state: {}", this.processInstance.getId(), this.taskInstance.getId(), + subProcessInstance.getId(), subProcessInstance.getState().getDescp()); if (subProcessInstance != null && subProcessInstance.getState().typeIsFinished()) { taskInstance.setState(subProcessInstance.getState()); taskInstance.setEndTime(new Date()); @@ -108,6 +129,88 @@ public class SubTaskProcessor extends BaseTaskProcessor { } } + private Map mergeEndNodeTaskInstanceVarPool(Set taskCodes) { + List taskInstanceList = processService.findValidTaskListByProcessId(subProcessInstance.getId()); + logger.info("in dealFinish1, mergeEndNodeTaskInstanceVarPool, taskInstanceList.size:{}, subProcessInstance.getId:{}", taskInstanceList.size(),subProcessInstance.getId()); + // filter end nodes and sort by end time reversed + List endTaskInstancesSortedByEndTimeReversed = taskInstanceList.stream() + .filter(o -> taskCodes.contains(Long.toString(o.getTaskCode()))). 
+ sorted(Comparator.comparing(TaskInstance::getEndTime).reversed()).collect(Collectors.toList()); + logger.info("in dealFinish1, mergeEndNodeTaskInstanceVarPool, endTaskInstancesSortedByEndTimeReversed.size:{}", endTaskInstancesSortedByEndTimeReversed.size()); + Map allProperties = new HashMap<>(); + for (TaskInstance taskInstance : endTaskInstancesSortedByEndTimeReversed) { + String varPool = taskInstance.getVarPool(); + if (org.apache.commons.lang.StringUtils.isNotEmpty(varPool)) { + List properties = JSONUtils.toList(varPool, Property.class); + properties.forEach(o -> { + allProperties.put(o.getProp(), o); + }); + } + } + return allProperties; + } + + private void dealFinish1() { + // build dag + ProcessDefinition processDefinition = processService.findProcessDefinition(subProcessInstance.getProcessDefinitionCode(), subProcessInstance.getProcessDefinitionVersion()); + if (null == processDefinition) { + logger.error("process definition not found in meta data, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}", + subProcessInstance.getProcessDefinitionCode(), subProcessInstance.getProcessDefinitionVersion(), subProcessInstance.getId()); + throw new RuntimeException(String.format("process definition code %s, version %s does not exist", subProcessInstance.getProcessDefinitionCode(), subProcessInstance.getProcessDefinitionVersion())); + } + subProcessInstance.setProcessDefinition(processDefinition); + DAG dag = processService.genDagGraph(subProcessInstance.getProcessDefinition()); + // get end nodes + Set endTaskCodes = dag.getEndNode().stream().collect(Collectors.toSet()); + logger.info("in dealFinish1, endTaskCodes:{}", endTaskCodes); + if (endTaskCodes == null || endTaskCodes.isEmpty()) { + return; + } + // get var pool of sub progress instance; + Map varPoolPropertiesMap = mergeEndNodeTaskInstanceVarPool(endTaskCodes); + logger.debug("in dealFinish1, varPoolPropertiesMap:{}", varPoolPropertiesMap); + // merge var pool: 1. 
task instance var pool from pre task ; 2. var pool from sub progress + // filter by localParams + String taskVarPool = taskInstance.getVarPool(); + Map taskVarPoolProperties = new HashMap<>(); + if (StringUtils.isNotEmpty(taskVarPool)) { + taskVarPoolProperties = JSONUtils.toList(taskVarPool, Property.class).stream().collect(Collectors.toMap(Property::getProp, (p) -> p)); + } + Map taskParams = JSONUtils.parseObject(taskInstance.getTaskParams(), new TypeReference>() { + }); + Object localParams = taskParams.get(LOCAL_PARAMS); + Map outProperties = new HashMap<>(); + if (localParams != null) { + List properties = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); + outProperties = properties.stream().filter(r -> Direct.OUT == r.getDirect()).collect(Collectors.toMap(Property::getProp, (p) -> p)); + // put all task instance var pool from pre task + outProperties.putAll(taskVarPoolProperties); + for (Map.Entry o : outProperties.entrySet()) { + if (varPoolPropertiesMap.containsKey(o.getKey())) { + o.getValue().setValue(varPoolPropertiesMap.get(o.getKey()).getValue()); + } + } + } else { + outProperties.putAll(taskVarPoolProperties); + outProperties.putAll(varPoolPropertiesMap); + } + taskInstance.setVarPool(JSONUtils.toJsonString(outProperties.values())); + logger.debug("in dealFinish1, varPool:{}", taskInstance.getVarPool()); + //deal with localParam for show in the page + processService.changeOutParam(taskInstance); + } + + @Override + protected boolean persistTask(TaskAction taskAction) { + switch (taskAction) { + case STOP: + return true; + default: + logger.error("unknown task action: {}", taskAction); + } + return false; + } + @Override protected boolean pauseTask() { pauseSubWorkFlow(); @@ -115,14 +218,13 @@ public class SubTaskProcessor extends BaseTaskProcessor { } private boolean pauseSubWorkFlow() { - ProcessInstance subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + 
subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); if (subProcessInstance == null || taskInstance.getState().typeIsFinished()) { return false; } subProcessInstance.setState(ExecutionStatus.READY_PAUSE); processService.updateProcessInstance(subProcessInstance); - //TODO... - // send event to sub process master + sendToSubProcess(); return true; } @@ -137,7 +239,11 @@ public class SubTaskProcessor extends BaseTaskProcessor { if (subProcessInstance == null || taskInstance.getState().typeIsFinished()) { return false; } - + TaskInstance instance = processService.findTaskInstanceById(taskInstance.getId()); + if (instance.getState() == ExecutionStatus.RUNNING_EXECUTION) { + taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); + return true; + } taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); taskInstance.setStartTime(new Date()); processService.updateTaskInstance(taskInstance); @@ -146,20 +252,32 @@ public class SubTaskProcessor extends BaseTaskProcessor { taskInstance.getId(), taskInstance.getState()); return true; - } @Override protected boolean killTask() { - ProcessInstance subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); if (subProcessInstance == null || taskInstance.getState().typeIsFinished()) { return false; } subProcessInstance.setState(ExecutionStatus.READY_STOP); processService.updateProcessInstance(subProcessInstance); + sendToSubProcess(); + this.taskInstance.setState(ExecutionStatus.KILL); + this.taskInstance.setEndTime(new Date()); + dealFinish1(); + processService.saveTaskInstance(taskInstance); return true; } + private void sendToSubProcess() { + StateEventChangeCommand stateEventChangeCommand = new StateEventChangeCommand(processInstance.getId(), + taskInstance.getId(), subProcessInstance.getState(), 
subProcessInstance.getId(), 0); + String address = subProcessInstance.getHost().split(":")[0]; + int port = Integer.parseInt(subProcessInstance.getHost().split(":")[1]); + this.stateEventCallbackService.sendResult(address, port, stateEventChangeCommand.convert2Command()); + } + @Override public String getType() { return TaskType.SUB_PROCESS.getDesc(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java index 68189c6d790d3008e724cb5cd89b9c544f44c209..bdb5e4f76ddb60459d33715b98d9a25ac70084d7 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.master.runner.task; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskType; @@ -25,13 +26,10 @@ import org.apache.dolphinscheduler.common.task.switchtask.SwitchParameters; import org.apache.dolphinscheduler.common.task.switchtask.SwitchResultVo; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.LogUtils; import org.apache.dolphinscheduler.server.utils.SwitchTaskUtils; -import 
org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.commons.lang.StringUtils; @@ -47,27 +45,22 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { protected final String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; - private TaskInstance taskInstance; - - private ProcessInstance processInstance; TaskDefinition taskDefinition; - MasterConfig masterConfig = SpringApplicationContext.getBean(MasterConfig.class); - /** * switch result */ private DependResult conditionResult; @Override - public boolean submit(TaskInstance taskInstance, ProcessInstance processInstance, int masterTaskCommitRetryTimes, int masterTaskCommitInterval) { - - this.processInstance = processInstance; - this.taskInstance = processService.submitTask(taskInstance, masterTaskCommitRetryTimes, masterTaskCommitInterval); + public boolean submitTask() { + this.taskInstance = processService.submitTask(taskInstance, maxRetryTimes, commitInterval); if (this.taskInstance == null) { return false; } + setTaskExecutionLogger(); + taskDefinition = processService.findTaskDefinition( taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion() ); @@ -83,7 +76,12 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { } @Override - public void run() { + protected boolean resubmitTask() { + return true; + } + + @Override + public boolean runTask() { try { if (!this.taskState().typeIsFinished() && setSwitchResult()) { endTaskState(); @@ -94,6 +92,25 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { this.taskInstance.getId(), e); } + return true; + } + + @Override + protected boolean persistTask(TaskAction taskAction) { + switch (taskAction) { + case STOP: + if (taskInstance.getState().typeIsSuccess() || taskInstance.getState().typeIsFailure()) { + return true; + } + this.taskInstance.setState(ExecutionStatus.KILL); + this.taskInstance.setEndTime(new Date()); + processService.saveTaskInstance(taskInstance); + return true; + default: + 
logger.error("unknown task action: {}", taskAction.toString()); + + } + return false; } @Override @@ -171,7 +188,13 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { switchParameters.setResultConditionLocation(finalConditionLocation); taskInstance.setSwitchDependency(switchParameters); - logger.info("the switch task depend result : {}", conditionResult); + if (!isValidSwitchResult(switchResultVos.get(finalConditionLocation))) { + conditionResult = DependResult.FAILED; + logger.error("the switch task depend result is invalid, result:{}, switch branch:{}", conditionResult, finalConditionLocation); + return true; + } + + logger.info("the switch task depend result:{}, switch branch:{}", conditionResult, finalConditionLocation); return true; } @@ -202,18 +225,31 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { } while (m.find()) { String paramName = m.group(1); - Property property = globalParams.get(paramName); + Property property = globalParams.get(Constants.START_UP_PARAMS_PREFIX + paramName); if (property == null) { - return ""; + property = globalParams.get(paramName); + if (property == null) { + property = globalParams.get(Constants.GLOBAL_PARAMS_PREFIX + paramName); + if (property == null) { + return ""; + } + } } String value = property.getValue(); if (!org.apache.commons.lang.math.NumberUtils.isNumber(value)) { value = "\"" + value + "\""; } - logger.info("paramName:{},paramValue{}", paramName, value); + logger.info("paramName:{},paramValue:{}", paramName, value); content = content.replace("${" + paramName + "}", value); } return content; } + /** + * check whether switch result is valid + */ + private boolean isValidSwitchResult(SwitchResultVo switchResult) { + return switchResult.getNextNode() != null && switchResult.getNextNode() != 0L; + } + } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskAction.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskAction.java index 42c88463b29ca706b49463e2ee9da3b923c73b58..c965de7f981fa4e8212975f56d565204db0ad8b9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskAction.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskAction.java @@ -20,8 +20,11 @@ package org.apache.dolphinscheduler.server.master.runner.task; /** * task action */ -public enum TaskAction { +public enum TaskAction { PAUSE, STOP, - TIMEOUT + TIMEOUT, + SUBMIT, + RUN, + RESUBMIT } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/AbstractMonitor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/AbstractMonitor.java index 5e31343fbfd3a8922b1ccb227e488abaf1071497..2640d8d663e596047b6345c487fcdadd08e2b585 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/AbstractMonitor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/AbstractMonitor.java @@ -16,8 +16,7 @@ */ package org.apache.dolphinscheduler.server.monitor; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; - +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/RegistryMonitorImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/RegistryMonitorImpl.java index 34d6d9d27054376fdb5d0ec70399c90c584a957d..9e3bb0c65ba5ddd824c85ea88ec7f3c9b6b3956e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/RegistryMonitorImpl.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/RegistryMonitorImpl.java @@ 
-19,36 +19,26 @@ package org.apache.dolphinscheduler.server.monitor; import org.apache.dolphinscheduler.service.registry.RegistryClient; +import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -/** - * zk monitor server impl - */ @Component public class RegistryMonitorImpl extends AbstractMonitor { - /** - * zookeeper operator - */ - private RegistryClient registryClient = RegistryClient.getInstance(); - - /** - * get active nodes map by path - * - * @param path path - * @return active nodes map - */ + @Autowired + private RegistryClient registryClient; + @Override protected Map getActiveNodesByPath(String path) { Map maps = new HashMap<>(); - List childrenList = registryClient.getChildrenKeys(path); + Collection childrenList = registryClient.getChildrenKeys(path); if (childrenList == null) { return maps; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java index c80787709f065210afd1f3f8efaea282358c2d4a..67fd07a892b72dc0f110fdbddc5e579dd6e9674b 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java @@ -17,15 +17,12 @@ package org.apache.dolphinscheduler.server.registry; -import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA; +import org.apache.dolphinscheduler.common.utils.HeartBeat; +import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; +import org.apache.dolphinscheduler.service.registry.RegistryClient; -import java.util.Date; import java.util.Set; -import org.apache.dolphinscheduler.common.Constants; -import 
org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.service.registry.RegistryClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,42 +33,43 @@ public class HeartBeatTask implements Runnable { private final Logger logger = LoggerFactory.getLogger(HeartBeatTask.class); - private String startTime; - private double maxCpuloadAvg; - private double reservedMemory; - private int hostWeight; // worker host weight - private Set heartBeatPaths; - private String serverType; - private RegistryClient registryClient; + private final Set heartBeatPaths; + private final RegistryClient registryClient; + private WorkerManagerThread workerManagerThread; + private final String serverType; + private final HeartBeat heartBeat; - public HeartBeatTask(String startTime, + public HeartBeatTask(long startupTime, double maxCpuloadAvg, double reservedMemory, Set heartBeatPaths, String serverType, RegistryClient registryClient) { - this.startTime = startTime; - this.maxCpuloadAvg = maxCpuloadAvg; - this.reservedMemory = reservedMemory; this.heartBeatPaths = heartBeatPaths; - this.serverType = serverType; this.registryClient = registryClient; + this.serverType = serverType; + this.heartBeat = new HeartBeat(startupTime, maxCpuloadAvg, reservedMemory); } - public HeartBeatTask(String startTime, + public HeartBeatTask(long startupTime, double maxCpuloadAvg, double reservedMemory, int hostWeight, Set heartBeatPaths, String serverType, - RegistryClient registryClient) { - this.startTime = startTime; - this.maxCpuloadAvg = maxCpuloadAvg; - this.reservedMemory = reservedMemory; - this.hostWeight = hostWeight; + RegistryClient registryClient, + int workerThreadCount, + WorkerManagerThread workerManagerThread + ) { this.heartBeatPaths = heartBeatPaths; - this.serverType = serverType; this.registryClient = registryClient; + this.workerManagerThread = workerManagerThread; + this.serverType = 
serverType; + this.heartBeat = new HeartBeat(startupTime, maxCpuloadAvg, reservedMemory, hostWeight, workerThreadCount); + } + + public String getHeartBeatInfo() { + return this.heartBeat.encodeHeartBeat(); } @Override @@ -85,41 +83,16 @@ public class HeartBeatTask implements Runnable { } } + if (workerManagerThread != null) { + // update waiting task count + heartBeat.setWorkerWaitingTaskCount(workerManagerThread.getThreadPoolQueueSize()); + } + for (String heartBeatPath : heartBeatPaths) { - registryClient.update(heartBeatPath, heartBeatInfo()); + registryClient.persistEphemeral(heartBeatPath, heartBeat.encodeHeartBeat()); } } catch (Throwable ex) { logger.error("error write heartbeat info", ex); } } - - public String heartBeatInfo() { - double loadAverage = OSUtils.loadAverage(); - double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); - int status = Constants.NORMAL_NODE_STATUS; - if (loadAverage > maxCpuloadAvg || availablePhysicalMemorySize < reservedMemory) { - logger.warn("current cpu load average {} is too high or available memory {}G is too low, under max.cpuload.avg={} and reserved.memory={}G", - loadAverage, availablePhysicalMemorySize, maxCpuloadAvg, reservedMemory); - status = Constants.ABNORMAL_NODE_STATUS; - } - - StringBuilder builder = new StringBuilder(100); - builder.append(OSUtils.cpuUsage()).append(COMMA); - builder.append(OSUtils.memoryUsage()).append(COMMA); - builder.append(OSUtils.loadAverage()).append(COMMA); - builder.append(OSUtils.availablePhysicalMemorySize()).append(Constants.COMMA); - builder.append(maxCpuloadAvg).append(Constants.COMMA); - builder.append(reservedMemory).append(Constants.COMMA); - builder.append(startTime).append(Constants.COMMA); - builder.append(DateUtils.dateToString(new Date())).append(Constants.COMMA); - builder.append(status).append(COMMA); - // save process id - builder.append(OSUtils.getProcessID()); - // worker host weight - if (Constants.WORKER_TYPE.equals(serverType)) { - 
builder.append(Constants.COMMA).append(hostWeight); - } - return builder.toString(); - } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java index c17b8fb6528564d0b7a23911ee9fcda4ef185496..627d481aca8022e13ab0e9f83daed754eb161e6a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java @@ -17,15 +17,18 @@ package org.apache.dolphinscheduler.server.utils; +import org.apache.commons.lang3.StringUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.DependentItem; import org.apache.dolphinscheduler.common.utils.DependentUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -34,7 +37,9 @@ import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -70,7 +75,7 @@ public class DependentExecute { /** * logger */ - 
private Logger logger = LoggerFactory.getLogger(DependentExecute.class); + protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); /** * constructor @@ -102,21 +107,25 @@ public class DependentExecute { * @param dateIntervals date intervals * @return dateIntervals */ - private DependResult calculateResultForTasks(DependentItem dependentItem, - List dateIntervals) { - + private DependResult calculateResultForTasks(DependentItem dependentItem, List dateIntervals) { DependResult result = DependResult.FAILED; for (DateInterval dateInterval : dateIntervals) { - ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionCode(), - dateInterval); + ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionCode(), dateInterval); if (processInstance == null) { + logger.info("Cannot find dependent processInstance, waiting for workflow to run, processDefiniteCode:{}, taskCode:{}", + dependentItem.getDefinitionCode(), dependentItem.getDepTaskCode()); return DependResult.WAITING; } // need to check workflow for updates, so get all task and check the task state - if (dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)) { - result = dependResultByProcessInstance(processInstance); + if (dependentItem.getDepTaskCode() == Constants.DEPENDENT_ALL_TASK_CODE) { + if (!processInstance.getState().typeIsFinished()) { + logger.info("Wait for the dependent workflow to complete, processDefiniteCode:{}, taskCode:{}, processInstanceId:{}, processInstance state:{}", + dependentItem.getDefinitionCode(), dependentItem.getDepTaskCode(), processInstance.getId(), processInstance.getState()); + return DependResult.WAITING; + } + result = dependResultByProcessInstance(processInstance, dateInterval); } else { - result = getDependTaskResult(dependentItem.getDepTasks(), processInstance); + result = getDependTaskResult(processInstance, dependentItem.getDepTaskCode(), 
dateInterval); } if (result != DependResult.SUCCESS) { break; @@ -127,14 +136,56 @@ public class DependentExecute { /** * depend type = depend_all - * - * @return */ - private DependResult dependResultByProcessInstance(ProcessInstance processInstance) { - if (!processInstance.getState().typeIsFinished()) { - return DependResult.WAITING; - } + private DependResult dependResultByProcessInstance(ProcessInstance processInstance, DateInterval dateInterval) { if (processInstance.getState().typeIsSuccess()) { + List taskRelations = processService.findRelationByCode(processInstance.getProcessDefinitionCode(), + processInstance.getProcessDefinitionVersion()); + if (!taskRelations.isEmpty()) { + List taskDefinitionLogs = processService.genTaskDefineList(taskRelations); + Map definiteTask = taskDefinitionLogs.stream().filter(log -> !log.getTaskType().equals(TaskType.SUB_PROCESS.getDesc()) + || !log.getTaskType().equals(TaskType.DEPENDENT.getDesc()) + || !log.getTaskType().equals(TaskType.CONDITIONS.getDesc())) + .filter(log -> log.getFlag().equals(Flag.YES)) + .collect(Collectors.toMap(TaskDefinition::getCode, TaskDefinitionLog::getName)); + if (!definiteTask.isEmpty()) { + List taskInstanceList = processService.findLastTaskInstanceListInterval(definiteTask.keySet(), dateInterval); + if (taskInstanceList.isEmpty()) { + logger.warn("Cannot find the task instance: {}", JSONUtils.toJsonString(definiteTask)); + return DependResult.FAILED; + } + Map taskInstanceMap = new HashMap<>(); + for (TaskInstance instance : taskInstanceList) { + taskInstanceMap.compute(instance.getTaskCode(), (k, v) -> { + if (v == null) { + v = instance; + } else { + if (v.getId() < instance.getId()) { + v = instance; + } + } + return v; + }); + definiteTask.remove(instance.getTaskCode()); + } + List instanceFail = taskInstanceMap.values().stream().filter(instance -> instance.getState().typeIsFailure()).collect(Collectors.toList()); + if (!instanceFail.isEmpty()) { + List log = 
instanceFail.stream().map(instance -> instance.getId() + "|" + instance.getTaskCode() + "|" + instance.getName()).collect(Collectors.toList()); + logger.warn("The fail task: {}", StringUtils.join(log, Constants.COMMA)); + return DependResult.FAILED; + } + List instanceRunning = taskInstanceMap.values().stream().filter(instance -> instance.getState().typeIsRunning()).collect(Collectors.toList()); + if (!instanceRunning.isEmpty()) { + List log = instanceRunning.stream().map(instance -> instance.getId() + "|" + instance.getTaskCode() + "|" + instance.getName()).collect(Collectors.toList()); + logger.info("The running task: {}", StringUtils.join(log, Constants.COMMA)); + return DependResult.WAITING; + } + if (!definiteTask.isEmpty()) { + logger.warn("Cannot find the task instance: {}", JSONUtils.toJsonString(definiteTask)); + return DependResult.FAILED; + } + } + } return DependResult.SUCCESS; } return DependResult.FAILED; @@ -142,36 +193,29 @@ public class DependentExecute { /** * get depend task result - * - * @param taskName - * @param processInstance - * @return */ - private DependResult getDependTaskResult(String taskName, ProcessInstance processInstance) { - DependResult result; - TaskInstance taskInstance = null; - List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); - - for (TaskInstance task : taskInstanceList) { - if (task.getName().equals(taskName)) { - taskInstance = task; - break; - } - } - + private DependResult getDependTaskResult(ProcessInstance processInstance, long taskCode, DateInterval dateInterval) { + TaskInstance taskInstance = processService.findLastTaskInstanceInterval(taskCode, dateInterval); if (taskInstance == null) { - // cannot find task in the process instance - // maybe because process instance is running or failed. 
- if (processInstance.getState().typeIsFinished()) { - result = DependResult.FAILED; - } else { + TaskDefinition taskDefinition = processService.findTaskDefinitionByCode(taskCode); + if (taskDefinition == null) { + logger.error("Cannot find the task definition, something error, taskCode: {}", taskCode); + return DependResult.FAILED; + } + if (taskDefinition.getFlag() == Flag.NO) { + logger.warn("Cannot find the task instance, but the task is forbidden, so dependent success, taskCode: {}, taskName: {}", taskCode, taskDefinition.getName()); + return DependResult.SUCCESS; + } + if (!processInstance.getState().typeIsFinished()) { + logger.info("Wait for the dependent workflow to complete, taskCode:{}, processInstanceId:{}, processInstance state:{}", taskCode, processInstance.getId(), processInstance.getState()); return DependResult.WAITING; } + logger.warn("Cannot find the task in the process instance when the ProcessInstance is finish, taskCode: {}, taskName: {}", taskCode, taskDefinition.getName()); + return DependResult.FAILED; } else { - result = getDependResultByState(taskInstance.getState()); + logger.info("The running task, taskId:{}, taskCode:{}, taskName:{}", taskInstance.getId(), taskInstance.getTaskCode(), taskInstance.getName()); + return getDependResultByState(taskInstance.getState()); } - - return result; } /** @@ -211,7 +255,6 @@ public class DependentExecute { * @return DependResult */ private DependResult getDependResultByState(ExecutionStatus state) { - if (!state.typeIsFinished()) { return DependResult.WAITING; } else if (state.typeIsSuccess()) { @@ -221,23 +264,6 @@ public class DependentExecute { } } - /** - * get dependent result by task instance state when task instance is null - * - * @param state state - * @return DependResult - */ - private DependResult getDependResultByProcessStateWhenTaskNull(ExecutionStatus state) { - - if (state.typeIsRunning() - || state == ExecutionStatus.SUBMITTED_SUCCESS - || state == ExecutionStatus.WAITING_THREAD) { 
- return DependResult.WAITING; - } else { - return DependResult.FAILED; - } - } - /** * judge depend item finished * @@ -245,9 +271,9 @@ public class DependentExecute { * @return boolean */ public boolean finish(Date currentTime) { - if (modelDependResult == DependResult.WAITING) { + if (modelDependResult == DependResult.WAITING || modelDependResult == DependResult.NON_EXEC) { modelDependResult = getModelDependResult(currentTime); - return false; + return modelDependResult == DependResult.SUCCESS || modelDependResult == DependResult.FAILED; } return true; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java index 3ca91205a371a1b0cf0de6ffb15a12ee1f0aa259..063b153a178a681b594302e169a6e5f25d8efa62 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java @@ -19,7 +19,6 @@ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; @@ -30,7 +29,9 @@ import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.SystemUtils; import java.io.File; import java.nio.charset.StandardCharsets; @@ -153,7 +154,7 @@ public class ProcessUtils { List pidList = new 
ArrayList<>(); Matcher mat = null; // pstree pid get sub pids - if (OSUtils.isMacOS()) { + if (SystemUtils.IS_OS_MAC) { String pids = OSUtils.exeCmd(String.format("%s -sp %d", Constants.PSTREE, processId)); if (null != pids) { mat = MACPATTERN.matcher(pids); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java index 9705b4480fbac07b5f2c5ea05f2724102f97c14f..11175998d7b0ade6552a529c66e1871ba5ca8f1d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java @@ -17,10 +17,12 @@ package org.apache.dolphinscheduler.server.worker; +import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME; + import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.IStoppable; -import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyServerConfig; @@ -28,44 +30,45 @@ import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.plugin.TaskPluginManager; import org.apache.dolphinscheduler.server.worker.processor.DBTaskAckProcessor; import org.apache.dolphinscheduler.server.worker.processor.DBTaskResponseProcessor; +import org.apache.dolphinscheduler.server.worker.processor.HostUpdateProcessor; import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor; +import org.apache.dolphinscheduler.server.worker.processor.TaskKillAckProcessor; 
import org.apache.dolphinscheduler.server.worker.processor.TaskKillProcessor; +import org.apache.dolphinscheduler.server.worker.processor.TaskRecallAckProcessor; import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistryClient; import org.apache.dolphinscheduler.server.worker.runner.RetryReportTaskStatusThread; import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.spi.exception.PluginNotFoundException; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; +import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.commons.collections4.MapUtils; +import org.apache.commons.collections4.CollectionUtils; -import java.util.Set; +import java.util.Collection; import javax.annotation.PostConstruct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.WebApplicationType; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; import org.springframework.transaction.annotation.EnableTransactionManagement; -import com.facebook.presto.jdbc.internal.guava.collect.ImmutableList; - /** * worker server */ @ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = { - @ComponentScan.Filter(type = FilterType.REGEX, pattern = { - "org.apache.dolphinscheduler.server.master.*", - "org.apache.dolphinscheduler.server.monitor.*", - 
"org.apache.dolphinscheduler.server.log.*" - }) + @ComponentScan.Filter(type = FilterType.REGEX, pattern = { + "org.apache.dolphinscheduler.server.master.*", + "org.apache.dolphinscheduler.server.monitor.*", + "org.apache.dolphinscheduler.server.log.*", + "org.apache.dolphinscheduler.alert.*" + }) }) @EnableTransactionManagement public class WorkerServer implements IStoppable { @@ -80,12 +83,6 @@ public class WorkerServer implements IStoppable { */ private NettyRemotingServer nettyRemotingServer; - /** - * worker registry - */ - @Autowired - private WorkerRegistryClient workerRegistryClient; - /** * worker config */ @@ -110,8 +107,18 @@ public class WorkerServer implements IStoppable { @Autowired private WorkerManagerThread workerManagerThread; + /** + * worker registry + */ + @Autowired + private WorkerRegistryClient workerRegistryClient; + + @Autowired private TaskPluginManager taskPluginManager; + @Value("${spring.datasource.driver-class-name}") + private String driverClassName; + /** * worker server startup, not use web service * @@ -119,7 +126,10 @@ public class WorkerServer implements IStoppable { */ public static void main(String[] args) { Thread.currentThread().setName(Constants.THREAD_NAME_WORKER_SERVER); - new SpringApplicationBuilder(WorkerServer.class).web(WebApplicationType.NONE).run(args); + new SpringApplicationBuilder(WorkerServer.class) + .web(WebApplicationType.NONE) + .profiles("worker") + .run(args); } /** @@ -127,40 +137,43 @@ public class WorkerServer implements IStoppable { */ @PostConstruct public void run() { + PropertyUtils.setValue(SPRING_DATASOURCE_DRIVER_CLASS_NAME, driverClassName); + // alert-server client registry alertClientService = new AlertClientService(workerConfig.getAlertListenHost(), Constants.ALERT_RPC_PORT); - // init task plugin - initTaskPlugin(); // init remoting server NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(workerConfig.getListenPort()); this.nettyRemotingServer = new 
NettyRemotingServer(serverConfig); this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, new TaskExecuteProcessor(alertClientService, taskPluginManager)); this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_REQUEST, new TaskKillProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_RESPONSE_ACK, new TaskKillAckProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_RECALL_ACK, new TaskRecallAckProcessor()); this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_ACK, new DBTaskAckProcessor()); this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_RESPONSE, new DBTaskResponseProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.PROCESS_HOST_UPDATE_REQUEST, new HostUpdateProcessor()); this.nettyRemotingServer.start(); // worker registry try { this.workerRegistryClient.registry(); this.workerRegistryClient.setRegistryStoppable(this); - Set workerZkPaths = this.workerRegistryClient.getWorkerZkPaths(); - - this.workerRegistryClient.handleDeadServer(workerZkPaths, NodeType.WORKER, Constants.DELETE_OP); } catch (Exception e) { - logger.error(e.getMessage(), e); + logger.error("worker registry error", e); throw new RuntimeException(e); } + // solve dead lock + logger.info(org.apache.dolphinscheduler.spi.utils.PropertyUtils.dumpProperties()); + // task execute manager this.workerManagerThread.start(); // retry report task status this.retryReportTaskStatusThread.start(); - /** + /* * registry hooks, which are called before the process exits */ Runtime.getRuntime().addShutdownHook(new Thread(() -> { @@ -170,32 +183,7 @@ public class WorkerServer implements IStoppable { })); } - // todo better - private void initTaskPlugin() { - taskPluginManager = new TaskPluginManager(); - DolphinPluginManagerConfig taskPluginManagerConfig = new DolphinPluginManagerConfig(); - taskPluginManagerConfig.setPlugins(workerConfig.getTaskPluginBinding()); - if 
(StringUtils.isNotBlank(workerConfig.getTaskPluginDir())) { - taskPluginManagerConfig.setInstalledPluginsDir(workerConfig.getTaskPluginDir().trim()); - } - - if (StringUtils.isNotBlank(workerConfig.getMavenLocalRepository())) { - taskPluginManagerConfig.setMavenLocalRepository(workerConfig.getMavenLocalRepository().trim()); - } - - DolphinPluginLoader taskPluginLoader = new DolphinPluginLoader(taskPluginManagerConfig, ImmutableList.of(taskPluginManager)); - try { - taskPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("Load Task Plugin Failed !", e); - } - if (MapUtils.isEmpty(taskPluginManager.getTaskChannelMap())) { - throw new PluginNotFoundException("Task Plugin Not Found,Please Check Config File"); - } - } - public void close(String cause) { - try { // execute only once if (Stopper.isStopped()) { @@ -218,9 +206,23 @@ public class WorkerServer implements IStoppable { this.nettyRemotingServer.close(); this.workerRegistryClient.unRegistry(); this.alertClientService.close(); + + // kill running tasks + this.killAllRunningTasks(); + + // close the application context this.springApplicationContext.close(); + logger.info("springApplicationContext close"); + try { + // thread sleep 60 seconds for quietly stop + Thread.sleep(60000L); + } catch (Exception e) { + logger.warn("thread sleep exception ", e); + } + Runtime.getRuntime().halt(0); } catch (Exception e) { logger.error("worker server stop exception ", e); + Runtime.getRuntime().halt(1); } } @@ -228,4 +230,21 @@ public class WorkerServer implements IStoppable { public void stop(String cause) { close(cause); } + + /** + * kill all tasks which are running + */ + public void killAllRunningTasks() { + Collection taskRequests = TaskExecutionContextCacheManager.getAllTaskRequestList(); + logger.info("ready to kill all cache job, job size:{}", taskRequests.size()); + + if (CollectionUtils.isEmpty(taskRequests)) { + return; + } + + for (TaskRequest taskRequest : taskRequests) { + // kill 
task when it's not finished yet + org.apache.dolphinscheduler.plugin.task.api.ProcessUtils.kill(taskRequest); + } + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java index 3639b8eba38176327682b50c2c41e1882ff9c3de..8cffa09a0ab3a5140f1e9e9b6dce580e09d453b4 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java @@ -24,7 +24,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** - * Responce Cache : cache worker send master result + * Response Cache : cache worker send master result */ public class ResponceCache { @@ -38,6 +38,8 @@ public class ResponceCache { private Map ackCache = new ConcurrentHashMap<>(); private Map responseCache = new ConcurrentHashMap<>(); + private Map killResponseCache = new ConcurrentHashMap<>(); + private Map recallCache = new ConcurrentHashMap<>(); /** @@ -54,6 +56,13 @@ public class ResponceCache { case RESULT: responseCache.put(taskInstanceId,command); break; + case ACTION_STOP: + killResponseCache.put(taskInstanceId,command); + break; + case WORKER_REJECT: + case REALLOCATE: + recallCache.put(taskInstanceId,command); + break; default: throw new IllegalArgumentException("invalid event type : " + event); } @@ -68,6 +77,32 @@ public class ResponceCache { ackCache.remove(taskInstanceId); } + /** + * remove kill response cache + * + * @param taskInstanceId taskInstanceId + */ + public void removeKillResponseCache(Integer taskInstanceId) { + killResponseCache.remove(taskInstanceId); + } + + public Map getKillResponseCache() { + return killResponseCache; + } + + /** + * recall response cache + * + * @param taskInstanceId taskInstanceId + */ + public void 
removeRecallCache(Integer taskInstanceId) { + recallCache.remove(taskInstanceId); + } + + public Map getRecallCache() { + return recallCache; + } + /** * remove reponse cache * @param taskInstanceId taskInstanceId diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java index a3feb7777de3e81c2dea2e5d6bcbe1bbab6b355f..57119a703ded9bd95947805858e4a18267ea12b2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java @@ -65,6 +65,17 @@ public class WorkerConfig { @Value("${task.plugin.binding:}") private String taskPluginBinding; + @Value("${worker.retry.report.task.statues.interval:10}") + private int retryReportTaskStatusInterval; + + public int getRetryReportTaskStatusInterval() { + return retryReportTaskStatusInterval; + } + + public void setRetryReportTaskStatusInterval(int retryReportTaskStatusInterval) { + this.retryReportTaskStatusInterval = retryReportTaskStatusInterval; + } + public int getListenPort() { return listenPort; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/plugin/TaskPluginManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/plugin/TaskPluginManager.java index 9da2181deddfa6dfb3a1caf9368fedc5e29a0a61..24d4603c8e1ccb3b21870f85acdedd981bfcf279 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/plugin/TaskPluginManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/plugin/TaskPluginManager.java @@ -18,89 +18,80 @@ package org.apache.dolphinscheduler.server.worker.plugin; import static java.lang.String.format; -import static 
java.util.Objects.requireNonNull; - -import static com.google.common.base.Preconditions.checkState; import org.apache.dolphinscheduler.common.enums.PluginType; -import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.dao.PluginDao; import org.apache.dolphinscheduler.dao.entity.PluginDefine; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.classloader.ThreadContextClassLoader; import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer; import org.apache.dolphinscheduler.spi.params.base.PluginParams; -import org.apache.dolphinscheduler.spi.plugin.AbstractDolphinPluginManager; import org.apache.dolphinscheduler.spi.task.TaskChannel; import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; -import java.util.HashMap; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.ServiceLoader; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; -public class TaskPluginManager extends AbstractDolphinPluginManager { - +@Component +public class TaskPluginManager { private static final Logger logger = LoggerFactory.getLogger(TaskPluginManager.class); - private final Map taskChannelFactoryMap = new ConcurrentHashMap<>(); private final Map taskChannelMap = new ConcurrentHashMap<>(); - /** - * k->pluginDefineId v->pluginDefineName - */ - private final Map pluginDefineMap = new HashMap<>(); + private final PluginDao pluginDao; - private void addTaskChannelFactory(TaskChannelFactory taskChannelFactory) { - requireNonNull(taskChannelFactory, "taskChannelFactory is null"); + public TaskPluginManager(PluginDao pluginDao) 
{ + this.pluginDao = pluginDao; + } - if (taskChannelFactoryMap.putIfAbsent(taskChannelFactory.getName(), taskChannelFactory) != null) { - throw new IllegalArgumentException(format("Task Plugin '%s' is already registered", taskChannelFactory.getName())); - } + private void loadTaskChannel(TaskChannelFactory taskChannelFactory) { + TaskChannel taskChannel = taskChannelFactory.create(); + taskChannelMap.put(taskChannelFactory.getName(), taskChannel); + } - try { - loadTaskChannel(taskChannelFactory.getName()); - } catch (Exception e) { - throw new IllegalArgumentException(format("Task Plugin '%s' is can not load .", taskChannelFactory.getName())); - } + public Map getTaskChannelMap() { + return Collections.unmodifiableMap(taskChannelMap); } - private void loadTaskChannel(String name) { - requireNonNull(name, "name is null"); + @EventListener + public void installPlugin(ApplicationReadyEvent readyEvent) { + final Set names = new HashSet<>(); - TaskChannelFactory taskChannelFactory = taskChannelFactoryMap.get(name); - checkState(taskChannelFactory != null, "Task Plugin {} is not registered", name); + ServiceLoader.load(TaskChannelFactory.class).forEach(factory -> { + final String name = factory.getName(); - try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(taskChannelFactory.getClass().getClassLoader())) { - TaskChannel taskChannel = taskChannelFactory.create(); - this.taskChannelMap.put(name, taskChannel); - } + logger.info("Registering task plugin: {}", name); - logger.info("-- Loaded Task Plugin {} --", name); - } + if (!names.add(name)) { + throw new IllegalStateException(format("Duplicate task plugins named '%s'", name)); + } + loadTaskChannel(factory); - private PluginDao pluginDao = DaoFactory.getDaoInstance(PluginDao.class); + logger.info("Registered task plugin: {}", name); - public Map getTaskChannelMap() { - return taskChannelMap; - } - - @Override - public void installPlugin(DolphinSchedulerPlugin dolphinSchedulerPlugin) { - for 
(TaskChannelFactory taskChannelFactory : dolphinSchedulerPlugin.getTaskChannelFactorys()) { - logger.info("Registering Task Plugin '{}'", taskChannelFactory.getName()); - this.addTaskChannelFactory(taskChannelFactory); - List params = taskChannelFactory.getParams(); - String nameEn = taskChannelFactory.getName(); + List params = factory.getParams(); String paramsJson = PluginParamsTransfer.transferParamsToJson(params); - PluginDefine pluginDefine = new PluginDefine(nameEn, PluginType.TASK.getDesc(), paramsJson); - int id = pluginDao.addOrUpdatePluginDefine(pluginDefine); - pluginDefineMap.put(id, pluginDefine.getPluginName()); - } + PluginDefine pluginDefine = new PluginDefine(name, PluginType.TASK.getDesc(), paramsJson); + int count = pluginDao.addOrUpdatePluginDefine(pluginDefine); + if (count <= 0) { + throw new RuntimeException("Failed to update task plugin: " + name); + } + }); + + // put WATERDROP task + taskChannelMap.put(TaskType.WATERDROP.getDesc(), taskChannelMap.get(TaskType.SHELL.getDesc())); + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java index a340ad704e1b9244c4b4c0e07aa29121851f8950..3aac840855b9433d8d7256a134bf25e01096aea1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java @@ -17,17 +17,21 @@ package org.apache.dolphinscheduler.server.worker.processor; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.remote.command.*; +import org.apache.dolphinscheduler.remote.command.Command; +import 
org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; +import io.netty.channel.Channel; + /** * db task ack processor */ @@ -50,6 +54,7 @@ public class DBTaskAckProcessor implements NettyRequestProcessor { if (taskAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){ ResponceCache.get().removeAckCache(taskAckCommand.getTaskInstanceId()); + logger.debug("removeAckCache: taskinstance id:{}", taskAckCommand.getTaskInstanceId()); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java index 97a9cf527a66073ca5ea667bec2ef8efc85f2ebc..f9e206ee7560c1501d15f373ca814389b73d64d5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.server.worker.processor; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; @@ -25,11 +24,14 @@ import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; + import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; +import io.netty.channel.Channel; + /** * db task response processor */ @@ -45,12 +47,15 @@ public class DBTaskResponseProcessor implements NettyRequestProcessor { DBTaskResponseCommand taskResponseCommand = JSONUtils.parseObject( command.getBody(), DBTaskResponseCommand.class); - if (taskResponseCommand == null){ + if (taskResponseCommand == null) { return; } - if (taskResponseCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){ + if (taskResponseCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()) { ResponceCache.get().removeResponseCache(taskResponseCommand.getTaskInstanceId()); + logger.debug("removeResponseCache: taskinstance id:{}", taskResponseCommand.getTaskInstanceId()); + TaskCallbackService.remove(taskResponseCommand.getTaskInstanceId()); + logger.debug("remove REMOTE_CHANNELS, task instance id:{}", taskResponseCommand.getTaskInstanceId()); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java index 5be32761349a6500704f5f3c2cbdd6f08f347d55..8928d5009d7897df671d2de69d59db19cf23a3be 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java @@ -51,7 +51,7 @@ public class HostUpdateProcessor implements NettyRequestProcessor { @Override public void process(Channel channel, Command command) { - Preconditions.checkArgument(CommandType.PROCESS_HOST_UPDATE_REQUST == command.getType(), String.format("invalid command type : %s", command.getType())); + Preconditions.checkArgument(CommandType.PROCESS_HOST_UPDATE_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType())); 
HostUpdateCommand updateCommand = JSONUtils.parseObject(command.getBody(), HostUpdateCommand.class); logger.info("received host update command : {}", updateCommand); taskCallbackService.changeRemoteChannel(updateCommand.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque())); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java index fa186d0d5f3f89a5c2856d56099043321c67d963..d90c787f3716a8ee55c4a69f2c436ccd2f1ac660 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java @@ -19,14 +19,13 @@ package org.apache.dolphinscheduler.server.worker.processor; import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.remote.processor.NettyRemoteChannel; -import org.apache.dolphinscheduler.service.registry.RegistryClient; + +import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,18 +50,12 @@ public class TaskCallbackService { */ private static final ConcurrentHashMap REMOTE_CHANNELS = new ConcurrentHashMap<>(); - /** - * zookeeper registry center - */ - private RegistryClient registryClient; - /** * netty remoting client */ private final NettyRemotingClient nettyRemotingClient; public TaskCallbackService() { - this.registryClient = RegistryClient.getInstance(); final 
NettyClientConfig clientConfig = new NettyClientConfig(); this.nettyRemotingClient = new NettyRemotingClient(clientConfig); this.nettyRemotingClient.registerProcessor(CommandType.DB_TASK_ACK, new DBTaskAckProcessor()); @@ -131,10 +124,24 @@ public class TaskCallbackService { * * @param taskInstanceId taskInstanceId */ - public void remove(int taskInstanceId) { + public static void remove(int taskInstanceId) { REMOTE_CHANNELS.remove(taskInstanceId); } + /** + * get opaque + * + * @param taskInstanceId taskInstanceId + */ + public static long getOpaque(int taskInstanceId) { + NettyRemoteChannel nettyRemoteChannel = REMOTE_CHANNELS.get(taskInstanceId); + long opaque = 0L; + if (nettyRemoteChannel != null) { + opaque = nettyRemoteChannel.getOpaque(); + } + return opaque; + } + /** * send ack * @@ -162,7 +169,7 @@ public class TaskCallbackService { @Override public void operationComplete(ChannelFuture future) throws Exception { if (future.isSuccess()) { - remove(taskInstanceId); + // remove(taskInstanceId); return; } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java index c3720fe4609d4ec60356659e1294c5c3227240e7..b587235883a665a3d3f969227ed9c5b75f51c35e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java @@ -19,21 +19,16 @@ package org.apache.dolphinscheduler.server.worker.processor; import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; 
-import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; +import org.apache.dolphinscheduler.remote.command.TaskRecallCommand; import org.apache.dolphinscheduler.remote.processor.NettyRemoteChannel; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.server.utils.LogUtils; import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.plugin.TaskPluginManager; @@ -45,9 +40,6 @@ import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import java.util.Date; -import java.util.Optional; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -96,8 +88,6 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { * @param taskExecutionContext task */ private void setTaskCache(TaskExecutionContext taskExecutionContext) { - TaskExecutionContext preTaskCache = new TaskExecutionContext(); - preTaskCache.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); TaskRequest taskRequest = JSONUtils.parseObject(JSONUtils.toJsonString(taskExecutionContext), TaskRequest.class); TaskExecutionContextCacheManager.cacheTaskExecutionContext(taskRequest); } @@ -112,7 +102,6 @@ public class TaskExecuteProcessor 
implements NettyRequestProcessor { public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_EXECUTE_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType())); - TaskExecuteRequestCommand taskRequestCommand = JSONUtils.parseObject( command.getBody(), TaskExecuteRequestCommand.class); @@ -132,92 +121,39 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { } setTaskCache(taskExecutionContext); - // todo custom logger - taskExecutionContext.setHost(NetUtils.getAddr(workerConfig.getListenPort())); - taskExecutionContext.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext)); - - // local execute path - String execLocalPath = getExecLocalPath(taskExecutionContext); - logger.info("task instance local execute path : {}", execLocalPath); - taskExecutionContext.setExecutePath(execLocalPath); - - try { - FileUtils.createWorkDirIfAbsent(execLocalPath); - if (CommonUtils.isSudoEnable() && workerConfig.getWorkerTenantAutoCreate()) { - OSUtils.createUserIfAbsent(taskExecutionContext.getTenantCode()); - } - } catch (Throwable ex) { - String errorLog = String.format("create execLocalPath : %s", execLocalPath); - LoggerUtils.logError(Optional.of(logger), errorLog, ex); - TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - } - FileUtils.taskLoggerThreadLocal.remove(); - - taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(), - new NettyRemoteChannel(channel, command.getOpaque())); - - // delay task process - long remainTime = DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(), taskExecutionContext.getDelayTime() * 60L); - if (remainTime > 0) { - logger.info("delay the execution of task instance {}, delay time: {} s", taskExecutionContext.getTaskInstanceId(), remainTime); - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION); - taskExecutionContext.setStartTime(null); - 
} else { - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); - taskExecutionContext.setStartTime(new Date()); + if (CommonUtils.isSudoEnable() && workerConfig.getWorkerTenantAutoCreate()) { + OSUtils.createUserIfAbsent(taskExecutionContext.getTenantCode()); } - this.doAck(taskExecutionContext); - + ResponceCache.get().removeRecallCache(taskExecutionContext.getTaskInstanceId()); + taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque())); // submit task to manager - if (!workerManager.offer(new TaskExecuteThread(taskExecutionContext, taskCallbackService, alertClientService, taskPluginManager))) { - logger.info("submit task to manager error, queue is full, queue size is {}", workerManager.getQueueSize()); + boolean offer = workerManager.offer(new TaskExecuteThread(taskExecutionContext, taskCallbackService, alertClientService, taskPluginManager)); + if (!offer) { + logger.warn("submit task to wait queue error, queue is full, queue size is {}, taskInstanceId: {}", + workerManager.getWaitSubmitQueueSize(), taskExecutionContext.getTaskInstanceId()); + sendRecallCommand(taskExecutionContext, channel); + TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); } } - private void doAck(TaskExecutionContext taskExecutionContext) { - // tell master that task is in executing - TaskExecuteAckCommand ackCommand = buildAckCommand(taskExecutionContext); - ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command(), Event.ACK); - taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command()); + private void sendRecallCommand(TaskExecutionContext taskExecutionContext, Channel channel) { + TaskRecallCommand taskRecallCommand = buildRecallCommand(taskExecutionContext); + Command command = taskRecallCommand.convert2Command(); + 
ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), command, Event.WORKER_REJECT); + taskCallbackService.changeRemoteChannel(taskExecutionContext.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque())); + taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), command); + logger.info("send recall command successfully, taskId:{}, opaque:{}", taskExecutionContext.getTaskInstanceId(), command.getOpaque()); } - /** - * build ack command - * - * @param taskExecutionContext taskExecutionContext - * @return TaskExecuteAckCommand - */ - private TaskExecuteAckCommand buildAckCommand(TaskExecutionContext taskExecutionContext) { - TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); - ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode()); - ackCommand.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext)); - ackCommand.setHost(taskExecutionContext.getHost()); - ackCommand.setStartTime(taskExecutionContext.getStartTime()); - if (TaskType.SQL.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType()) || TaskType.PROCEDURE.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType())) { - ackCommand.setExecutePath(null); - } else { - ackCommand.setExecutePath(taskExecutionContext.getExecutePath()); - } - taskExecutionContext.setLogPath(ackCommand.getLogPath()); - ackCommand.setProcessInstanceId(taskExecutionContext.getProcessInstanceId()); - - return ackCommand; - } - - /** - * get execute local path - * - * @param taskExecutionContext taskExecutionContext - * @return execute local path - */ - private String getExecLocalPath(TaskExecutionContext taskExecutionContext) { - return FileUtils.getProcessExecDir(taskExecutionContext.getProjectCode(), - taskExecutionContext.getProcessDefineCode(), - taskExecutionContext.getProcessDefineVersion(), - taskExecutionContext.getProcessInstanceId(), - 
taskExecutionContext.getTaskInstanceId()); + private TaskRecallCommand buildRecallCommand(TaskExecutionContext taskExecutionContext) { + TaskRecallCommand taskRecallCommand = new TaskRecallCommand(); + taskRecallCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + taskRecallCommand.setProcessInstanceId(taskExecutionContext.getProcessInstanceId()); + taskRecallCommand.setHost(taskExecutionContext.getHost()); + taskRecallCommand.setEvent(Event.WORKER_REJECT); + taskRecallCommand.setStatus(ExecutionStatus.SUBMITTED_SUCCESS.getCode()); + return taskRecallCommand; } } diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillAckProcessor.java similarity index 30% rename from dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillAckProcessor.java index c4518076b62e31dfce472b0afcc0919a202d602d..dff97191a2cb1313f210cc9b908f4095f161c63b 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillAckProcessor.java @@ -15,53 +15,45 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.alert.plugin; +package org.apache.dolphinscheduler.server.worker.processor; -import org.apache.dolphinscheduler.alert.AlertServer; -import org.apache.dolphinscheduler.alert.utils.Constants; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskKillAckCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; -import java.util.Objects; - -import org.junit.Assert; -import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.ImmutableList; +import com.google.common.base.Preconditions; -/** - * AlertPluginManager Tester. 
- */ -public class AlertPluginManagerTest { +import io.netty.channel.Channel; - private static final Logger logger = LoggerFactory.getLogger(AlertPluginManagerTest.class); +public class TaskKillAckProcessor implements NettyRequestProcessor { - @Test - public void testLoadPlugins() { - logger.info("begin test AlertPluginManagerTest"); - AlertPluginManager alertPluginManager = new AlertPluginManager(); - DolphinPluginManagerConfig alertPluginManagerConfig = new DolphinPluginManagerConfig(); - String path = Objects.requireNonNull(DolphinPluginLoader.class.getClassLoader().getResource("")).getPath(); - alertPluginManagerConfig.setPlugins(path + "../../../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml"); - if (StringUtils.isNotBlank(PropertyUtils.getString(AlertServer.ALERT_PLUGIN_DIR))) { - alertPluginManagerConfig.setInstalledPluginsDir(PropertyUtils.getString(AlertServer.ALERT_PLUGIN_DIR, Constants.ALERT_PLUGIN_PATH).trim()); - } + private final Logger logger = LoggerFactory.getLogger(TaskKillAckProcessor.class); - if (StringUtils.isNotBlank(PropertyUtils.getString(AlertServer.MAVEN_LOCAL_REPOSITORY))) { - alertPluginManagerConfig.setMavenLocalRepository(Objects.requireNonNull(PropertyUtils.getString(AlertServer.MAVEN_LOCAL_REPOSITORY)).trim()); + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_KILL_RESPONSE_ACK == command.getType(), + String.format("invalid command type : %s", command.getType())); + TaskKillAckCommand taskKillAckCommand = JSONUtils.parseObject(command.getBody(), TaskKillAckCommand.class); + if (taskKillAckCommand == null) { + logger.warn("Cannot parse command, command type: {}", command.getType()); + return; } - - DolphinPluginLoader alertPluginLoader = new DolphinPluginLoader(alertPluginManagerConfig, ImmutableList.of(alertPluginManager)); - try { - //alertPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("load Alert Plugin Failed 
!", e); + logger.info("received kill ack command : {}", taskKillAckCommand); + + if (taskKillAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()) { + ResponceCache.get().removeKillResponseCache(taskKillAckCommand.getTaskInstanceId()); + TaskExecutionContextCacheManager.removeByTaskInstanceId(taskKillAckCommand.getTaskInstanceId()); + logger.info("removeKillResponseCache: task instance id:{}", taskKillAckCommand.getTaskInstanceId()); + TaskCallbackService.remove(taskKillAckCommand.getTaskInstanceId()); + logger.info("remove REMOTE_CHANNELS, task instance id:{}", taskKillAckCommand.getTaskInstanceId()); } - - Assert.assertNull(alertPluginManager.getAlertChannelFactoryMap().get("Email")); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java index c0ecd673855efeb7384faf7dbcadcca9c82e75b2..1caa23dc367b3f58768f2db337b67bc44b7b6609 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.server.worker.processor; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; @@ -31,11 +32,14 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.Pair; import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; import 
org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread; import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.task.AbstractTask; import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; @@ -91,14 +95,21 @@ public class TaskKillProcessor implements NettyRequestProcessor { TaskKillRequestCommand killCommand = JSONUtils.parseObject(command.getBody(), TaskKillRequestCommand.class); logger.info("received kill command : {}", killCommand); - Pair> result = doKill(killCommand); + TaskRequest taskRequest = TaskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); + if (taskRequest == null) { + logger.warn("Cannot find taskInstanceId {} in taskContextCacheManager", killCommand.getTaskInstanceId()); + return; + } + taskRequest.setCurrentExecutionStatus(org.apache.dolphinscheduler.spi.task.ExecutionStatus.STOP); + TaskExecutionContextCacheManager.updateTaskExecutionContext(taskRequest); + + taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(), new NettyRemoteChannel(channel, command.getOpaque())); - taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(), - new NettyRemoteChannel(channel, command.getOpaque())); + Pair> result = doKill(killCommand); - TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand, result); + TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(taskRequest, result); + ResponceCache.get().cache(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command(), 
Event.ACTION_STOP); taskCallbackService.sendResult(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command()); - TaskExecutionContextCacheManager.removeByTaskInstanceId(taskKillResponseCommand.getTaskInstanceId()); } /** @@ -112,10 +123,17 @@ public class TaskKillProcessor implements NettyRequestProcessor { int taskInstanceId = killCommand.getTaskInstanceId(); TaskRequest taskRequest = TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(JSONUtils.toJsonString(taskRequest), TaskExecutionContext.class); - try { Integer processId = taskExecutionContext.getProcessId(); if (processId.equals(0)) { + TaskExecuteThread taskExecuteThread = workerManager.getTaskExecuteThread(taskInstanceId); + if (null != taskExecuteThread) { + AbstractTask task = taskExecuteThread.getTask(); + if (task != null) { + task.cancelApplication(true); + logger.info("kill task by cancelApplication, task id:{}", taskInstanceId); + } + } workerManager.killTaskBeforeExecuteByInstanceId(taskInstanceId); TaskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); logger.info("the task has not been executed and has been cancelled, task id:{}", taskInstanceId); @@ -145,24 +163,21 @@ public class TaskKillProcessor implements NettyRequestProcessor { /** * build TaskKillResponseCommand * - * @param killCommand kill command + * @param taskRequest taskRequest * @param result exe result * @return build TaskKillResponseCommand */ - private TaskKillResponseCommand buildKillTaskResponseCommand(TaskKillRequestCommand killCommand, + private TaskKillResponseCommand buildKillTaskResponseCommand(TaskRequest taskRequest, Pair> result) { TaskKillResponseCommand taskKillResponseCommand = new TaskKillResponseCommand(); taskKillResponseCommand.setStatus(result.getLeft() ? 
ExecutionStatus.SUCCESS.getCode() : ExecutionStatus.FAILURE.getCode()); taskKillResponseCommand.setAppIds(result.getRight()); - TaskRequest taskRequest = TaskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); - if (taskRequest == null) { - return taskKillResponseCommand; - } TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(JSONUtils.toJsonString(taskRequest), TaskExecutionContext.class); if (taskExecutionContext != null) { taskKillResponseCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); taskKillResponseCommand.setHost(taskExecutionContext.getHost()); taskKillResponseCommand.setProcessId(taskExecutionContext.getProcessId()); + taskKillResponseCommand.setProcessInstanceId(taskExecutionContext.getProcessInstanceId()); } return taskKillResponseCommand; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRecallAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRecallAckProcessor.java new file mode 100644 index 0000000000000000000000000000000000000000..01bc8cfa90ea88a2afff8ebb16d25ce4372ba3e5 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRecallAckProcessor.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskRecallAckCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +import io.netty.channel.Channel; + +public class TaskRecallAckProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskRecallAckProcessor.class); + + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_RECALL_ACK == command.getType(), + String.format("invalid command type : %s", command.getType())); + + TaskRecallAckCommand taskRecallAckCommand = JSONUtils.parseObject( + command.getBody(), TaskRecallAckCommand.class); + logger.info("taskRecallAckCommand:{}, opaque:{}", taskRecallAckCommand, command.getOpaque()); + if (taskRecallAckCommand == null) { + return; + } + + if (taskRecallAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()) { + Command recallCommand = ResponceCache.get().getRecallCache().get(taskRecallAckCommand.getTaskInstanceId()); + if (recallCommand != null && command.getOpaque() 
== recallCommand.getOpaque()) { + ResponceCache.get().removeRecallCache(taskRecallAckCommand.getTaskInstanceId()); + logger.info("removeRecallCache: task instance id:{}", taskRecallAckCommand.getTaskInstanceId()); + } + if (command.getOpaque() == TaskCallbackService.getOpaque(taskRecallAckCommand.getTaskInstanceId())) { + TaskCallbackService.remove(taskRecallAckCommand.getTaskInstanceId()); + logger.info("remove REMOTE_CHANNELS, task instance id:{}", taskRecallAckCommand.getTaskInstanceId()); + } + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClient.java index 363b497278df2c2a3afdd2a3ecf0861f919a8c84..7f72d1a8d2dfe66b14bf7fff077e7fac2896d2cc 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClient.java @@ -20,20 +20,23 @@ package org.apache.dolphinscheduler.server.worker.registry; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; import static org.apache.dolphinscheduler.common.Constants.SINGLE_SLASH; +import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.enums.NodeType; -import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; +import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import 
org.apache.dolphinscheduler.server.registry.HeartBeatTask; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.service.registry.RegistryClient; import org.apache.commons.lang.StringUtils; -import java.util.Date; +import java.io.IOException; import java.util.Set; import java.util.StringJoiner; import java.util.concurrent.Executors; @@ -63,25 +66,31 @@ public class WorkerRegistryClient { @Autowired private WorkerConfig workerConfig; + /** + * worker manager + */ + @Autowired + private WorkerManagerThread workerManagerThread; + /** * heartbeat executor */ private ScheduledExecutorService heartBeatExecutor; + @Autowired private RegistryClient registryClient; /** - * worker start time + * worker startup time, ms */ - private String startTime; + private long startupTime; private Set workerGroups; @PostConstruct public void initWorkRegistry() { this.workerGroups = workerConfig.getWorkerGroups(); - this.startTime = DateUtils.dateToString(new Date()); - this.registryClient = RegistryClient.getInstance(); + this.startupTime = System.currentTimeMillis(); this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor")); } @@ -93,36 +102,85 @@ public class WorkerRegistryClient { Set workerZkPaths = getWorkerZkPaths(); int workerHeartbeatInterval = workerConfig.getWorkerHeartbeatInterval(); - for (String workerZKPath : workerZkPaths) { - registryClient.persistEphemeral(workerZKPath, ""); - logger.info("worker node : {} registry to ZK {} successfully", address, workerZKPath); - } - - HeartBeatTask heartBeatTask = new HeartBeatTask(startTime, + HeartBeatTask heartBeatTask = new HeartBeatTask(startupTime, workerConfig.getWorkerMaxCpuloadAvg(), workerConfig.getWorkerReservedMemory(), workerConfig.getHostWeight(), workerZkPaths, Constants.WORKER_TYPE, - registryClient); + registryClient, + 
workerConfig.getWorkerExecThreads(), + workerManagerThread + ); + + for (String workerZKPath : workerZkPaths) { + // remove before persist + registryClient.remove(workerZKPath); + registryClient.persistEphemeral(workerZKPath, heartBeatTask.getHeartBeatInfo()); + logger.info("worker node : {} registry to ZK {} successfully", address, workerZKPath); + } + + while (!this.checkNodeExists()) { + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + + // sleep 1s, waiting master failover remove + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); + + // delete dead server + this.handleDeadServer(workerZkPaths, NodeType.WORKER, Constants.DELETE_OP); + + registryClient.addConnectionStateListener(this::handleConnectionState); this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, workerHeartbeatInterval, workerHeartbeatInterval, TimeUnit.SECONDS); logger.info("worker node : {} heartbeat interval {} s", address, workerHeartbeatInterval); } + public void handleConnectionState(ConnectionState state) { + switch (state) { + case CONNECTED: + logger.debug("registry connection state is {}", state); + break; + case SUSPENDED: + logger.warn("registry connection state is {}, ready to retry connection", state); + break; + case RECONNECTED: + logger.debug("registry connection state is {}, clean the node info", state); + String address = NetUtils.getAddr(workerConfig.getListenPort()); + Set workerZkPaths = getWorkerZkPaths(); + for (String workerZKPath : workerZkPaths) { + registryClient.persistEphemeral(workerZKPath, ""); + logger.info("worker node : {} reconnect to ZK {} successfully", address, workerZKPath); + } + break; + case DISCONNECTED: + logger.warn("registry connection state is {}, ready to stop myself", state); + registryClient.getStoppable().stop("registry connection state is DISCONNECTED, stop myself"); + break; + default: + } + } + /** * remove registry info */ - public void unRegistry() { - String address = getLocalAddress(); - Set workerZkPaths = getWorkerZkPaths(); - for (String 
workerZkPath : workerZkPaths) { - registryClient.remove(workerZkPath); - logger.info("worker node : {} unRegistry from ZK {}.", address, workerZkPath); + public void unRegistry() throws IOException { + try { + String address = getLocalAddress(); + Set workerZkPaths = getWorkerZkPaths(); + for (String workerZkPath : workerZkPaths) { + registryClient.remove(workerZkPath); + logger.info("worker node : {} unRegistry from ZK {}.", address, workerZkPath); + } + } catch (Exception ex) { + logger.error("remove worker zk path exception", ex); } + this.heartBeatExecutor.shutdownNow(); logger.info("heartbeat executor shutdown"); + registryClient.close(); + logger.info("registry client closed"); } /** @@ -146,7 +204,7 @@ public class WorkerRegistryClient { return workerPaths; } - public void handleDeadServer(Set nodeSet, NodeType nodeType, String opType) throws Exception { + public void handleDeadServer(Set nodeSet, NodeType nodeType, String opType) { registryClient.handleDeadServer(nodeSet, nodeType, opType); } @@ -161,4 +219,11 @@ public class WorkerRegistryClient { registryClient.setStoppable(stoppable); } + public boolean checkNodeExists() { + boolean result = registryClient.checkNodeExists(NetUtils.getHost(), NodeType.WORKER); + if (result) { + logger.info("check worker, node exist success, host:{}", NetUtils.getHost()); + } + return result; + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java index b2d00317a5e91e0361d705233ada6e7c38dfc0b9..d85a5daff1d7c514a50ae15d4d00b928a18fa5de 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java @@ -17,15 +17,17 @@ package 
org.apache.dolphinscheduler.server.worker.runner; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.thread.Stopper; - import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.util.Map; @@ -38,23 +40,21 @@ public class RetryReportTaskStatusThread implements Runnable { private final Logger logger = LoggerFactory.getLogger(RetryReportTaskStatusThread.class); - /** - * every 5 minutes - */ - private static long RETRY_REPORT_TASK_STATUS_INTERVAL = 5 * 60 * 1000L; + @Autowired + WorkerConfig workerConfig; /** - * task callback service + * task callback service */ private final TaskCallbackService taskCallbackService; - public void start(){ - Thread thread = new Thread(this,"RetryReportTaskStatusThread"); + public void start() { + Thread thread = new Thread(this, "RetryReportTaskStatusThread"); thread.setDaemon(true); thread.start(); } - public RetryReportTaskStatusThread(){ + public RetryReportTaskStatusThread() { this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); } @@ -64,33 +64,29 @@ public class RetryReportTaskStatusThread implements Runnable { @Override public void run() { ResponceCache responceCache = ResponceCache.get(); - - while (Stopper.isRunning()){ - - // sleep 5 minutes - ThreadUtils.sleep(RETRY_REPORT_TASK_STATUS_INTERVAL); - + long interval = workerConfig.getRetryReportTaskStatusInterval() * Constants.SLEEP_TIME_MILLIS * 60L; + while 
(Stopper.isRunning()) { + ThreadUtils.sleep(60 * Constants.SLEEP_TIME_MILLIS); + long nowTimeMillis = System.currentTimeMillis(); try { - if (!responceCache.getAckCache().isEmpty()){ - Map ackCache = responceCache.getAckCache(); - for (Map.Entry entry : ackCache.entrySet()){ - Integer taskInstanceId = entry.getKey(); - Command ackCommand = entry.getValue(); - taskCallbackService.sendAck(taskInstanceId,ackCommand); - } - } - - if (!responceCache.getResponseCache().isEmpty()){ - Map responseCache = responceCache.getResponseCache(); - for (Map.Entry entry : responseCache.entrySet()){ - Integer taskInstanceId = entry.getKey(); - Command responseCommand = entry.getValue(); - taskCallbackService.sendResult(taskInstanceId,responseCommand); - } - } - }catch (Exception e){ + retrySendCommand(responceCache.getAckCache(), interval, nowTimeMillis); + retrySendCommand(responceCache.getResponseCache(), interval, nowTimeMillis); + retrySendCommand(responceCache.getKillResponseCache(), interval, nowTimeMillis); + retrySendCommand(responceCache.getRecallCache(), interval, nowTimeMillis); + } catch (Exception e) { logger.warn("retry report task status error", e); } } } + + private void retrySendCommand(Map cache, long interval, long nowTimeMillis) { + for (Map.Entry entry : cache.entrySet()) { + Command command = entry.getValue(); + if (nowTimeMillis - command.getGenCommandTimeMillis() > interval) { + Integer taskInstanceId = entry.getKey(); + taskCallbackService.sendResult(taskInstanceId, command); + logger.info("retry send command successfully, the command type {}, the task id:{}", command.getType(),taskInstanceId); + } + } + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java index d1f1c39717c964f6a91ec4606b455639c04aba1c..90c5a41923b11dd0d8721fcbe2a0dc2a1e67bd66 100644 --- 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java @@ -17,34 +17,30 @@ package org.apache.dolphinscheduler.server.worker.runner; -import static java.util.Calendar.DAY_OF_MONTH; - import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.RetryerUtils; -import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.server.utils.LogUtils; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; import org.apache.dolphinscheduler.server.worker.plugin.TaskPluginManager; import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; -import org.apache.dolphinscheduler.spi.exception.PluginNotFoundException; import 
org.apache.dolphinscheduler.spi.task.AbstractTask; import org.apache.dolphinscheduler.spi.task.TaskAlertInfo; import org.apache.dolphinscheduler.spi.task.TaskChannel; +import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; @@ -59,15 +55,12 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Delayed; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.github.rholder.retry.RetryException; - /** * task scheduler thread */ @@ -93,16 +86,6 @@ public class TaskExecuteThread implements Runnable, Delayed { */ private TaskCallbackService taskCallbackService; - /** - * taskExecutionContextCacheManager - */ - private TaskExecutionContextCacheManager taskExecutionContextCacheManager; - - /** - * task logger - */ - private Logger taskLogger; - /** * alert client server */ @@ -136,14 +119,20 @@ public class TaskExecuteThread implements Runnable, Delayed { @Override public void run() { - TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(taskExecutionContext.getTaskInstanceId(), taskExecutionContext.getProcessInstanceId()); try { - logger.info("script path : {}", taskExecutionContext.getExecutePath()); + taskExecutionContext.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext)); + + // local execute path + String execLocalPath = getExecLocalPath(taskExecutionContext); + FileUtils.createWorkDirIfAbsent(execLocalPath); + logger.info("task instance local execute path : {}", execLocalPath); + taskExecutionContext.setExecutePath(execLocalPath); + // check if the OS user exists if (!OSUtils.getUserList().contains(taskExecutionContext.getTenantCode())) { String errorLog = String.format("tenantCode: %s does not exist", 
taskExecutionContext.getTenantCode()); - taskLogger.error(errorLog); + logger.error(errorLog); responseCommand.setStatus(ExecutionStatus.FAILURE.getCode()); responseCommand.setEndTime(new Date()); return; @@ -153,10 +142,11 @@ public class TaskExecuteThread implements Runnable, Delayed { taskExecutionContext.setStartTime(new Date()); } if (taskExecutionContext.getCurrentExecutionStatus() != ExecutionStatus.RUNNING_EXECUTION) { - changeTaskExecutionStatusToRunning(); + //changeTaskExecutionStatusToRunning(); + logger.info("the task begins to execute. task instance id: {}", taskExecutionContext.getTaskInstanceId()); + taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); + sendTaskExecuteRunningCommand(taskExecutionContext); } - logger.info("the task begins to execute. task instance id: {}", taskExecutionContext.getTaskInstanceId()); - int dryRun = taskExecutionContext.getDryRun(); // copy hdfs/minio file to local if (dryRun == Constants.DRY_RUN_FLAG_NO) { @@ -176,19 +166,30 @@ public class TaskExecuteThread implements Runnable, Delayed { TaskChannel taskChannel = taskPluginManager.getTaskChannelMap().get(taskExecutionContext.getTaskType()); if (null == taskChannel) { - throw new PluginNotFoundException(String.format("%s Task Plugin Not Found,Please Check Config File.", taskExecutionContext.getTaskType())); + throw new RuntimeException(String.format("%s Task Plugin Not Found,Please Check Config File.", taskExecutionContext.getTaskType())); } TaskRequest taskRequest = JSONUtils.parseObject(JSONUtils.toJsonString(taskExecutionContext), TaskRequest.class); + if (null == taskRequest) { + throw new RuntimeException("The taskExecutionContext parse error"); + } String taskLogName = LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, taskExecutionContext.getProcessDefineCode(), taskExecutionContext.getProcessDefineVersion(), taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTaskInstanceId()); 
taskRequest.setTaskLogName(taskLogName); + if (!TaskExecutionContextCacheManager.updateTaskExecutionContext(taskRequest)) { + TaskExecutionContextCacheManager.cacheTaskExecutionContext(taskRequest); + logger.info("taskRequest reCache successfully, taskInstanceId: {}", taskExecutionContext.getTaskInstanceId()); + } + // set the name of the current thread + Thread.currentThread().setName(String.format(TaskConstants.TASK_LOGGER_THREAD_NAME_FORMAT,taskLogName)); task = taskChannel.createTask(taskRequest); + // task init this.task.init(); + //init varPool this.task.getParameters().setVarPool(taskExecutionContext.getVarPool()); @@ -211,7 +212,6 @@ public class TaskExecuteThread implements Runnable, Delayed { responseCommand.setVarPool(JSONUtils.toJsonString(this.task.getParameters().getVarPool())); logger.info("task instance id : {},task final status : {}", taskExecutionContext.getTaskInstanceId(), this.task.getExitStatus()); } catch (Throwable e) { - logger.error("task scheduler failure", e); kill(); responseCommand.setStatus(ExecutionStatus.FAILURE.getCode()); @@ -219,13 +219,52 @@ public class TaskExecuteThread implements Runnable, Delayed { responseCommand.setProcessId(task.getProcessId()); responseCommand.setAppIds(task.getAppIds()); } finally { - TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command(), Event.RESULT); - taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); + if (TaskExecutionContextCacheManager.statusIsStop(taskExecutionContext.getTaskInstanceId())) { + logger.info("task has exited, taskInstanceId:{}, exitStatusCode:{}, task executionStatus:{}", + taskExecutionContext.getTaskInstanceId(), this.task.getExitStatusCode(), ExecutionStatus.STOP); + } else { + TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); 
+ ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command(), Event.RESULT); + taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); + } clearTaskExecPath(); } } + /** + * get execute local path + * + * @param taskExecutionContext taskExecutionContext + * @return execute local path + */ + private String getExecLocalPath(TaskExecutionContext taskExecutionContext) { + return FileUtils.getProcessExecDir(taskExecutionContext.getProjectCode(), + taskExecutionContext.getProcessDefineCode(), + taskExecutionContext.getProcessDefineVersion(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId()); + } + + private void sendTaskExecuteRunningCommand(TaskExecutionContext taskExecutionContext) { + TaskExecuteAckCommand command = buildTaskExecuteRunningCommand(taskExecutionContext); + // add response cache + ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), command.convert2Command(), Event.ACK); + taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), command.convert2Command()); + } + + private TaskExecuteAckCommand buildTaskExecuteRunningCommand(TaskExecutionContext taskExecutionContext) { + TaskExecuteAckCommand command = new TaskExecuteAckCommand(); + command.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + command.setProcessInstanceId(taskExecutionContext.getProcessInstanceId()); + command.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode()); + command.setLogPath(taskExecutionContext.getLogPath()); + command.setHost(taskExecutionContext.getHost()); + command.setStartTime(taskExecutionContext.getStartTime()); + command.setExecutePath(taskExecutionContext.getExecutePath()); + return command; + } + + private void sendAlert(TaskAlertInfo taskAlertInfo) { alertClientService.sendAlert(taskAlertInfo.getAlertGroupId(), taskAlertInfo.getTitle(), taskAlertInfo.getContent()); } @@ -325,42 
+364,6 @@ public class TaskExecuteThread implements Runnable, Delayed { } } - /** - * send an ack to change the status of the task. - */ - private void changeTaskExecutionStatusToRunning() { - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); - Command ackCommand = buildAckCommand().convert2Command(); - try { - RetryerUtils.retryCall(() -> { - taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand); - return Boolean.TRUE; - }); - } catch (ExecutionException | RetryException e) { - logger.error(e.getMessage(), e); - } - } - - /** - * build ack command. - * - * @return TaskExecuteAckCommand - */ - private TaskExecuteAckCommand buildAckCommand() { - TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); - ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode()); - ackCommand.setStartTime(taskExecutionContext.getStartTime()); - ackCommand.setLogPath(taskExecutionContext.getLogPath()); - ackCommand.setHost(taskExecutionContext.getHost()); - if (TaskType.SQL.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType()) || TaskType.PROCEDURE.getDesc().equalsIgnoreCase(taskExecutionContext.getTaskType())) { - ackCommand.setExecutePath(null); - } else { - ackCommand.setExecutePath(taskExecutionContext.getExecutePath()); - } - return ackCommand; - } - /** * get current TaskExecutionContext * @@ -389,9 +392,6 @@ public class TaskExecuteThread implements Runnable, Delayed { // replace variable TIME with $[YYYYmmddd...] 
in shell file when history run job and batch complement job if (taskExecutionContext.getScheduleTime() != null) { Date date = taskExecutionContext.getScheduleTime(); - if (CommandType.COMPLEMENT_DATA.getCode() == taskExecutionContext.getCmdTypeIfComplement()) { - date = DateUtils.add(taskExecutionContext.getScheduleTime(), DAY_OF_MONTH, 1); - } String dateTime = DateUtils.format(date, Constants.PARAMETER_FORMAT_TIME); Property p = new Property(); p.setValue(dateTime); @@ -400,4 +400,8 @@ public class TaskExecuteThread implements Runnable, Delayed { } taskExecutionContext.setParamsMap(paramsMap); } + + public AbstractTask getTask() { + return task; + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java new file mode 100644 index 0000000000000000000000000000000000000000..b98024674b240159910f87200251c6c00164a0ac --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.runner; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadPoolExecutor; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; + +public class WorkerExecService { + /** + * logger of WorkerExecService + */ + private static final Logger logger = LoggerFactory.getLogger(WorkerExecService.class); + + private final ListeningExecutorService listeningExecutorService; + + /** + * thread executor service + */ + private final ExecutorService execService; + + /** + * running task + */ + private final ConcurrentHashMap taskExecuteThreadMap; + + public WorkerExecService(ExecutorService execService, ConcurrentHashMap taskExecuteThreadMap) { + this.execService = execService; + this.listeningExecutorService = MoreExecutors.listeningDecorator(this.execService); + this.taskExecuteThreadMap = taskExecuteThreadMap; + } + + public void submit(TaskExecuteThread taskExecuteThread) { + taskExecuteThreadMap.put(taskExecuteThread.getTaskExecutionContext().getTaskInstanceId(), taskExecuteThread); + ListenableFuture future = this.listeningExecutorService.submit(taskExecuteThread); + FutureCallback futureCallback = new FutureCallback() { + @Override + public void onSuccess(Object o) { + taskExecuteThreadMap.remove(taskExecuteThread.getTaskExecutionContext().getTaskInstanceId()); + } + + @Override + public void onFailure(Throwable throwable) { + logger.error("task execute failed, processInstanceId:{}, taskInstanceId:{}", taskExecuteThread.getTaskExecutionContext().getProcessInstanceId() + , taskExecuteThread.getTaskExecutionContext().getTaskInstanceId(), 
throwable); + taskExecuteThreadMap.remove(taskExecuteThread.getTaskExecutionContext().getTaskInstanceId()); + } + }; + Futures.addCallback(future, futureCallback, this.listeningExecutorService); + } + + /** + * get thread pool queue size + * + * @return queue size + */ + public int getThreadPoolQueueSize() { + return ((ThreadPoolExecutor) this.execService).getQueue().size(); + } + +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java index 8319e01664bf9484499d62be815e1b64771cbadb..a73b1ac92a1350747899d832bd2d3ff2e0ca8340 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.runner; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; @@ -31,8 +32,9 @@ import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; import org.apache.dolphinscheduler.spi.task.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.DelayQueue; -import java.util.concurrent.ExecutorService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,7 +51,12 @@ public class WorkerManagerThread implements Runnable { /** * task queue */ - private final DelayQueue workerExecuteQueue = new DelayQueue<>(); + private final BlockingQueue waitSubmitQueue; + + /** + * running 
task + */ + private final ConcurrentHashMap taskExecuteThreadMap = new ConcurrentHashMap<>(); /** * worker config @@ -59,7 +66,7 @@ public class WorkerManagerThread implements Runnable { /** * thread executor service */ - private final ExecutorService workerExecService; + private final WorkerExecService workerExecService; /** * task callback service @@ -68,17 +75,34 @@ public class WorkerManagerThread implements Runnable { public WorkerManagerThread() { this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); - this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", this.workerConfig.getWorkerExecThreads()); + this.waitSubmitQueue = new DelayQueue<>(); + this.workerExecService = new WorkerExecService( + ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", this.workerConfig.getWorkerExecThreads()), + taskExecuteThreadMap + ); this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); } + public TaskExecuteThread getTaskExecuteThread(Integer taskInstanceId) { + return this.taskExecuteThreadMap.get(taskInstanceId); + } + /** - * get queue size + * get wait submit queue size * * @return queue size */ - public int getQueueSize() { - return workerExecuteQueue.size(); + public int getWaitSubmitQueueSize() { + return waitSubmitQueue.size(); + } + + /** + * get thread pool queue size + * + * @return queue size + */ + public int getThreadPoolQueueSize() { + return this.workerExecService.getThreadPoolQueueSize(); } /** @@ -86,9 +110,9 @@ public class WorkerManagerThread implements Runnable { * then send Response to Master, update the execution status of task instance */ public void killTaskBeforeExecuteByInstanceId(Integer taskInstanceId) { - workerExecuteQueue.stream() + waitSubmitQueue.stream() .filter(taskExecuteThread -> taskExecuteThread.getTaskExecutionContext().getTaskInstanceId() == taskInstanceId) - .forEach(workerExecuteQueue::remove); + .forEach(waitSubmitQueue::remove); 
sendTaskKillResponse(taskInstanceId); } @@ -114,7 +138,14 @@ public class WorkerManagerThread implements Runnable { * @return submit result */ public boolean offer(TaskExecuteThread taskExecuteThread) { - return workerExecuteQueue.offer(taskExecuteThread); + if (waitSubmitQueue.size() > workerConfig.getWorkerExecThreads()) { + // if waitSubmitQueue is full, it will wait 1s, then try add + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); + if (waitSubmitQueue.size() > workerConfig.getWorkerExecThreads()) { + return false; + } + } + return waitSubmitQueue.offer(taskExecuteThread); } public void start() { @@ -129,8 +160,14 @@ public class WorkerManagerThread implements Runnable { TaskExecuteThread taskExecuteThread; while (Stopper.isRunning()) { try { - taskExecuteThread = workerExecuteQueue.take(); - workerExecService.submit(taskExecuteThread); + if (this.getThreadPoolQueueSize() <= workerConfig.getWorkerExecThreads()) { + taskExecuteThread = waitSubmitQueue.take(); + workerExecService.submit(taskExecuteThread); + } else { + logger.info("Exec queue is full, waiting submit queue {}, waiting exec queue size {}", + this.getWaitSubmitQueueSize(), this.getThreadPoolQueueSize()); + ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); + } } catch (Exception e) { logger.error("An unexpected interrupt is happened, " + "the exception will be ignored and this thread will continue to run", e); diff --git a/dolphinscheduler-standalone-server/src/main/resources/registry.properties b/dolphinscheduler-server/src/main/resources/application-master.yaml similarity index 59% rename from dolphinscheduler-standalone-server/src/main/resources/registry.properties rename to dolphinscheduler-server/src/main/resources/application-master.yaml index 3c337990759189e3522371e0b25e44c010a56f5c..737b542387ebafd7926b607443e9aa2e612261a3 100644 --- a/dolphinscheduler-standalone-server/src/main/resources/registry.properties +++ b/dolphinscheduler-server/src/main/resources/application-master.yaml @@ -14,9 
+14,31 @@ # See the License for the specific language governing permissions and # limitations under the License. # +spring: + application: + name: master-server + cache: + # default unable cache, you can enable by `type: caffeine` + type: none + cache-names: + - tenant + - user + - processDefinition + - processTaskRelation + - taskDefinition + - workerGroup + - schedule + caffeine: + spec: maximumSize=100,expireAfterWrite=300s,recordStats -# This file is only to override the production configurations in standalone server. +server: + port: 5679 -registry.plugin.dir=./dolphinscheduler-dist/target/dolphinscheduler-dist-2.0.0-SNAPSHOT/lib/plugin/registry/zookeeper -registry.plugin.name=zookeeper -registry.servers=127.0.0.1:2181 +management: + endpoints: + web: + exposure: + include: '*' + metrics: + tags: + application: ${spring.application.name} diff --git a/dolphinscheduler-server/src/main/resources/config/install_config.conf b/dolphinscheduler-server/src/main/resources/config/install_config.conf index 9706897aa6d2d283c86419730555ee9f1d89f009..91ddb1fc106e5b9ff78eef89e28b05d2db58ee79 100755 --- a/dolphinscheduler-server/src/main/resources/config/install_config.conf +++ b/dolphinscheduler-server/src/main/resources/config/install_config.conf @@ -15,64 +15,111 @@ # limitations under the License. # +# --------------------------------------------------------- +# INSTALL MACHINE +# --------------------------------------------------------- +# A comma separated list of machine hostname or IP would be installed DolphinScheduler, +# including master, worker, api, alert. 
If you want to deploy in pseudo-distributed +# mode, just write a pseudo-distributed hostname +# Example for hostnames: ips="ds1,ds2,ds3,ds4,ds5", Example for IPs: ips="192.168.8.1,192.168.8.2,192.168.8.3,192.168.8.4,192.168.8.5" +ips="ds1,ds2,ds3,ds4,ds5" -# NOTICE: If the following config has special characters in the variable `.*[]^${}\+?|()@#&`, Please escape, for example, `[` escape to `\[` -# postgresql or mysql -dbtype="mysql" +# Port of SSH protocol, default value is 22. For now we only support same port in all `ips` machine +# modify it if you use different ssh port +sshPort="22" -# db config -# db address and port -dbhost="192.168.xx.xx:3306" +# A comma separated list of machine hostname or IP would be installed Master server, it +# must be a subset of configuration `ips`. +# Example for hostnames: masters="ds1,ds2", Example for IPs: masters="192.168.8.1,192.168.8.2" +masters="ds1,ds2" -# db username -username="xx" +# A comma separated list of machine : or :.All hostname or IP must be a +# subset of configuration `ips`, And workerGroup have default value as `default`, but we recommend you declare behind the hosts +# Example for hostnames: workers="ds1:default,ds2:default,ds3:default", Example for IPs: workers="192.168.8.1:default,192.168.8.2:default,192.168.8.3:default" +workers="ds1:default,ds2:default,ds3:default,ds4:default,ds5:default" -# db password -# NOTICE: if there are special characters, please use the \ to escape, for example, `[` escape to `\[` -password="xx" +# A comma separated list of machine hostname or IP would be installed Alert server, it +# must be a subset of configuration `ips`. +# Example for hostname: alertServer="ds3", Example for IP: alertServer="192.168.8.3" +alertServer="ds3" -# database name -dbname="dolphinscheduler" +# A comma separated list of machine hostname or IP would be installed API server, it +# must be a subset of configuration `ips`. 
+# Example for hostname: apiServers="ds1", Example for IP: apiServers="192.168.8.1" +apiServers="ds1" +# A comma separated list of machine hostname or IP would be installed Python gateway server, it +# must be a subset of configuration `ips`. +# Example for hostname: pythonGatewayServers="ds1", Example for IP: pythonGatewayServers="192.168.8.1" +pythonGatewayServers="ds1" -# zk cluster -zkQuorum="192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181" +# The directory to install DolphinScheduler for all machine we config above. It will automatically be created by `install.sh` script if not exists. +# Do not set this configuration same as the current path (pwd) +installPath="/data1_1T/dolphinscheduler" -# zk root directory -zkRoot="/dolphinscheduler" +# The user to deploy DolphinScheduler for all machine we config above. For now user must create by yourself before running `install.sh` +# script. The user needs to have sudo privileges and permissions to operate hdfs. If hdfs is enabled than the root directory needs +# to be created by this user +deployUser="dolphinscheduler" -# registry config -# registry plugin dir -# Note: find and load the Registry Plugin Jar from this dir. -registryPluginDir="/data1_1T/dolphinscheduler/lib/plugin/registry" -registryPluginName="zookeeper" -registryServers="192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181" +# The directory to store local data for all machine we config above. Make sure user `deployUser` have permissions to read and write this directory. +dataBasedirPath="/tmp/dolphinscheduler" +# --------------------------------------------------------- +# DolphinScheduler ENV +# --------------------------------------------------------- +# JAVA_HOME, we recommend use same JAVA_HOME in all machine you going to install DolphinScheduler +# and this configuration only support one parameter so far. 
+javaHome="/your/java/home/here" -# Note: the target installation path for dolphinscheduler, please not config as the same as the current path (pwd) -installPath="/data1_1T/dolphinscheduler" +# DolphinScheduler API service port, also this is your DolphinScheduler UI component's URL port, default value is 12345 +apiServerPort="12345" -# deployment user -# Note: the deployment user needs to have sudo privileges and permissions to operate hdfs. If hdfs is enabled, the root directory needs to be created by itself -deployUser="dolphinscheduler" +# --------------------------------------------------------- +# Database +# NOTICE: If database value has special characters, such as `.*[]^${}\+?|()@#&`, Please add prefix `\` for escaping. +# --------------------------------------------------------- +# The type for the metadata database +# Supported values: ``postgresql``, ``mysql`, `h2``. +DATABASE_TYPE=${DATABASE_TYPE:-"h2"} -# alert config -# alert plugin dir -# Note: find and load the Alert Plugin Jar from this dir. -alertPluginDir="/data1_1T/dolphinscheduler/lib/plugin/alert" +# Spring datasource url, following :/? 
format, If you using mysql, you could use jdbc +# string jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 as example +SPRING_DATASOURCE_URL=${SPRING_DATASOURCE_URL:-"jdbc:h2:mem:dolphinscheduler;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true"} -# user data local directory path, please make sure the directory exists and have read write permissions -dataBasedirPath="/tmp/dolphinscheduler" +# Spring datasource username +SPRING_DATASOURCE_USERNAME=${SPRING_DATASOURCE_USERNAME:-"sa"} + +# Spring datasource password +SPRING_DATASOURCE_PASSWORD=${SPRING_DATASOURCE_PASSWORD:-""} + +# --------------------------------------------------------- +# Registry Server +# --------------------------------------------------------- +# Registry Server plugin name, should be a substring of `registryPluginDir`, DolphinScheduler use this for verifying configuration consistency +registryPluginName="zookeeper" + +# Registry Server address. +registryServers="192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181" + +# Registry Namespace +registryNamespace="dolphinscheduler" + +# --------------------------------------------------------- +# Worker Task Server +# --------------------------------------------------------- +# Worker Task Server plugin dir. DolphinScheduler will find and load the worker task plugin jar package from this dir. +taskPluginDir="lib/plugin/task" # resource storage type: HDFS, S3, NONE resourceStorageType="NONE" -# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions. "/dolphinscheduler" is recommended +# resource store on HDFS/S3 path, resource file will store to this hdfs path, self configuration, please make sure the directory exists on hdfs and has read write permissions. 
"/dolphinscheduler" is recommended resourceUploadPath="/dolphinscheduler" -# if resourceStorageType is HDFS,defaultFS write namenode address,HA you need to put core-site.xml and hdfs-site.xml in the conf directory. +# if resourceStorageType is HDFS,defaultFS write namenode address,HA, you need to put core-site.xml and hdfs-site.xml in the conf directory. # if S3,write S3 address,HA,for example :s3a://dolphinscheduler, -# Note,s3 be sure to create the root directory /dolphinscheduler +# Note,S3 be sure to create the root directory /dolphinscheduler defaultFS="hdfs://mycluster:8020" # if resourceStorageType is S3, the following three configuration is required, otherwise please ignore @@ -83,13 +130,13 @@ s3SecretKey="xxxxxxxxxx" # resourcemanager port, the default value is 8088 if not specified resourceManagerHttpAddressPort="8088" -# if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single, keep this value empty +# if resourcemanager HA is enabled, please set the HA IPs; if resourcemanager is single node, keep this value empty yarnHaIps="192.168.xx.xx,192.168.xx.xx" -# if resourcemanager HA is enabled or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ds1 to actual resourcemanager hostname +# if resourcemanager HA is enabled or not use resourcemanager, please keep the default value; If resourcemanager is single node, you only need to replace 'yarnIp1' to actual resourcemanager hostname singleYarnIp="yarnIp1" -# who have permissions to create directory under HDFS/S3 root path +# who has permission to create directory under HDFS/S3 root path # Note: if kerberos is enabled, please config hdfsRootUser= hdfsRootUser="hdfs" @@ -110,34 +157,3 @@ sudoEnable="true" # worker tenant auto create workerTenantAutoCreate="false" - -# worker task plugin dir -taskPluginDir="/data1_1T/dolphinscheduler/lib/plugin/task" - -# api server port -apiServerPort="12345" - - -# install hosts -# Note: 
install the scheduled hostname list. If it is pseudo-distributed, just write a pseudo-distributed hostname -ips="ds1,ds2,ds3,ds4,ds5" - -# ssh port, default 22 -# Note: if ssh port is not default, modify here -sshPort="22" - -# run master machine -# Note: list of hosts hostname for deploying master -masters="ds1,ds2" - -# run worker machine -# note: need to write the worker group name of each worker, the default value is "default" -workers="ds1:default,ds2:default,ds3:default,ds4:default,ds5:default" - -# run alert machine -# note: list of machine hostnames for deploying alert server -alertServer="ds3" - -# run api machine -# note: list of machine hostnames for deploying api server -apiServers="ds1" \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/resources/logback-master.xml b/dolphinscheduler-server/src/main/resources/logback-master.xml index a61d891b104b98be7821154460c482b7bf541680..bef5ce19ff0abce7c73c4db7d23bda2781049f36 100644 --- a/dolphinscheduler-server/src/main/resources/logback-master.xml +++ b/dolphinscheduler-server/src/main/resources/logback-master.xml @@ -62,6 +62,7 @@ ${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log 168 + 2GB 200MB diff --git a/dolphinscheduler-server/src/main/resources/logback-worker.xml b/dolphinscheduler-server/src/main/resources/logback-worker.xml index 31719d5b535af5f73117b4a58bc4c6b385bd130c..cf68c85a75e023d64ba3b1c084a98bba2fbdd980 100644 --- a/dolphinscheduler-server/src/main/resources/logback-worker.xml +++ b/dolphinscheduler-server/src/main/resources/logback-worker.xml @@ -62,6 +62,7 @@ ${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log 168 + 2GB 200MB diff --git a/dolphinscheduler-server/src/main/resources/master.properties b/dolphinscheduler-server/src/main/resources/master.properties index cc45622132f1c9125b1294f00bae77cba2e75ad1..e67d6973bb4557cd641caf981c6b1208dfc2bfaa 100644 --- a/dolphinscheduler-server/src/main/resources/master.properties +++ 
b/dolphinscheduler-server/src/main/resources/master.properties @@ -18,12 +18,9 @@ # master listen port #master.listen.port=5678 -# master execute thread number to limit process instances in parallel +# Number of process instances executed in parallel #master.exec.threads=100 -# master execute task number in parallel per process instance -#master.exec.task.num=20 - # master dispatch task number per batch #master.dispatch.task.num=3 @@ -44,3 +41,12 @@ # master reserved memory, only lower than system available memory, master server can schedule. default value 0.3, the unit is G #master.reserved.memory=0.3 + +# master failover interval, default value 10 seconds +#master.failover.interval=10 + +# master kill yarn job when failover, default: true +#master.kill.yarn.job.when.handle.failover=true + +# master persist event state threads, default value 10 +#master.persist.event.state.threads=10 diff --git a/dolphinscheduler-server/src/main/resources/worker.properties b/dolphinscheduler-server/src/main/resources/worker.properties index 0e72baac8bbc531c042eac7d524b94ff3fb2a485..b8529b179fee27cc668dc4d940f1608dffcf351e 100644 --- a/dolphinscheduler-server/src/main/resources/worker.properties +++ b/dolphinscheduler-server/src/main/resources/worker.properties @@ -49,3 +49,6 @@ task.plugin.dir=lib/plugin/task #task.plugin.binding config the task plugin need be load when development and run in IDE #task.plugin.binding=./dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml + +# worker retry report task statues interval, default value 10 min +#worker.retry.report.task.statues.interval=10 diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessorTest.java index e24539558ced37d199ad515b2c4921ad51fb44ff..13eb520db9ebfe383671fa8969a5d4752142637e 100644 --- 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessorTest.java @@ -17,34 +17,99 @@ package org.apache.dolphinscheduler.server.log; +import io.netty.channel.Channel; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand; - +import org.junit.Before; import org.junit.Test; -import org.junit.Test.None; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import io.netty.channel.Channel; - @RunWith(PowerMockRunner.class) @PrepareForTest({LoggerUtils.class}) public class LoggerRequestProcessorTest { - @Test(expected = None.class) + private String dsHome; + + @Before + public void initDsHome() { + // DOLPHINSCHEDULER_HOME is be set in start.sh. if we run test in IDE user.dir is DS Home. 
+ dsHome = System.getProperty("DOLPHINSCHEDULER_HOME"); + if (StringUtils.isBlank(dsHome)) { + dsHome = System.getProperty("user.dir"); + System.setProperty("DOLPHINSCHEDULER_HOME", dsHome); + } + } + + @Test public void testProcessViewWholeLogRequest() { + System.setProperty("DOLPHINSCHEDULER_HOME", System.getProperty("user.dir")); Channel channel = PowerMockito.mock(Channel.class); PowerMockito.when(channel.writeAndFlush(Mockito.any(Command.class))).thenReturn(null); PowerMockito.mockStatic(LoggerUtils.class); PowerMockito.when(LoggerUtils.readWholeFileContent(Mockito.anyString())).thenReturn(""); + String userDir = System.getProperty("user.dir"); + ViewLogRequestCommand logRequestCommand = new ViewLogRequestCommand(userDir + "/log/path/a.log"); + + Command command = new Command(); + command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); + command.setBody(JSONUtils.toJsonByteArray(logRequestCommand)); - ViewLogRequestCommand logRequestCommand = new ViewLogRequestCommand("/log/path"); + LoggerRequestProcessor loggerRequestProcessor = new LoggerRequestProcessor(); + loggerRequestProcessor.process(channel, command); + } + + @Test(expected = IllegalArgumentException.class) + public void testProcessViewWholeLogRequestError() { + System.setProperty("DOLPHINSCHEDULER_HOME", System.getProperty("user.dir")); + Channel channel = PowerMockito.mock(Channel.class); + PowerMockito.when(channel.writeAndFlush(Mockito.any(Command.class))).thenReturn(null); + PowerMockito.mockStatic(LoggerUtils.class); + PowerMockito.when(LoggerUtils.readWholeFileContent(Mockito.anyString())).thenReturn(""); + String userDir = System.getProperty("user.dir"); + ViewLogRequestCommand logRequestCommand = new ViewLogRequestCommand(userDir + "/log/path/a"); + + Command command = new Command(); + command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); + command.setBody(JSONUtils.toJsonByteArray(logRequestCommand)); + + LoggerRequestProcessor loggerRequestProcessor = new LoggerRequestProcessor(); + 
loggerRequestProcessor.process(channel, command); + } + + @Test(expected = IllegalArgumentException.class) + public void testProcessViewWholeLogRequestErrorRelativePath() { + System.setProperty("DOLPHINSCHEDULER_HOME", System.getProperty("user.dir")); + Channel channel = PowerMockito.mock(Channel.class); + PowerMockito.when(channel.writeAndFlush(Mockito.any(Command.class))).thenReturn(null); + PowerMockito.mockStatic(LoggerUtils.class); + PowerMockito.when(LoggerUtils.readWholeFileContent(Mockito.anyString())).thenReturn(""); + String userDir = System.getProperty("user.dir"); + ViewLogRequestCommand logRequestCommand = new ViewLogRequestCommand(userDir + "/log/../../a.log"); + + Command command = new Command(); + command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); + command.setBody(JSONUtils.toJsonByteArray(logRequestCommand)); + + LoggerRequestProcessor loggerRequestProcessor = new LoggerRequestProcessor(); + loggerRequestProcessor.process(channel, command); + } + + @Test(expected = IllegalArgumentException.class) + public void testProcessViewWholeLogRequestErrorStartWith() { + System.setProperty("DOLPHINSCHEDULER_HOME", System.getProperty("user.dir")); + Channel channel = PowerMockito.mock(Channel.class); + PowerMockito.when(channel.writeAndFlush(Mockito.any(Command.class))).thenReturn(null); + PowerMockito.mockStatic(LoggerUtils.class); + PowerMockito.when(LoggerUtils.readWholeFileContent(Mockito.anyString())).thenReturn(""); + ViewLogRequestCommand logRequestCommand = new ViewLogRequestCommand("/log/a.log"); Command command = new Command(); command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); @@ -53,4 +118,4 @@ public class LoggerRequestProcessorTest { LoggerRequestProcessor loggerRequestProcessor = new LoggerRequestProcessor(); loggerRequestProcessor.process(channel, command); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java index 74bdceae8bce174904065da46f42cba26029e4e3..0dc5fb8f2ee746633c6ca03dbe70a439be30383a 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java @@ -17,57 +17,68 @@ package org.apache.dolphinscheduler.server.log; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.service.log.LogClientService; - -import org.apache.commons.lang.StringUtils; - -import java.io.File; -import java.io.IOException; -import java.nio.charset.Charset; - import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; + public class LoggerServerTest { private LoggerServer loggerServer; private LogClientService logClientService; + private String dsHome; + @Before public void startServerAndClient() { this.loggerServer = new LoggerServer(); this.loggerServer.start(); this.logClientService = new LogClientService(); + + // DOLPHINSCHEDULER_HOME is be set in start.sh. if we run test in IDE user.dir is DS Home. 
+ dsHome = System.getProperty("DOLPHINSCHEDULER_HOME"); + if (StringUtils.isBlank(dsHome)) { + dsHome = System.getProperty("user.dir"); + System.setProperty("DOLPHINSCHEDULER_HOME", dsHome); + } } @Test - public void testRollViewLog() throws IOException { + public void testRollViewLog() + throws IOException { String expectedTmpDemoString = "testRolloViewLog"; - org.apache.commons.io.FileUtils.writeStringToFile(new File("/tmp/demo.txt"), expectedTmpDemoString, Charset.defaultCharset()); + String testFile = dsHome + "/tmp/demo.log"; + org.apache.commons.io.FileUtils.writeStringToFile(new File(testFile), expectedTmpDemoString, Charset.defaultCharset()); String resultTmpDemoString = this.logClientService.rollViewLog( - "localhost", Constants.RPC_PORT,"/tmp/demo.txt", 0, 1000); + "localhost", Constants.RPC_PORT, testFile, 0, 1000); Assert.assertEquals(expectedTmpDemoString, resultTmpDemoString.replaceAll("[\r|\n|\t]", StringUtils.EMPTY)); - FileUtils.deleteFile("/tmp/demo.txt"); + FileUtils.deleteFile(testFile); } @Test - public void testRemoveTaskLog() throws IOException { + public void testRemoveTaskLog() + throws IOException { String expectedTmpRemoveString = "testRemoveTaskLog"; - org.apache.commons.io.FileUtils.writeStringToFile(new File("/tmp/remove.txt"), expectedTmpRemoveString, Charset.defaultCharset()); + String testFile = dsHome + "/tmp/remove.log"; + org.apache.commons.io.FileUtils.writeStringToFile(new File(testFile), expectedTmpRemoveString, Charset.defaultCharset()); - Boolean b = this.logClientService.removeTaskLog("localhost", Constants.RPC_PORT,"/tmp/remove.txt"); + Boolean b = this.logClientService.removeTaskLog("localhost", Constants.RPC_PORT, testFile); Assert.assertTrue(b); - String result = this.logClientService.viewLog("localhost", Constants.RPC_PORT,"/tmp/demo.txt"); + String result = this.logClientService.viewLog("localhost", Constants.RPC_PORT, testFile); Assert.assertEquals(StringUtils.EMPTY, result); } diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java index fbacf0f256a9ebff66965ad876c075a5018b5c11..a8733f2df0ee1b505a2449943911a3a5fcc9ea24 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java @@ -17,23 +17,16 @@ package org.apache.dolphinscheduler.server.log; +import static org.apache.dolphinscheduler.server.log.SensitiveDataConverter.passwordHandler; + import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.SensitiveLogUtils; -import java.util.Map; -import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.slf4j.Marker; - -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.classic.spi.IThrowableProxy; -import ch.qos.logback.classic.spi.LoggerContextVO; public class SensitiveDataConverterTest { @@ -58,86 +51,7 @@ public class SensitiveDataConverterTest { @Test public void convert() { - SensitiveDataConverter sensitiveDataConverter = new SensitiveDataConverter(); - String result = sensitiveDataConverter.convert(new ILoggingEvent() { - @Override - public String getThreadName() { - return null; - } - - @Override - public Level getLevel() { - return Level.INFO; - } - - @Override - public String getMessage() { - return null; - } - - @Override - public Object[] getArgumentArray() { - return new Object[0]; - } - - @Override - public String getFormattedMessage() { - return logMsg; - } - - @Override - public String getLoggerName() { - return null; - } - - @Override - public LoggerContextVO 
getLoggerContextVO() { - return null; - } - - @Override - public IThrowableProxy getThrowableProxy() { - return null; - } - - @Override - public StackTraceElement[] getCallerData() { - return new StackTraceElement[0]; - } - - @Override - public boolean hasCallerData() { - return false; - } - - @Override - public Marker getMarker() { - return null; - } - - @Override - public Map getMDCPropertyMap() { - return null; - } - - @Override - public Map getMdc() { - return null; - } - - @Override - public long getTimeStamp() { - return 0; - } - - @Override - public void prepareForDeferredProcessing() { - - } - }); - Assert.assertNotEquals(maskLogMsg, passwordHandler(pwdPattern, logMsg)); - } /** @@ -153,28 +67,4 @@ public class SensitiveDataConverterTest { } - /** - * password regex test - * - * @param logMsg original log - */ - private static String passwordHandler(Pattern pattern, String logMsg) { - - Matcher matcher = pattern.matcher(logMsg); - - StringBuffer sb = new StringBuffer(logMsg.length()); - - while (matcher.find()) { - - String password = matcher.group(); - - String maskPassword = SensitiveLogUtils.maskDataSourcePwd(password); - - matcher.appendReplacement(sb, maskPassword); - } - matcher.appendTail(sb); - - return sb.toString(); - } - } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java index 190847541c114518bb616107e0346671468e917a..13232c36ab5fe93d1e1325d6cfed4967ac810495 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java @@ -16,16 +16,19 @@ */ package org.apache.dolphinscheduler.server.log; -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.spi.ILoggingEvent; -import 
ch.qos.logback.classic.spi.IThrowableProxy; -import ch.qos.logback.classic.spi.LoggerContextVO; +import org.apache.dolphinscheduler.spi.task.TaskConstants; + +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.slf4j.Marker; -import java.util.Map; +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.classic.spi.IThrowableProxy; +import ch.qos.logback.classic.spi.LoggerContextVO; public class TaskLogDiscriminatorTest { @@ -48,7 +51,7 @@ public class TaskLogDiscriminatorTest { String result = taskLogDiscriminator.getDiscriminatingValue(new ILoggingEvent() { @Override public String getThreadName() { - return null; + return String.format(TaskConstants.TASK_LOGGER_THREAD_NAME_FORMAT,"-[taskAppId=TASK-1-1-1"); } @Override @@ -73,7 +76,7 @@ public class TaskLogDiscriminatorTest { @Override public String getLoggerName() { - return "[taskAppId=TASK-1-1-1"; + return TaskConstants.TASK_LOG_LOGGER_NAME; } @Override diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java index d8abb48d72795c2faf23bea26cd35ae7605ac767..78ebae61586c4380598652ff9b901029ffde934d 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java @@ -16,17 +16,19 @@ */ package org.apache.dolphinscheduler.server.log; +import org.apache.dolphinscheduler.spi.task.TaskConstants; + +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Marker; + import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.classic.spi.IThrowableProxy; import ch.qos.logback.classic.spi.LoggerContextVO; import 
ch.qos.logback.core.spi.FilterReply; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Marker; - -import java.util.Map; public class TaskLogFilterTest { @@ -39,7 +41,7 @@ public class TaskLogFilterTest { FilterReply filterReply = taskLogFilter.decide(new ILoggingEvent() { @Override public String getThreadName() { - return LoggerUtils.TASK_LOGGER_THREAD_NAME; + return TaskConstants.TASK_LOGGER_THREAD_NAME; } @Override @@ -64,7 +66,7 @@ public class TaskLogFilterTest { @Override public String getLoggerName() { - return null; + return TaskConstants.TASK_LOG_LOGGER_NAME; } @Override diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java index c2043e56e8864bed31715f74b866248729fda76d..718e14ccaf03bc2681fb507b2dc7a05583384895 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java @@ -140,7 +140,7 @@ public class ConditionsTaskTest { taskNode.setRunFlag(FLOWNODE_RUN_FLAG_NORMAL); DependentItem dependentItem = new DependentItem(); - dependentItem.setDepTasks("1"); + dependentItem.setDepTaskCode(11L); dependentItem.setStatus(ExecutionStatus.SUCCESS); DependentTaskModel dependentTaskModel = new DependentTaskModel(); @@ -155,8 +155,8 @@ public class ConditionsTaskTest { taskNode.setDependence(JSONUtils.toJsonString(dependentParameters)); ConditionsParameters conditionsParameters = new ConditionsParameters(); - conditionsParameters.setSuccessNode(Stream.of("2").collect(Collectors.toList())); - conditionsParameters.setFailedNode(Stream.of("3").collect(Collectors.toList())); + conditionsParameters.setSuccessNode(Stream.of(2L).collect(Collectors.toList())); + 
conditionsParameters.setFailedNode(Stream.of(3L).collect(Collectors.toList())); // out: SUCCESS => 2, FAILED => 3 taskNode.setConditionResult(JSONUtils.toJsonString(conditionsParameters)); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java index 9a1861388d08b09773c19ae83f062a88c43572ee..9d3333199761393b463f905291afa3ccdc4b9678 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java @@ -39,7 +39,6 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -60,6 +59,10 @@ public class DependentTaskTest { public static final Long TASK_CODE = 1111L; + public static final Long DEPEND_TASK_CODE_A = 110L; + public static final Long DEPEND_TASK_CODE_B = 111L; + public static final Long DEPEND_TASK_CODE_C = 112L; + public static final Long DEPEND_TASK_CODE_D = 113L; public static final int TASK_VERSION = 1; private ProcessService processService; @@ -125,7 +128,7 @@ public class DependentTaskTest { DependentTaskModel dependentTaskModel = new DependentTaskModel(); dependentTaskModel.setRelation(DependentRelation.AND); dependentTaskModel.setDependItemList(Stream.of( - getDependentItemFromTaskNode(2L, "A", "today", "day") + getDependentItemFromTaskNode(2L, DEPEND_TASK_CODE_A, "today", "day") ).collect(Collectors.toList())); DependentParameters dependentParameters = new DependentParameters(); @@ -152,8 +155,8 @@ public class DependentTaskTest { Mockito.when(processService .findValidTaskListByProcessId(200)) .thenReturn(Stream.of( - 
getTaskInstanceForValidTaskList(2000, ExecutionStatus.SUCCESS, "A", dependentProcessInstance), - getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, "B", dependentProcessInstance) + getTaskInstanceForValidTaskList(2000, ExecutionStatus.SUCCESS, DEPEND_TASK_CODE_A, dependentProcessInstance), + getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, DEPEND_TASK_CODE_B, dependentProcessInstance) ).collect(Collectors.toList())); } @@ -172,8 +175,8 @@ public class DependentTaskTest { Mockito.when(processService .findValidTaskListByProcessId(200)) .thenReturn(Stream.of( - getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, "A", dependentProcessInstance), - getTaskInstanceForValidTaskList(2000, ExecutionStatus.SUCCESS, "B", dependentProcessInstance) + getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, DEPEND_TASK_CODE_A, dependentProcessInstance), + getTaskInstanceForValidTaskList(2000, ExecutionStatus.SUCCESS, DEPEND_TASK_CODE_B, dependentProcessInstance) ).collect(Collectors.toList())); } @@ -182,15 +185,15 @@ public class DependentTaskTest { DependentTaskModel dependentTaskModel1 = new DependentTaskModel(); dependentTaskModel1.setRelation(DependentRelation.AND); dependentTaskModel1.setDependItemList(Stream.of( - getDependentItemFromTaskNode(2L, "A", "today", "day"), - getDependentItemFromTaskNode(3L, "B", "today", "day") + getDependentItemFromTaskNode(2L, DEPEND_TASK_CODE_A, "today", "day"), + getDependentItemFromTaskNode(3L, DEPEND_TASK_CODE_B, "today", "day") ).collect(Collectors.toList())); DependentTaskModel dependentTaskModel2 = new DependentTaskModel(); dependentTaskModel2.setRelation(DependentRelation.OR); dependentTaskModel2.setDependItemList(Stream.of( - getDependentItemFromTaskNode(2L, "A", "today", "day"), - getDependentItemFromTaskNode(3L, "C", "today", "day") + getDependentItemFromTaskNode(2L, DEPEND_TASK_CODE_A, "today", "day"), + getDependentItemFromTaskNode(3L, DEPEND_TASK_CODE_C, "today", "day") 
).collect(Collectors.toList())); /* @@ -225,13 +228,13 @@ public class DependentTaskTest { Mockito.when(processService .findValidTaskListByProcessId(200)) .thenReturn(Stream.of( - getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, "A", processInstance200) + getTaskInstanceForValidTaskList(2000, ExecutionStatus.FAILURE, DEPEND_TASK_CODE_A, processInstance200) ).collect(Collectors.toList())); Mockito.when(processService .findValidTaskListByProcessId(300)) .thenReturn(Stream.of( - getTaskInstanceForValidTaskList(3000, ExecutionStatus.SUCCESS, "B", processInstance300), - getTaskInstanceForValidTaskList(3001, ExecutionStatus.SUCCESS, "C", processInstance300) + getTaskInstanceForValidTaskList(3000, ExecutionStatus.SUCCESS, DEPEND_TASK_CODE_B, processInstance300), + getTaskInstanceForValidTaskList(3001, ExecutionStatus.SUCCESS, DEPEND_TASK_CODE_C, processInstance300) ).collect(Collectors.toList())); //DependentTaskExecThread taskExecThread = new DependentTaskExecThread(taskInstance); @@ -247,7 +250,7 @@ public class DependentTaskTest { DependentTaskModel dependentTaskModel = new DependentTaskModel(); dependentTaskModel.setRelation(DependentRelation.AND); dependentTaskModel.setDependItemList(Stream.of( - getDependentItemFromTaskNode(2L, Constants.DEPENDENT_ALL, "today", "day") + getDependentItemFromTaskNode(2L, Constants.DEPENDENT_ALL_TASK_CODE, "today", "day") ).collect(Collectors.toList())); DependentParameters dependentParameters = new DependentParameters(); @@ -300,7 +303,7 @@ public class DependentTaskTest { DependentTaskModel dependentTaskModel = new DependentTaskModel(); dependentTaskModel.setRelation(DependentRelation.AND); dependentTaskModel.setDependItemList(Stream.of( - getDependentItemFromTaskNode(2L, "A", "today", "day") + getDependentItemFromTaskNode(2L, DEPEND_TASK_CODE_A, "today", "day") ).collect(Collectors.toList())); DependentParameters dependentParameters = new DependentParameters(); @@ -327,7 +330,7 @@ public class DependentTaskTest { 
.thenAnswer(i -> { processInstance.setState(ExecutionStatus.READY_STOP); return Stream.of( - getTaskInstanceForValidTaskList(2000, ExecutionStatus.RUNNING_EXECUTION, "A", dependentProcessInstance) + getTaskInstanceForValidTaskList(2000, ExecutionStatus.RUNNING_EXECUTION, DEPEND_TASK_CODE_A, dependentProcessInstance) ).collect(Collectors.toList()); }) .thenThrow(new IllegalStateException("have not been stopped as expected")); @@ -351,6 +354,7 @@ public class DependentTaskTest { TaskNode taskNode = new TaskNode(); taskNode.setId("tasks-10"); taskNode.setName("D"); + taskNode.setCode(DEPEND_TASK_CODE_D); taskNode.setType(TaskType.DEPENDENT.getDesc()); taskNode.setRunFlag(FLOWNODE_RUN_FLAG_NORMAL); return taskNode; @@ -380,10 +384,10 @@ public class DependentTaskTest { /** * DependentItem defines the condition for the dependent */ - private DependentItem getDependentItemFromTaskNode(Long processDefinitionCode, String taskName, String date, String cycle) { + private DependentItem getDependentItemFromTaskNode(Long processDefinitionCode, long taskCode, String date, String cycle) { DependentItem dependentItem = new DependentItem(); dependentItem.setDefinitionCode(processDefinitionCode); - dependentItem.setDepTasks(taskName); + dependentItem.setDepTaskCode(taskCode); dependentItem.setDateValue(date); dependentItem.setCycle(cycle); // so far, the following fields have no effect @@ -401,12 +405,12 @@ public class DependentTaskTest { private TaskInstance getTaskInstanceForValidTaskList( int taskInstanceId, ExecutionStatus state, - String taskName, ProcessInstance processInstance + long taskCode, ProcessInstance processInstance ) { TaskInstance taskInstance = new TaskInstance(); taskInstance.setTaskType(TaskType.DEPENDENT.getDesc()); taskInstance.setId(taskInstanceId); - taskInstance.setName(taskName); + taskInstance.setTaskCode(taskCode); taskInstance.setProcessInstanceId(processInstance.getId()); taskInstance.setState(state); return taskInstance; diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/SwitchTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/SwitchTaskTest.java index 61f1d6d80063b1bf6c494626b6b17c375707c2a0..38bd79d2e0681127ded6354c982cfb59a474d361 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/SwitchTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/SwitchTaskTest.java @@ -113,19 +113,19 @@ public class SwitchTaskTest { SwitchResultVo switchResultVo1 = new SwitchResultVo(); switchResultVo1.setCondition(" 2 == 1"); - switchResultVo1.setNextNode("t1"); + switchResultVo1.setNextNode(1L); SwitchResultVo switchResultVo2 = new SwitchResultVo(); switchResultVo2.setCondition(" 2 == 2"); - switchResultVo2.setNextNode("t2"); + switchResultVo2.setNextNode(2L); SwitchResultVo switchResultVo3 = new SwitchResultVo(); switchResultVo3.setCondition(" 3 == 2"); - switchResultVo3.setNextNode("t3"); + switchResultVo3.setNextNode(3L); List list = new ArrayList<>(); list.add(switchResultVo1); list.add(switchResultVo2); list.add(switchResultVo3); conditionsParameters.setDependTaskList(list); - conditionsParameters.setNextNode("t"); + conditionsParameters.setNextNode(4L); conditionsParameters.setRelation("AND"); return conditionsParameters; diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/WorkflowExecuteThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/WorkflowExecuteThreadTest.java index 1b4d3bfd4b9f048d16c9237b7e614205b32fb0fe..0f38c7dedb7259a2e02497d9549ed16312afff7c 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/WorkflowExecuteThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/WorkflowExecuteThreadTest.java @@ -20,10 +20,8 @@ package 
org.apache.dolphinscheduler.server.master; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODES; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.powermock.api.mockito.PowerMockito.mock; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -34,7 +32,6 @@ import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; @@ -47,7 +44,6 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; @@ -88,7 +84,6 @@ public class WorkflowExecuteThreadTest { applicationContext = mock(ApplicationContext.class); config = new MasterConfig(); - config.setMasterExecTaskNum(1); Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config); processInstance = mock(ProcessInstance.class); @@ -106,57 +101,24 @@ public class WorkflowExecuteThreadTest { Mockito.when(processInstance.getProcessDefinition()).thenReturn(processDefinition); ConcurrentHashMap taskTimeoutCheckList = new ConcurrentHashMap<>(); - workflowExecuteThread = 
PowerMockito.spy(new WorkflowExecuteThread(processInstance, processService, null, null, config, taskTimeoutCheckList)); + ConcurrentHashMap taskRetryCheckList = new ConcurrentHashMap<>(); + ConcurrentHashMap depStateCheckList = new ConcurrentHashMap<>(); + workflowExecuteThread = PowerMockito.spy(new WorkflowExecuteThread(processInstance, null, + processService, null, null, + config, taskTimeoutCheckList, taskRetryCheckList, depStateCheckList)); // prepareProcess init dag Field dag = WorkflowExecuteThread.class.getDeclaredField("dag"); dag.setAccessible(true); dag.set(workflowExecuteThread, new DAG()); - PowerMockito.doNothing().when(workflowExecuteThread, "executeProcess"); - PowerMockito.doNothing().when(workflowExecuteThread, "prepareProcess"); - PowerMockito.doNothing().when(workflowExecuteThread, "runProcess"); PowerMockito.doNothing().when(workflowExecuteThread, "endProcess"); } - /** - * without schedule - */ - @Test - public void testParallelWithOutSchedule() throws ParseException { - try { - Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionId)).thenReturn(zeroSchedulerList()); - Method method = WorkflowExecuteThread.class.getDeclaredMethod("executeComplementProcess"); - method.setAccessible(true); - method.invoke(workflowExecuteThread); - // one create save, and 1-30 for next save, and last day 20 no save - verify(processService, times(20)).saveProcessInstance(processInstance); - } catch (Exception e) { - e.printStackTrace(); - Assert.fail(); - } - } - - /** - * with schedule - */ - @Test - public void testParallelWithSchedule() { - try { - Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionCode(processDefinitionId)).thenReturn(oneSchedulerList()); - Method method = WorkflowExecuteThread.class.getDeclaredMethod("executeComplementProcess"); - method.setAccessible(true); - method.invoke(workflowExecuteThread); - // one create save, and 9(1 to 20 step 2) for next save, and last day 31 no save - 
verify(processService, times(20)).saveProcessInstance(processInstance); - } catch (Exception e) { - Assert.fail(); - } - } @Test - public void testParseStartNodeName() throws ParseException { + public void testParseStartNodeName() { try { Map cmdParam = new HashMap<>(); - cmdParam.put(CMD_PARAM_START_NODE_NAMES, "t1,t2,t3"); + cmdParam.put(CMD_PARAM_START_NODES, "1,2,3"); Mockito.when(processInstance.getCommandParam()).thenReturn(JSONUtils.toJsonString(cmdParam)); Class masterExecThreadClass = WorkflowExecuteThread.class; Method method = masterExecThreadClass.getDeclaredMethod("parseStartNodeName", String.class); @@ -168,23 +130,6 @@ public class WorkflowExecuteThreadTest { } } - @Test - public void testRetryTaskIntervalOverTime() { - try { - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(0); - taskInstance.setMaxRetryTimes(0); - taskInstance.setRetryInterval(0); - taskInstance.setState(ExecutionStatus.FAILURE); - Class masterExecThreadClass = WorkflowExecuteThread.class; - Method method = masterExecThreadClass.getDeclaredMethod("retryTaskIntervalOverTime", TaskInstance.class); - method.setAccessible(true); - Assert.assertTrue((Boolean) method.invoke(workflowExecuteThread, taskInstance)); - } catch (Exception e) { - Assert.fail(); - } - } - @Test public void testGetStartTaskInstanceList() { try { @@ -256,16 +201,4 @@ public class WorkflowExecuteThreadTest { } } - private List zeroSchedulerList() { - return Collections.emptyList(); - } - - private List oneSchedulerList() { - List schedulerList = new LinkedList<>(); - Schedule schedule = new Schedule(); - schedule.setCrontab("0 0 0 1/2 * ?"); - schedulerList.add(schedule); - return schedulerList; - } - -} \ No newline at end of file +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImplTest.java 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImplTest.java deleted file mode 100644 index f6098454ba40bb81f2c6e994174e36d35d5676fe..0000000000000000000000000000000000000000 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImplTest.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.server.master.cache.impl; - -import static org.apache.dolphinscheduler.common.Constants.CACHE_REFRESH_TIME_MILLIS; - -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; -import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; - -import java.util.Calendar; -import java.util.Date; -import java.util.concurrent.TimeUnit; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; - -@RunWith(MockitoJUnitRunner.class) -public class TaskInstanceCacheManagerImplTest { - - @InjectMocks - private TaskInstanceCacheManagerImpl taskInstanceCacheManager; - - @Mock(name = "processService") - private ProcessService processService; - - @Before - public void before() { - - TaskExecuteAckCommand taskExecuteAckCommand = new TaskExecuteAckCommand(); - taskExecuteAckCommand.setStatus(1); - taskExecuteAckCommand.setExecutePath("/dolphinscheduler/worker"); - taskExecuteAckCommand.setHost("worker007"); - taskExecuteAckCommand.setLogPath("/temp/worker.log"); - taskExecuteAckCommand.setStartTime(new Date(1970, Calendar.AUGUST,7)); - taskExecuteAckCommand.setTaskInstanceId(0); - - taskInstanceCacheManager.cacheTaskInstance(taskExecuteAckCommand); - - } - - @Test - public void testInit() throws InterruptedException { - - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(0); - taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); - 
taskInstance.setExecutePath("/dolphinscheduler/worker"); - taskInstance.setHost("worker007"); - taskInstance.setLogPath("/temp/worker.log"); - taskInstance.setProcessInstanceId(0); - - Mockito.when(processService.findTaskInstanceById(0)).thenReturn(taskInstance); - - taskInstanceCacheManager.init(); - TimeUnit.MILLISECONDS.sleep(CACHE_REFRESH_TIME_MILLIS + 1000); - - Assert.assertEquals(taskInstance.getState(), taskInstanceCacheManager.getByTaskInstanceId(0).getState()); - - } - - @Test - public void getByTaskInstanceIdFromCache() { - TaskInstance instanceGot = taskInstanceCacheManager.getByTaskInstanceId(0); - - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(0); - taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); - taskInstance.setExecutePath("/dolphinscheduler/worker"); - taskInstance.setHost("worker007"); - taskInstance.setLogPath("/temp/worker.log"); - taskInstance.setStartTime(new Date(1970, Calendar.AUGUST,7)); - - Assert.assertEquals(taskInstance.toString(), instanceGot.toString()); - - } - - @Test - public void getByTaskInstanceIdFromDatabase() { - - TaskInstance taskInstance = new TaskInstance(); - taskInstance.setId(1); - taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); - taskInstance.setExecutePath("/dolphinscheduler/worker"); - taskInstance.setHost("worker007"); - taskInstance.setLogPath("/temp/worker.log"); - taskInstance.setStartTime(new Date(1970, Calendar.AUGUST,7)); - - Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); - - TaskInstance instanceGot = taskInstanceCacheManager.getByTaskInstanceId(1); - - Assert.assertEquals(taskInstance, instanceGot); - - } - - @Test - public void cacheTaskInstanceByTaskExecutionContext() { - TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); - taskExecutionContext.setTaskInstanceId(2); - taskExecutionContext.setTaskName("blackberrier test"); - taskExecutionContext.setStartTime(new Date(1970, Calendar.AUGUST,7)); - 
taskExecutionContext.setTaskType(TaskType.SPARK.getDesc()); - taskExecutionContext.setExecutePath("/tmp"); - - taskInstanceCacheManager.cacheTaskInstance(taskExecutionContext); - - TaskInstance taskInstance = taskInstanceCacheManager.getByTaskInstanceId(2); - - Assert.assertEquals(taskInstance.getId(), 2); - Assert.assertEquals(taskInstance.getName(), "blackberrier test"); - Assert.assertEquals(taskInstance.getStartTime(), new Date(1970, Calendar.AUGUST, 7)); - Assert.assertEquals(taskInstance.getTaskType(), TaskType.SPARK.getDesc()); - Assert.assertEquals(taskInstance.getExecutePath(), "/tmp"); - - } - - @Test - public void testCacheTaskInstanceByTaskExecuteAckCommand() { - TaskInstance taskInstance = taskInstanceCacheManager.getByTaskInstanceId(0); - - Assert.assertEquals(ExecutionStatus.RUNNING_EXECUTION, taskInstance.getState()); - Assert.assertEquals(new Date(1970, Calendar.AUGUST, 7), taskInstance.getStartTime()); - Assert.assertEquals("worker007", taskInstance.getHost()); - Assert.assertEquals("/dolphinscheduler/worker", taskInstance.getExecutePath()); - Assert.assertEquals("/temp/worker.log", taskInstance.getLogPath()); - - } - - @Test - public void testCacheTaskInstanceByTaskExecuteResponseCommand() { - TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(); - responseCommand.setTaskInstanceId(0); - responseCommand.setStatus(9); - responseCommand.setEndTime(new Date(1970, Calendar.AUGUST, 8)); - - taskInstanceCacheManager.cacheTaskInstance(responseCommand); - - TaskInstance taskInstance = taskInstanceCacheManager.getByTaskInstanceId(0); - - Assert.assertEquals(new Date(1970, Calendar.AUGUST, 8), taskInstance.getEndTime()); - Assert.assertEquals(ExecutionStatus.KILL, taskInstance.getState()); - - } - - @Test - public void removeByTaskInstanceId() { - taskInstanceCacheManager.removeByTaskInstanceId(0); - Assert.assertNull(taskInstanceCacheManager.getByTaskInstanceId(0)); - - } -} \ No newline at end of file diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java index dd190f771c040755f9e65228705f27e85b404d60..f9d51a910e20f3cdead9d95671b13424f163d4da 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java @@ -18,7 +18,6 @@ package org.apache.dolphinscheduler.server.master.consumer; import org.apache.dolphinscheduler.common.enums.CommandType; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskType; @@ -34,6 +33,7 @@ import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; +import org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Date; import java.util.concurrent.TimeUnit; diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java index 80f75af0a8f63f410552b91ae6b19ce42e10d2f4..619dba84bd4fc5fb393d57fe56f533b25770ae85 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java @@ -56,7 +56,7 
@@ public class ExecutorDispatcherTest { } @Test - public void testDispatch() throws ExecuteException { + public void testDispatch() throws Exception { int port = 30000; final NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(port); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RefreshResourceTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RefreshResourceTaskTest.java new file mode 100644 index 0000000000000000000000000000000000000000..5e35b0b2ecb88f30f585139ddb6efdda91979b7d --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RefreshResourceTaskTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.server.master.registry.ServerNodeManager; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * RefreshResourceTask test + */ +@RunWith(MockitoJUnitRunner.class) +public class RefreshResourceTaskTest { + + @Mock + private ServerNodeManager serverNodeManager; + + @InjectMocks + LowerWeightHostManager lowerWeightHostManager; + + @Test + public void testGetHostWeightWithResult() { + Assert.assertTrue(!lowerWeightHostManager.new RefreshResourceTask() + .getHostWeight("192.168.1.1:22", "default", null) + .isPresent()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java index f822f04d970d3e65dc3b4eb864ac12d64cbd7678..2137a0cfb9934c288bdf0ab4604422539ec07164 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java @@ -28,33 +28,33 @@ public class LowerWeightRoundRobinTest { @Test public void testSelect() { Collection sources = new ArrayList<>(); - sources.add(new HostWeight(HostWorker.of("192.158.2.1:11", 100, "default"), 0.06, 0.44, 3.84, System.currentTimeMillis() - 60 * 8 * 1000)); - sources.add(new HostWeight(HostWorker.of("192.158.2.2:22", 100, "default"), 0.06, 0.56, 3.24, System.currentTimeMillis() - 60 * 5 * 1000)); - sources.add(new HostWeight(HostWorker.of("192.158.2.3:33", 100, "default"), 0.06, 0.80, 3.15, 
System.currentTimeMillis() - 60 * 2 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.1:11", 100, "default"), 0.06, 0.44, 3.84, 1, System.currentTimeMillis() - 60 * 8 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.2:22", 100, "default"), 0.06, 0.56, 3.24, 2, System.currentTimeMillis() - 60 * 5 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.3:33", 100, "default"), 0.06, 0.80, 3.15, 1, System.currentTimeMillis() - 60 * 2 * 1000)); LowerWeightRoundRobin roundRobin = new LowerWeightRoundRobin(); HostWeight result; result = roundRobin.select(sources); Assert.assertEquals("192.158.2.1", result.getHost().getIp()); result = roundRobin.select(sources); - Assert.assertEquals("192.158.2.2", result.getHost().getIp()); + Assert.assertEquals("192.158.2.1", result.getHost().getIp()); result = roundRobin.select(sources); Assert.assertEquals("192.158.2.1", result.getHost().getIp()); result = roundRobin.select(sources); - Assert.assertEquals("192.158.2.2", result.getHost().getIp()); + Assert.assertEquals("192.158.2.3", result.getHost().getIp()); result = roundRobin.select(sources); Assert.assertEquals("192.158.2.1", result.getHost().getIp()); result = roundRobin.select(sources); - Assert.assertEquals("192.158.2.2", result.getHost().getIp()); + Assert.assertEquals("192.158.2.1", result.getHost().getIp()); } @Test public void testWarmUpSelect() { Collection sources = new ArrayList<>(); - sources.add(new HostWeight(HostWorker.of("192.158.2.1:11", 100, "default"), 0.06, 0.44, 3.84, System.currentTimeMillis() - 60 * 8 * 1000)); - sources.add(new HostWeight(HostWorker.of("192.158.2.2:22", 100, "default"), 0.06, 0.44, 3.84, System.currentTimeMillis() - 60 * 5 * 1000)); - sources.add(new HostWeight(HostWorker.of("192.158.2.3:33", 100, "default"), 0.06, 0.44, 3.84, System.currentTimeMillis() - 60 * 3 * 1000)); - sources.add(new HostWeight(HostWorker.of("192.158.2.4:33", 100, "default"), 0.06, 0.44, 3.84, System.currentTimeMillis() - 60 * 11 * 
1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.1:11", 100, "default"), 0.06, 0.44, 3.84, 0, System.currentTimeMillis() - 60 * 8 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.2:22", 100, "default"), 0.06, 0.44, 3.84, 0, System.currentTimeMillis() - 60 * 5 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.3:33", 100, "default"), 0.06, 0.44, 3.84, 0, System.currentTimeMillis() - 60 * 3 * 1000)); + sources.add(new HostWeight(HostWorker.of("192.158.2.4:33", 100, "default"), 0.06, 0.44, 3.84, 0, System.currentTimeMillis() - 60 * 11 * 1000)); LowerWeightRoundRobin roundRobin = new LowerWeightRoundRobin(); HostWeight result; diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryClientTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessorTest.java similarity index 33% rename from dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryClientTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessorTest.java index 2dc219334a615af8a02727d0b33e67e69d0a0af6..5c177ca94e3296a8f2c0632a37b6d20d91a1375f 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/registry/RegistryClientTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessorTest.java @@ -15,60 +15,60 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.service.registry; +package org.apache.dolphinscheduler.server.master.processor; -import static org.apache.dolphinscheduler.common.Constants.ADD_OP; -import static org.apache.dolphinscheduler.common.Constants.DELETE_OP; - -import static org.mockito.BDDMockito.given; - -import org.apache.dolphinscheduler.common.enums.NodeType; -import org.apache.dolphinscheduler.spi.register.Registry; - -import java.util.Arrays; +import org.apache.dolphinscheduler.common.enums.CacheType; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.remote.command.CacheExpireCommand; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; -import com.google.common.collect.Sets; +import io.netty.channel.Channel; +/** + * task ack processor test + */ @RunWith(PowerMockRunner.class) -@PrepareForTest({ RegistryClient.class }) -public class RegistryClientTest { +@PrepareForTest({SpringApplicationContext.class}) +public class CacheProcessorTest { - private RegistryClient registryClient; + private CacheProcessor cacheProcessor; - @Test - public void test() throws Exception { - Registry registry = PowerMockito.mock(Registry.class); - PowerMockito.doNothing().when(registry).persist(Mockito.anyString(), Mockito.anyString()); - PowerMockito.doNothing().when(registry).update(Mockito.anyString(), Mockito.anyString()); - PowerMockito.when(registry.releaseLock(Mockito.anyString())).thenReturn(true); - 
PowerMockito.when(registry.getChildren("/dead-servers")).thenReturn(Arrays.asList("worker_127.0.0.1:8089")); + @Mock + private Channel channel; - PowerMockito.suppress(PowerMockito.constructor(RegistryClient.class)); - registryClient = PowerMockito.mock(RegistryClient.class); - registryClient.persist("/key", ""); - registryClient.update("/key", ""); - registryClient.releaseLock("/key"); - registryClient.getChildrenKeys("/key"); - registryClient.handleDeadServer(Sets.newHashSet("ma/127.0.0.1:8089"), NodeType.WORKER, DELETE_OP); - registryClient.handleDeadServer(Sets.newHashSet("ma/127.0.0.1:8089"), NodeType.WORKER, ADD_OP); - //registryClient.removeDeadServerByHost("127.0.0.1:8089","master"); - registryClient.handleDeadServer("ma/127.0.0.1:8089", NodeType.WORKER, DELETE_OP); - registryClient.handleDeadServer("ma/127.0.0.1:8089", NodeType.WORKER, ADD_OP); - registryClient.checkIsDeadServer("master/127.0.0.1","master"); - given(registry.getChildren("/nodes/worker")).willReturn(Arrays.asList("worker_127.0.0.1:8089")); - given(registry.getChildren("/nodes/worker/worker_127.0.0.1:8089")).willReturn(Arrays.asList("default")); + @Mock + private CacheManager cacheManager; - registryClient.checkNodeExists("127.0.0.1",NodeType.WORKER); - - registryClient.getServerList(NodeType.MASTER); + @Mock + private Cache cache; + @Before + public void before() { + PowerMockito.mockStatic(SpringApplicationContext.class); + PowerMockito.when(SpringApplicationContext.getBean(CacheManager.class)).thenReturn(cacheManager); + Mockito.when(cacheManager.getCache(CacheType.TENANT.getCacheName())).thenReturn(cache); + cacheProcessor = new CacheProcessor(); } + @Test + public void testProcess() { + Tenant tenant = new Tenant(); + tenant.setId(1); + CacheExpireCommand cacheExpireCommand = new CacheExpireCommand(CacheType.TENANT, "1"); + Command command = cacheExpireCommand.convert2Command(); + + cacheProcessor.process(channel, command); + } } diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java index e215d4cdb64a4102e2a15ad655d9506258e0b0ef..823ffa2cd788c5fbf47352382fd2272b6c225174 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java @@ -18,20 +18,16 @@ package org.apache.dolphinscheduler.server.master.processor; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; -import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; -import java.net.InetSocketAddress; import java.util.Date; -import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @@ -39,7 +35,7 @@ import org.powermock.modules.junit4.PowerMockRunner; import io.netty.channel.Channel; /** - * task ack processor test + * task ack processor test */ @RunWith(PowerMockRunner.class) @PrepareForTest({SpringApplicationContext.class, TaskResponseEvent.class}) @@ -47,7 +43,6 @@ public class TaskAckProcessorTest { private TaskAckProcessor taskAckProcessor; private TaskResponseService taskResponseService; - private TaskInstanceCacheManagerImpl taskInstanceCacheManager; private 
ProcessService processService; private TaskExecuteAckCommand taskExecuteAckCommand; private TaskResponseEvent taskResponseEvent; @@ -60,9 +55,6 @@ public class TaskAckProcessorTest { taskResponseService = PowerMockito.mock(TaskResponseService.class); PowerMockito.when(SpringApplicationContext.getBean(TaskResponseService.class)).thenReturn(taskResponseService); - taskInstanceCacheManager = PowerMockito.mock(TaskInstanceCacheManagerImpl.class); - PowerMockito.when(SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class)).thenReturn(taskInstanceCacheManager); - processService = PowerMockito.mock(ProcessService.class); PowerMockito.when(SpringApplicationContext.getBean(ProcessService.class)).thenReturn(processService); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java index c7f047569e95fb08ef52e384a3493c9b0496d8a7..8bef045f5aca1cb830aaf6b2f724ce87f1a5aced 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java @@ -20,19 +20,26 @@ package org.apache.dolphinscheduler.server.master.processor; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.util.ArrayList; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; +import 
org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; import io.netty.channel.Channel; /** * task response processor test */ +@RunWith(PowerMockRunner.class) +@PrepareForTest({SpringApplicationContext.class}) public class TaskKillResponseProcessorTest { private TaskKillResponseProcessor taskKillResponseProcessor; @@ -41,8 +48,14 @@ public class TaskKillResponseProcessorTest { private Channel channel; + private TaskResponseService taskResponseService; + @Before public void before() { + PowerMockito.mockStatic(SpringApplicationContext.class); + + taskResponseService = PowerMockito.mock(TaskResponseService.class); + PowerMockito.when(SpringApplicationContext.getBean(TaskResponseService.class)).thenReturn(taskResponseService); taskKillResponseProcessor = new TaskKillResponseProcessor(); channel = PowerMockito.mock(Channel.class); taskKillResponseCommand = new TaskKillResponseCommand(); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java index 878446c30cb60f9b128330d9f96c4839f6bdb42a..d787d0c2dcf250119f97aadd69e5c53a95b43411 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.master.processor.queue; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.Date; @@ -34,12 +35,15 @@ import 
org.mockito.junit.MockitoJUnitRunner; import io.netty.channel.Channel; -@RunWith(MockitoJUnitRunner.class) +@RunWith(MockitoJUnitRunner.Silent.class) public class TaskResponseServiceTest { @Mock(name = "processService") private ProcessService processService; + @Mock + private MasterConfig masterConfig; + @InjectMocks TaskResponseService taskRspService; @@ -54,6 +58,7 @@ public class TaskResponseServiceTest { @Before public void before() { + Mockito.when(masterConfig.getMasterPersistEventStateThreads()).thenReturn(10); taskRspService.start(); ackEvent = TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXECUTION, diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClientTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClientTest.java index a91c6e28ee19da1ce64e07e1d25bd17c8d464f4c..71481052ccedbf6bc64e5e36261abdb42ca5cae0 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClientTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClientTest.java @@ -20,25 +20,26 @@ package org.apache.dolphinscheduler.server.master.registry; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.doNothing; -import java.util.Arrays; -import java.util.Date; -import java.util.concurrent.ScheduledExecutorService; - import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.service.process.ProcessService; 
import org.apache.dolphinscheduler.service.registry.RegistryClient; + +import java.util.Arrays; +import java.util.Date; +import java.util.concurrent.ScheduledExecutorService; + import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @@ -48,7 +49,7 @@ import org.springframework.test.util.ReflectionTestUtils; * MasterRegistryClientTest */ @RunWith(PowerMockRunner.class) -@PrepareForTest({ RegistryClient.class }) +@PrepareForTest({RegistryClient.class}) @PowerMockIgnore({"javax.management.*"}) public class MasterRegistryClientTest { @@ -58,6 +59,7 @@ public class MasterRegistryClientTest { @Mock private MasterConfig masterConfig; + @Mock private RegistryClient registryClient; @Mock @@ -68,13 +70,13 @@ public class MasterRegistryClientTest { @Before public void before() throws Exception { - PowerMockito.suppress(PowerMockito.constructor(RegistryClient.class)); - registryClient = PowerMockito.mock(RegistryClient.class); given(registryClient.getLock(Mockito.anyString())).willReturn(true); - given(registryClient.getMasterFailoverLockPath()).willReturn("/path"); given(registryClient.releaseLock(Mockito.anyString())).willReturn(true); given(registryClient.getHostByEventDataPath(Mockito.anyString())).willReturn("127.0.0.1:8080"); - doNothing().when(registryClient).handleDeadServer(Mockito.anyString(), Mockito.any(NodeType.class), Mockito.anyString()); + given(registryClient.getStoppable()).willReturn(cause -> { + + }); + doNothing().when(registryClient).handleDeadServer(Mockito.anySet(), Mockito.any(NodeType.class), Mockito.anyString()); ReflectionTestUtils.setField(masterRegistryClient, "registryClient", registryClient); ProcessInstance 
processInstance = new ProcessInstance(); @@ -103,11 +105,18 @@ public class MasterRegistryClientTest { masterRegistryClient.registry(); } + @Test + public void handleConnectionStateTest() { + masterRegistryClient.handleConnectionState(ConnectionState.CONNECTED); + masterRegistryClient.handleConnectionState(ConnectionState.RECONNECTED); + masterRegistryClient.handleConnectionState(ConnectionState.SUSPENDED); + } + @Test public void removeNodePathTest() { - masterRegistryClient.removeNodePath("/path", NodeType.MASTER, false); - masterRegistryClient.removeNodePath("/path", NodeType.MASTER, true); + masterRegistryClient.removeMasterNodePath("/path", NodeType.MASTER, false); + masterRegistryClient.removeMasterNodePath("/path", NodeType.MASTER, true); //Cannot mock static methods - masterRegistryClient.removeNodePath("/path", NodeType.WORKER, true); + masterRegistryClient.removeWorkerNodePath("/path", NodeType.WORKER, true); } } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessorTest.java index c0aeb7273a1447284a554a1518d6b3093b9679c8..e7afa143bb436e5b971c3530058dc30fa7ed3cd1 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessorTest.java @@ -18,10 +18,9 @@ package org.apache.dolphinscheduler.server.master.runner.task; import org.apache.dolphinscheduler.common.enums.CommandType; -import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Priority; -import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.spi.enums.ResourceType; import 
org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.TimeoutFlag; import org.apache.dolphinscheduler.dao.entity.DataSource; @@ -33,6 +32,7 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import java.util.ArrayList; diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java index 01f5ee28b5a31f95709683e46060bbc09e8167af..a49719a7f64301bd5acbfbd6768237ebc0012fb0 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactoryTest.java @@ -20,8 +20,10 @@ package org.apache.dolphinscheduler.server.master.runner.task; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class TaskProcessorFactoryTest { @Test diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java index 4429e7cd72d5ed157d9508027f17d588566f88f2..8d1faa80a94919cff193e697333b6f073c30edd5 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java +++ 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java @@ -36,7 +36,6 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.host.HostManager; import org.apache.dolphinscheduler.server.master.dispatch.host.RandomHostManager; @@ -67,11 +66,6 @@ public class DependencyConfig { return Mockito.mock(AlertMapper.class); } - @Bean - public TaskInstanceCacheManagerImpl taskInstanceCacheManagerImpl() { - return Mockito.mock(TaskInstanceCacheManagerImpl.class); - } - @Bean public ProcessService processService() { return Mockito.mock(ProcessService.class); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java index d545c02ba5b084a5ce9a8540a0e2eb090b8b9a53..6631d3d9f6263b773117439097a656f9034d1889 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java @@ -25,6 +25,8 @@ import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.commons.lang.SystemUtils; + import java.util.ArrayList; import java.util.List; @@ -36,11 +38,12 @@ import org.mockito.MockitoAnnotations; import org.powermock.api.mockito.PowerMockito; import 
org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.reflect.Whitebox; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @RunWith(PowerMockRunner.class) -@PrepareForTest({System.class, OSUtils.class, HadoopUtils.class, PropertyUtils.class}) +@PrepareForTest({System.class, OSUtils.class, HadoopUtils.class, PropertyUtils.class, SystemUtils.class}) public class ProcessUtilsTest { private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); @@ -53,11 +56,8 @@ public class ProcessUtilsTest { @Test public void getPidsStr() throws Exception { int processId = 1; - String pidList = ProcessUtils.getPidsStr(processId); - Assert.assertNotEquals("The child process of process 1 should not be empty", pidList, ""); - PowerMockito.mockStatic(OSUtils.class); - when(OSUtils.isMacOS()).thenReturn(true); + Whitebox.setInternalState(SystemUtils.class, "IS_OS_MAC", true); when(OSUtils.exeCmd(String.format("%s -p %d", Constants.PSTREE, processId))).thenReturn(null); String pidListMac = ProcessUtils.getPidsStr(processId); Assert.assertEquals("", pidListMac); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java index f4876a697fcc0455a563b10fe7b6aea37782b2d1..0ac237264db03efbaaeeb80538959ea0e49702ee 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java @@ -36,7 +36,6 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import 
org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.service.process.ProcessService; import org.mockito.Mockito; @@ -59,11 +58,6 @@ public class TaskCallbackServiceTestConfig { return Mockito.mock(AlertMapper.class); } - @Bean - public TaskInstanceCacheManagerImpl taskInstanceCacheManagerImpl() { - return Mockito.mock(TaskInstanceCacheManagerImpl.class); - } - @Bean public ProcessService processService() { return Mockito.mock(ProcessService.class); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessorTest.java index daee652c399986d5d14b67a622e00f5f84416ca0..559bd6d1e33320e2cdae26c156498ef27b52b888 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessorTest.java @@ -22,7 +22,6 @@ import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; @@ -49,7 +48,6 @@ import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * test task execute processor @@ -107,12 +105,6 @@ public 
class TaskExecuteProcessorTest { PowerMockito.when(SpringApplicationContext.getBean(WorkerConfig.class)) .thenReturn(workerConfig); - Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskExecutionContext.getProcessDefineCode(), - taskExecutionContext.getProcessDefineVersion(), - taskExecutionContext.getProcessInstanceId(), - taskExecutionContext.getTaskInstanceId())); - workerManager = PowerMockito.mock(WorkerManagerThread.class); PowerMockito.when(workerManager.offer(new TaskExecuteThread(taskExecutionContext, taskCallbackService, alertClientService))).thenReturn(Boolean.TRUE); diff --git a/dolphinscheduler-service/pom.xml b/dolphinscheduler-service/pom.xml index bc8b367364c5d1c87233d3b7dc4e6c737804a92e..4241f9638b53379ddc2b414b6a9d0668301d3eaf 100644 --- a/dolphinscheduler-service/pom.xml +++ b/dolphinscheduler-service/pom.xml @@ -15,12 +15,11 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -42,26 +41,33 @@ org.apache.dolphinscheduler dolphinscheduler-spi + + org.apache.dolphinscheduler + dolphinscheduler-registry-zookeeper + + + org.springframework.boot + spring-boot-starter-quartz + org.quartz-scheduler quartz - - com.mchange - c3p0 - - - com.mchange - mchange-commons-java - - - com.zaxxer - HikariCP-java6 - + + com.mchange + c3p0 + + + com.mchange + mchange-commons-java + + + com.zaxxer + HikariCP-java6 + - org.quartz-scheduler quartz-jobs @@ -76,23 +82,32 @@ org.powermock powermock-api-mockito2 test - - - org.mockito - mockito-core - - + - org.mockito - mockito-core - test + io.micrometer + micrometer-core + provided + - org.jacoco - org.jacoco.agent - runtime - test + com.github.ben-manes.caffeine + caffeine + + + + + org.apache.maven.plugins + maven-jar-plugin + + + *.yaml + *.properties + + + + + diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java index c2db5657dbb4b84866d26088c9e1d2d015968706..9542d39007b57669d64b12206e2985b9f3a2c4af 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java @@ -24,10 +24,8 @@ import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProjectUser; -import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import 
org.apache.dolphinscheduler.dao.entity.TaskInstance; import java.util.ArrayList; @@ -104,10 +102,11 @@ public class ProcessAlertManager { if (processInstance.getState().typeIsSuccess()) { List successTaskList = new ArrayList<>(1); ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() - .projectId(projectUser.getProjectId()) + .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) .processId(processInstance.getId()) + .processDefinitionCode(processInstance.getProcessDefinitionCode()) .processName(processInstance.getName()) .processType(processInstance.getCommandType()) .processState(processInstance.getState()) @@ -127,12 +126,13 @@ public class ProcessAlertManager { continue; } ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() - .projectId(projectUser.getProjectId()) + .projectCode(projectUser.getProjectCode()) .projectName(projectUser.getProjectName()) .owner(projectUser.getUserName()) .processId(processInstance.getId()) + .processDefinitionCode(processInstance.getProcessDefinitionCode()) .processName(processInstance.getName()) - .taskId(task.getId()) + .taskCode(task.getTaskCode()) .taskName(task.getName()) .taskType(task.getTaskType()) .taskState(task.getState()) @@ -162,7 +162,10 @@ public class ProcessAlertManager { for (TaskInstance taskInstance : toleranceTaskList) { ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder() + .processId(processInstance.getId()) + .processDefinitionCode(processInstance.getProcessDefinitionCode()) .processName(processInstance.getName()) + .taskCode(taskInstance.getTaskCode()) .taskName(taskInstance.getName()) .taskHost(taskInstance.getHost()) .retryTimes(taskInstance.getRetryTimes()) @@ -192,7 +195,6 @@ public class ProcessAlertManager { } catch (Exception e) { logger.error("send alert failed:{} ", e.getMessage()); } - } /** @@ -205,57 +207,66 @@ public class ProcessAlertManager { List taskInstances, 
ProjectUser projectUser) { - if (Flag.YES == processInstance.getIsSubProcess()) { + if (!isNeedToSendWarning(processInstance)) { return; } - boolean sendWarnning = false; + Alert alert = new Alert(); + + String cmdName = getCommandCnName(processInstance.getCommandType()); + String success = processInstance.getState().typeIsSuccess() ? "success" : "failed"; + alert.setTitle(cmdName + " " + success); + String content = getContentProcessInstance(processInstance, taskInstances,projectUser); + alert.setContent(content); + alert.setAlertGroupId(processInstance.getWarningGroupId()); + alert.setCreateTime(new Date()); + alertDao.addAlert(alert); + logger.info("add alert to db , alert: {}", alert); + } + + /** + * check if need to be send warning + * + * @param processInstance + * @return + */ + public boolean isNeedToSendWarning(ProcessInstance processInstance) { + if (Flag.YES == processInstance.getIsSubProcess()) { + return false; + } + boolean sendWarning = false; WarningType warningType = processInstance.getWarningType(); switch (warningType) { case ALL: if (processInstance.getState().typeIsFinished()) { - sendWarnning = true; + sendWarning = true; } break; case SUCCESS: if (processInstance.getState().typeIsSuccess()) { - sendWarnning = true; + sendWarning = true; } break; case FAILURE: if (processInstance.getState().typeIsFailure()) { - sendWarnning = true; + sendWarning = true; } break; default: } - if (!sendWarnning) { - return; - } - Alert alert = new Alert(); - - String cmdName = getCommandCnName(processInstance.getCommandType()); - String success = processInstance.getState().typeIsSuccess() ? 
"success" : "failed"; - alert.setTitle(cmdName + " " + success); - String content = getContentProcessInstance(processInstance, taskInstances,projectUser); - alert.setContent(content); - alert.setAlertGroupId(processInstance.getWarningGroupId()); - alert.setCreateTime(new Date()); - alertDao.addAlert(alert); - logger.info("add alert to db , alert: {}", alert); + return sendWarning; } /** * send process timeout alert * * @param processInstance process instance - * @param processDefinition process definition + * @param projectUser projectUser */ - public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) { - alertDao.sendProcessTimeoutAlert(processInstance, processDefinition); + public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProjectUser projectUser) { + alertDao.sendProcessTimeoutAlert(processInstance, projectUser); } - public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance taskInstance, TaskDefinition taskDefinition) { - alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(), processInstance.getId(),processInstance.getName(), - taskInstance.getId(), taskInstance.getName()); + public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance taskInstance, ProjectUser projectUser ) { + alertDao.sendTaskTimeoutAlert(processInstance, taskInstance, projectUser); } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannel.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/CacheNotifyService.java similarity index 81% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannel.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/CacheNotifyService.java index a429a4ca028aa18ae1a6bb7389d12cdb62b261a2..09c55714971927a14229cb096436d74d8f329297 100644 --- 
a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannel.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/CacheNotifyService.java @@ -15,14 +15,10 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.spi.alert; +package org.apache.dolphinscheduler.service.cache; -/** - * alert channel interface . - * - * @author gaojun - */ -public interface AlertChannel { +import org.apache.dolphinscheduler.remote.command.Command; - AlertResult process(AlertInfo info); +public interface CacheNotifyService { + void notifyMaster(Command command); } diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertPlugin.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheKeyGenerator.java similarity index 63% rename from dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertPlugin.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheKeyGenerator.java index 2d15f499c9ef37f483fbc279c0d2aee26602e72c..2a036542a4db8e9b250113dc3c9a5baad3bb74b1 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkAlertPlugin.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheKeyGenerator.java @@ -15,20 +15,22 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.plugin.alert.dingtalk; +package org.apache.dolphinscheduler.service.cache.impl; -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory; +import java.lang.reflect.Method; -import com.google.common.collect.ImmutableList; +import org.springframework.cache.interceptor.KeyGenerator; +import org.springframework.stereotype.Component; +import org.springframework.util.StringUtils; /** - * DingTalkAlertPlugin + * custom cache key generator */ -public class DingTalkAlertPlugin implements DolphinSchedulerPlugin { +@Component +public class CacheKeyGenerator implements KeyGenerator { @Override - public Iterable getAlertChannelFactorys() { - return ImmutableList.of(new DingTalkAlertChannelFactory()); + public Object generate(Object target, Method method, Object... params) { + return StringUtils.arrayToDelimitedString(params, "_"); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java new file mode 100644 index 0000000000000000000000000000000000000000..ffa9299ffccd9d3120bdf9db917be0f69a983c1c --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service.cache.impl; + +import org.apache.dolphinscheduler.common.enums.NodeType; +import org.apache.dolphinscheduler.common.model.Server; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.processor.NettyRemoteChannel; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.service.cache.CacheNotifyService; +import org.apache.dolphinscheduler.service.registry.RegistryClient; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import io.netty.channel.Channel; + +/** + * cache notify service + */ +@Service +public class CacheNotifyServiceImpl implements CacheNotifyService { + + private final Logger logger = LoggerFactory.getLogger(CacheNotifyServiceImpl.class); + + @Autowired + private RegistryClient registryClient; + + /** + * remote channels + */ + private static final ConcurrentHashMap REMOTE_CHANNELS = new ConcurrentHashMap<>(); + + /** + * netty remoting client + */ + private final NettyRemotingClient nettyRemotingClient; + + public CacheNotifyServiceImpl() { + final NettyClientConfig clientConfig = new NettyClientConfig(); + 
this.nettyRemotingClient = new NettyRemotingClient(clientConfig); + } + + /** + * add channel + * + * @param channel channel + */ + private void cache(Host host, NettyRemoteChannel channel) { + REMOTE_CHANNELS.put(host, channel); + } + + /** + * remove channel + */ + private void remove(Host host) { + REMOTE_CHANNELS.remove(host); + } + + /** + * get remote channel + * + * @return netty remote channel + */ + private NettyRemoteChannel getRemoteChannel(Host host) { + NettyRemoteChannel nettyRemoteChannel = REMOTE_CHANNELS.get(host); + if (nettyRemoteChannel != null) { + if (nettyRemoteChannel.isActive()) { + return nettyRemoteChannel; + } else { + this.remove(host); + } + } + + Channel channel = nettyRemotingClient.getChannel(host); + if (channel == null) { + return null; + } + + NettyRemoteChannel remoteChannel = new NettyRemoteChannel(channel); + this.cache(host, remoteChannel); + return remoteChannel; + } + + /** + * send result to master + * + * @param command command + */ + @Override + public void notifyMaster(Command command) { + logger.info("send result, command:{}", command.toString()); + try { + List serverList = registryClient.getServerList(NodeType.MASTER); + if (CollectionUtils.isEmpty(serverList)) { + return; + } + + for (Server server : serverList) { + Host host = new Host(server.getHost(), server.getPort()); + NettyRemoteChannel nettyRemoteChannel = getRemoteChannel(host); + if (nettyRemoteChannel == null) { + continue; + } + nettyRemoteChannel.writeAndFlush(command); + } + } catch (Exception e) { + logger.error("notify master error", e); + } + } +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java index f207a229e564bbe20a6067610847416beb0deed3..f682780ef75640791f245d4b388b5a8a2ede381c 100644 --- 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java @@ -149,7 +149,6 @@ public class LogClientService implements AutoCloseable { public byte[] getLogBytes(String host, int port, String path) { logger.info("log path {}", path); GetLogBytesRequestCommand request = new GetLogBytesRequestCommand(path); - byte[] result = null; final Host address = new Host(host, port); try { Command command = request.convert2Command(); @@ -157,14 +156,14 @@ public class LogClientService implements AutoCloseable { if (response != null) { GetLogBytesResponseCommand getLog = JSONUtils.parseObject( response.getBody(), GetLogBytesResponseCommand.class); - return getLog.getData(); + return getLog.getData() == null ? new byte[0] : getLog.getData(); } } catch (Exception e) { logger.error("get log size error", e); } finally { this.client.closeChannel(address); } - return result; + return new byte[0]; } /** @@ -199,4 +198,4 @@ public class LogClientService implements AutoCloseable { public boolean isRunning() { return isRunning; } -} \ No newline at end of file +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java index a8f73f0c97db3509e3bef55533a9ebfd34b7407e..491c191b360f74a1487934f96f5e616e3a37476f 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java @@ -20,11 +20,12 @@ package org.apache.dolphinscheduler.service.permission; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; import 
org.apache.dolphinscheduler.common.process.ResourceInfo; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.service.exceptions.ServiceException; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.commons.collections.CollectionUtils; + import java.util.List; import org.slf4j.Logger; diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 4f5058cd1f6f548abde8b6fdd9124496ef9cddeb..df6d0377c3760117ef7adeb924c8f7ccb2098283 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -23,12 +23,17 @@ import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_P import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_FATHER_PARAMS; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.START_UP_PARAMS_PREFIX; +import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS_PREFIX; import static java.util.stream.Collectors.toSet; +import static org.apache.dolphinscheduler.common.enums.DataType.VARCHAR; +import static 
org.apache.dolphinscheduler.common.enums.Direct.IN; +import org.apache.commons.collections4.MapUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -37,7 +42,6 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.FailureStrategy; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ReleaseState; -import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.enums.TimeoutFlag; import org.apache.dolphinscheduler.common.enums.WarningType; @@ -51,12 +55,11 @@ import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils; +import org.apache.dolphinscheduler.common.utils.CodeGenerateUtils.CodeGenerateException; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils; -import org.apache.dolphinscheduler.common.utils.SnowFlakeUtils.SnowFlakeException; import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.DagData; @@ -101,9 +104,12 @@ import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import 
org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.service.exceptions.ServiceException; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; +import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; @@ -125,10 +131,9 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.facebook.presto.jdbc.internal.guava.collect.Lists; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.Lists; /** * process relative dao that some mappers in this. 
@@ -139,10 +144,10 @@ public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), - ExecutionStatus.RUNNING_EXECUTION.ordinal(), - ExecutionStatus.DELAY_EXECUTION.ordinal(), - ExecutionStatus.READY_PAUSE.ordinal(), - ExecutionStatus.READY_STOP.ordinal()}; + ExecutionStatus.RUNNING_EXECUTION.ordinal(), + ExecutionStatus.DELAY_EXECUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), + ExecutionStatus.READY_STOP.ordinal()}; @Autowired private UserMapper userMapper; @@ -207,14 +212,13 @@ public class ProcessService { /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * - * @param logger logger - * @param host host - * @param validThreadNum validThreadNum + * @param logger logger + * @param host host * @param command found command * @return process instance */ - @Transactional(rollbackFor = Exception.class) - public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) { + @Transactional + public ProcessInstance handleCommand(Logger logger, String host, Command command) { ProcessInstance processInstance = constructProcessInstance(command, host); // cannot construct process instance, return null if (processInstance == null) { @@ -222,15 +226,11 @@ public class ProcessService { moveToErrorCommand(command, "process instance is null"); return null; } - if (!checkThreadNum(command, validThreadNum)) { - logger.info("there is not enough thread for this command: {}", command); - return setWaitingThreadProcess(command, processInstance); - } processInstance.setCommandType(command.getCommandType()); processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); - this.commandMapper.deleteById(command.getId()); + this.deleteCommandWithCheck(command.getId()); return processInstance; } @@ -240,7 +240,6 @@ 
public class ProcessService { * @param command command * @param message message */ - @Transactional(rollbackFor = Exception.class) public void moveToErrorCommand(Command command, String message) { ErrorCommand errorCommand = new ErrorCommand(command, message); this.errorCommandMapper.insert(errorCommand); @@ -250,7 +249,7 @@ public class ProcessService { /** * set process waiting thread * - * @param command command + * @param command command * @param processInstance processInstance * @return process instance */ @@ -265,18 +264,6 @@ public class ProcessService { return null; } - /** - * check thread num - * - * @param command command - * @param validThreadNum validThreadNum - * @return if thread is enough - */ - private boolean checkThreadNum(Command command, int validThreadNum) { - int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionCode()); - return validThreadNum >= commandThreadCount; - } - /** * insert one command * @@ -291,21 +278,8 @@ public class ProcessService { return result; } - /** - * find one command from queue list - * - * @return command - */ - public Command findOneCommand() { - return commandMapper.getOneToRun(); - } - /** * get command page - * - * @param pageSize - * @param pageNumber - * @return */ public List findCommandPage(int pageSize, int pageNumber) { return commandMapper.queryCommandPage(pageSize, pageNumber * pageSize); @@ -357,11 +331,11 @@ public class ProcessService { /** * get task node list by definitionId */ - public List getTaskNodeListByDefinitionId(Integer defineId) { - ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); + public List getTaskNodeListByDefinition(long defineCode) { + ProcessDefinition processDefinition = processDefineMapper.queryByCode(defineCode); if (processDefinition == null) { logger.error("process define not exists"); - return new ArrayList<>(); + return Lists.newArrayList(); } List processTaskRelations = 
processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion()); Set taskDefinitionSet = new HashSet<>(); @@ -370,8 +344,11 @@ public class ProcessService { taskDefinitionSet.add(new TaskDefinition(processTaskRelation.getPostTaskCode(), processTaskRelation.getPostTaskVersion())); } } + if (taskDefinitionSet.isEmpty()) { + return Lists.newArrayList(); + } List taskDefinitionLogs = taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitionSet); - return new ArrayList<>(taskDefinitionLogs); + return Lists.newArrayList(taskDefinitionLogs); } /** @@ -481,37 +458,43 @@ public class ProcessService { } /** - * calculate sub process number in the process define. + * recursive delete all task instance by process instance id * - * @param processDefinitionCode processDefinitionCode - * @return process thread num count + * @param processInstanceId */ - private Integer workProcessThreadNumCount(long processDefinitionCode) { - ProcessDefinition processDefinition = processDefineMapper.queryByCode(processDefinitionCode); + public void deleteWorkTaskInstanceByProcessInstanceId(int processInstanceId) { + List taskInstanceList = findValidTaskListByProcessId(processInstanceId); + if (CollectionUtils.isEmpty(taskInstanceList)) { + return; + } + + List taskInstanceIdList = new ArrayList<>(); + + for (TaskInstance taskInstance : taskInstanceList) { + taskInstanceIdList.add(taskInstance.getId()); + } - List ids = new ArrayList<>(); - recurseFindSubProcessId(processDefinition.getId(), ids); - return ids.size() + 1; + taskInstanceMapper.deleteBatchIds(taskInstanceIdList); } /** * recursive query sub process definition id by parent id. 
* - * @param parentId parentId - * @param ids ids + * @param parentCode parentCode + * @param ids ids */ - public void recurseFindSubProcessId(int parentId, List ids) { - List taskNodeList = this.getTaskNodeListByDefinitionId(parentId); + public void recurseFindSubProcess(long parentCode, List ids) { + List taskNodeList = this.getTaskNodeListByDefinition(parentCode); if (taskNodeList != null && !taskNodeList.isEmpty()) { for (TaskDefinition taskNode : taskNodeList) { String parameter = taskNode.getTaskParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); - if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) { + if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_CODE) != null) { SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class); - ids.add(subProcessParam.getProcessDefinitionId()); - recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids); + ids.add(subProcessParam.getProcessDefinitionCode()); + recurseFindSubProcess(subProcessParam.getProcessDefinitionCode(), ids); } } } @@ -523,7 +506,7 @@ public class ProcessService { * create recovery waiting thread command and delete origin command at the same time. 
* if the recovery command is exists, only update the field update_time * - * @param originCommand originCommand + * @param originCommand originCommand * @param processInstance processInstance */ public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { @@ -540,19 +523,21 @@ public class ProcessService { // process instance quit by "waiting thread" state if (originCommand == null) { Command command = new Command( - CommandType.RECOVER_WAITING_THREAD, - processInstance.getTaskDependType(), - processInstance.getFailureStrategy(), - processInstance.getExecutorId(), - processInstance.getProcessDefinition().getCode(), - JSONUtils.toJsonString(cmdParam), - processInstance.getWarningType(), - processInstance.getWarningGroupId(), - processInstance.getScheduleTime(), - processInstance.getWorkerGroup(), - processInstance.getEnvironmentCode(), - processInstance.getProcessInstancePriority(), - processInstance.getDryRun() + CommandType.RECOVER_WAITING_THREAD, + processInstance.getTaskDependType(), + processInstance.getFailureStrategy(), + processInstance.getExecutorId(), + processInstance.getProcessDefinition().getCode(), + JSONUtils.toJsonString(cmdParam), + processInstance.getWarningType(), + processInstance.getWarningGroupId(), + processInstance.getScheduleTime(), + processInstance.getWorkerGroup(), + processInstance.getEnvironmentCode(), + processInstance.getProcessInstancePriority(), + processInstance.getDryRun(), + processInstance.getId(), + processInstance.getProcessDefinitionVersion() ); saveCommand(command); return; @@ -577,7 +562,7 @@ public class ProcessService { /** * get schedule time from command * - * @param command command + * @param command command * @param cmdParam cmdParam map * @return date */ @@ -606,8 +591,8 @@ public class ProcessService { * generate a new work process instance from command. 
* * @param processDefinition processDefinition - * @param command command - * @param cmdParam cmdParam map + * @param command command + * @param cmdParam cmdParam map * @return process instance */ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, @@ -619,9 +604,9 @@ public class ProcessService { processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); processInstance.setRecovery(Flag.NO); processInstance.setStartTime(new Date()); + processInstance.setRestartTime(processInstance.getStartTime()); processInstance.setRunTimes(1); processInstance.setMaxTryTimes(0); - //processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); processInstance.setCommandParam(command.getCommandParam()); processInstance.setCommandType(command.getCommandType()); processInstance.setIsSubProcess(Flag.NO); @@ -634,10 +619,8 @@ public class ProcessService { processInstance.setWarningGroupId(warningGroupId); processInstance.setDryRun(command.getDryRun()); - // schedule time - Date scheduleTime = getScheduleTime(command, cmdParam); - if (scheduleTime != null) { - processInstance.setScheduleTime(scheduleTime); + if (command.getScheduleTime() != null) { + processInstance.setScheduleTime(command.getScheduleTime()); } processInstance.setCommandStartTime(command.getStartTime()); processInstance.setLocations(processDefinition.getLocations()); @@ -647,10 +630,10 @@ public class ProcessService { // curing global params processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - getCommandTypeIfComplement(processInstance, command), - processInstance.getScheduleTime())); + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + getCommandTypeIfComplement(processInstance, command), + processInstance.getScheduleTime())); // set process instance priority processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); 
@@ -675,15 +658,22 @@ public class ProcessService { fatherParamMap = JSONUtils.toMap(fatherParamJson); } startParamMap.putAll(fatherParamMap); - // set start param into global params - if (startParamMap.size() > 0 - && processDefinition.getGlobalParamMap() != null) { - for (Map.Entry param : processDefinition.getGlobalParamMap().entrySet()) { - String val = startParamMap.get(param.getKey()); - if (val != null) { - param.setValue(val); - } + Map globalMap = processDefinition.getGlobalParamMap(); + List globalParamList = processDefinition.getGlobalParamList(); + if (MapUtils.isNotEmpty(startParamMap) && globalMap != null) { + Map tempGlobalMap = new HashMap<>(); + // add prefix for global params + for (Map.Entry param : globalMap.entrySet()) { + tempGlobalMap.put(GLOBAL_PARAMS_PREFIX + param.getKey(), param.getValue()); + } + globalParamList.forEach(property -> property.setProp(GLOBAL_PARAMS_PREFIX + property.getProp())); + // set start param into global params, add prefix for startup params + for (Entry startParam : startParamMap.entrySet()) { + String tmpStartParamKey = START_UP_PARAMS_PREFIX + startParam.getKey(); + tempGlobalMap.put(tmpStartParamKey, startParam.getValue()); + globalParamList.add(new Property(tmpStartParamKey, IN, VARCHAR, startParam.getValue())); } + processDefinition.setGlobalParamMap(tempGlobalMap); } } @@ -694,7 +684,7 @@ public class ProcessService { * use definition creator's tenant. 
* * @param tenantId tenantId - * @param userId userId + * @param userId userId * @return tenant */ public Tenant getTenantForProcess(int tenantId, int userId) { @@ -732,15 +722,15 @@ public class ProcessService { /** * check command parameters is valid * - * @param command command + * @param command command * @param cmdParam cmdParam map * @return whether command param is valid */ private Boolean checkCmdParam(Command command, Map cmdParam) { if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) { if (cmdParam == null - || !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES) - || cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) { + || !cmdParam.containsKey(Constants.CMD_PARAM_START_NODES) + || cmdParam.get(Constants.CMD_PARAM_START_NODES).isEmpty()) { logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } @@ -752,89 +742,66 @@ public class ProcessService { * construct process instance according to one command. * * @param command command - * @param host host + * @param host host * @return process instance */ private ProcessInstance constructProcessInstance(Command command, String host) { ProcessInstance processInstance; + ProcessDefinition processDefinition; CommandType commandType = command.getCommandType(); - Map cmdParam = JSONUtils.toMap(command.getCommandParam()); - ProcessDefinition processDefinition = getProcessDefinitionByCommand(command.getProcessDefinitionCode(), cmdParam); + processDefinition = this.findProcessDefinition(command.getProcessDefinitionCode(), command.getProcessDefinitionVersion()); if (processDefinition == null) { logger.error("cannot find the work process define! 
define code : {}", command.getProcessDefinitionCode()); return null; } - + Map cmdParam = JSONUtils.toMap(command.getCommandParam()); + int processInstanceId = command.getProcessInstanceId(); + if (processInstanceId == 0) { + processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); + } else { + processInstance = this.findProcessInstanceDetailById(processInstanceId); + if (processInstance == null) { + return processInstance; + } + } if (cmdParam != null) { - int processInstanceId = 0; - // recover from failure or pause tasks - if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) { - String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING); - processInstanceId = Integer.parseInt(processId); - if (processInstanceId == 0) { - logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); - return null; - } - } else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { - // sub process map - String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS); - processInstanceId = Integer.parseInt(pId); - } else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) { - // waiting thread command - String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD); - processInstanceId = Integer.parseInt(pId); + CommandType commandTypeIfComplement = getCommandTypeIfComplement(processInstance, command); + // reset global params while repeat running is needed by cmdParam + if (commandTypeIfComplement == CommandType.REPEAT_RUNNING) { + setGlobalParamIfCommanded(processDefinition, cmdParam); } - if (processInstanceId == 0) { - processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); - } else { - processInstance = this.findProcessInstanceDetailById(processInstanceId); - if (processInstance == null) { - return processInstance; - } - CommandType commandTypeIfComplement = getCommandTypeIfComplement(processInstance, command); - - // reset global params while 
repeat running is needed by cmdParam - if (commandTypeIfComplement == CommandType.REPEAT_RUNNING) { - setGlobalParamIfCommanded(processDefinition, cmdParam); - } - - // Recalculate global parameters after rerun. - processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( + // Recalculate global parameters after rerun. + processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), commandTypeIfComplement, processInstance.getScheduleTime())); - processInstance.setProcessDefinition(processDefinition); - } - //reset command parameter - if (processInstance.getCommandParam() != null) { - Map processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); - for (Map.Entry entry : processCmdParam.entrySet()) { - if (!cmdParam.containsKey(entry.getKey())) { - cmdParam.put(entry.getKey(), entry.getValue()); - } + processInstance.setProcessDefinition(processDefinition); + } + //reset command parameter + if (processInstance.getCommandParam() != null) { + Map processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); + for (Map.Entry entry : processCmdParam.entrySet()) { + if (!cmdParam.containsKey(entry.getKey())) { + cmdParam.put(entry.getKey(), entry.getValue()); } } - // reset command parameter if sub process - if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { - processInstance.setCommandParam(command.getCommandParam()); - } - } else { - // generate one new process instance - processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); + } + // reset command parameter if sub process + if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { + processInstance.setCommandParam(command.getCommandParam()); } if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) { logger.error("command parameter check failed!"); return null; } - if (command.getScheduleTime() != null) { 
processInstance.setScheduleTime(command.getScheduleTime()); } processInstance.setHost(host); - + processInstance.setRestartTime(new Date()); ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION; int runTime = processInstance.getRunTimes(); switch (commandType) { @@ -842,18 +809,14 @@ public class ProcessService { break; case START_FAILURE_TASK_PROCESS: // find failed tasks and init these tasks - List failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); - List toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); - List killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); + List needReSubmitTasks = this.findLastTaskIdByStateList(processInstance.getId(), + Lists.newArrayList(ExecutionStatus.FAILURE, ExecutionStatus.NEED_FAULT_TOLERANCE, ExecutionStatus.KILL)); cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); - - failedList.addAll(killedList); - failedList.addAll(toleranceList); - for (Integer taskId : failedList) { + for (Integer taskId : needReSubmitTasks) { initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, - String.join(Constants.COMMA, convertIntListToString(failedList))); + String.join(Constants.COMMA, convertIntListToString(needReSubmitTasks))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; @@ -864,15 +827,12 @@ public class ProcessService { case RECOVER_SUSPENDED_PROCESS: // find pause tasks and init task's state cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); - List suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); - List stopNodeList = findTaskIdByInstanceState(processInstance.getId(), - ExecutionStatus.KILL); - suspendedNodeList.addAll(stopNodeList); - for (Integer taskId : suspendedNodeList) { + 
List needReSubmitNodeList = this.findLastTaskIdByStateList(processInstance.getId(), Lists.newArrayList(ExecutionStatus.PAUSE, ExecutionStatus.KILL)); + for (Integer taskId : needReSubmitNodeList) { // initialize the pause state initTaskInstance(this.findTaskInstanceById(taskId)); } - cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); + cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(needReSubmitNodeList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; @@ -882,13 +842,14 @@ public class ProcessService { runStatus = processInstance.getState(); break; case COMPLEMENT_DATA: - // delete all the valid tasks when complement data - List taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); - for (TaskInstance taskInstance : taskInstanceList) { - taskInstance.setFlag(Flag.NO); - this.updateTaskInstance(taskInstance); + // delete all the valid tasks when complement data if id is not null + if (processInstance.getId() != 0) { + List taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); + for (TaskInstance taskInstance : taskInstanceList) { + taskInstance.setFlag(Flag.NO); + this.updateTaskInstance(taskInstance); + } } - initComplementDataParam(processDefinition, processInstance, cmdParam); break; case REPEAT_RUNNING: // delete the recover task names from command parameter @@ -903,6 +864,7 @@ public class ProcessService { updateTaskInstance(taskInstance); } processInstance.setStartTime(new Date()); + processInstance.setRestartTime(processInstance.getStartTime()); processInstance.setEndTime(null); processInstance.setRunTimes(runTime + 1); initComplementDataParam(processDefinition, processInstance, cmdParam); @@ -941,7 +903,7 @@ public class ProcessService { } return processDefineLogMapper.queryByDefinitionCodeAndVersion( - 
processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); + processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); } } @@ -952,7 +914,7 @@ public class ProcessService { * return complement data if the process start with complement data * * @param processInstance processInstance - * @param command command + * @param command command * @return command type */ private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) { @@ -967,8 +929,8 @@ public class ProcessService { * initialize complement data parameters * * @param processDefinition processDefinition - * @param processInstance processInstance - * @param cmdParam cmdParam + * @param processInstance processInstance + * @param cmdParam cmdParam */ private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, @@ -987,9 +949,9 @@ public class ProcessService { processInstance.setScheduleTime(complementDate.get(0)); } processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); } /** @@ -1007,7 +969,7 @@ public class ProcessService { Map paramMap = JSONUtils.toMap(cmdParam); // write sub process id into cmd param. 
if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS) - && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { + && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { paramMap.remove(CMD_PARAM_SUB_PROCESS); paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap)); @@ -1020,7 +982,7 @@ public class ProcessService { ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); if (parentInstance != null) { subProcessInstance.setGlobalParams( - joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); + joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); this.saveProcessInstance(subProcessInstance); } else { logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); @@ -1034,6 +996,13 @@ public class ProcessService { processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); this.updateWorkProcessInstanceMap(processInstanceMap); + + TaskInstance subTask = this.findTaskInstanceById(processInstanceMap.getParentTaskInstanceId()); + if (subTask != null && subTask.isSubProcess() && subTask.getState() == ExecutionStatus.SUBMITTED_SUCCESS) { + subTask.setState(ExecutionStatus.RUNNING_EXECUTION); + subTask.setStartTime(new Date()); + this.updateTaskInstance(subTask); + } } /** @@ -1041,22 +1010,26 @@ public class ProcessService { * only the keys doesn't in sub process global would be joined. 
 * * @param parentGlobalParams parentGlobalParams - * @param subGlobalParams subGlobalParams + * @param subGlobalParams subGlobalParams * @return global params join */ private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) { - List parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); - List subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); + // Since JSONUtils.toList returns an unmodifiable list, we need to create a new List here. + List parentParams = Lists.newArrayList(JSONUtils.toList(parentGlobalParams, Property.class)); + List subParams = JSONUtils.toList(subGlobalParams, Property.class); - Map subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); + Set parentParamKeys = parentParams.stream().map(Property::getProp).collect(toSet()); - for (Property parent : parentPropertyList) { - if (!subMap.containsKey(parent.getProp())) { - subPropertyList.add(parent); - } - } - return JSONUtils.toJsonString(subPropertyList); + // We will combine the params of the parent workflow and the sub workflow + // If a param is defined in both, we will use the parent's param to override the sub workflow's (ISSUE-7962) + // todo: Do we need to consider the other attributes of Property? + // e.g. the subProp's type is not equal to the parent's, or the subProp's direct is not equal to the parent's + // It's suggested to add the node name to Property; then this kind of problem can be solved.
+ List extraSubParams = subParams.stream() + .filter(subProp -> !parentParamKeys.contains(subProp.getProp())).collect(Collectors.toList()); + parentParams.addAll(extraSubParams); + return JSONUtils.toJsonString(parentParams); } /** @@ -1067,22 +1040,18 @@ public class ProcessService { private void initTaskInstance(TaskInstance taskInstance) { if (!taskInstance.isSubProcess() - && (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) { + && (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) { taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); return; } taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); updateTaskInstance(taskInstance); + logger.debug("update task instance, task instance id:{}", taskInstance.getId()); } /** * retry submit task to db - * - * @param taskInstance - * @param commitRetryTimes - * @param commitInterval - * @return */ public TaskInstance submitTask(TaskInstance taskInstance, int commitRetryTimes, int commitInterval) { @@ -1104,7 +1073,7 @@ public class ProcessService { } Thread.sleep(commitInterval); } catch (Exception e) { - logger.error("task commit to mysql failed", e); + logger.error("task commit to db failed", e); } retryTimes += 1; } @@ -1122,12 +1091,12 @@ public class ProcessService { public TaskInstance submitTask(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); logger.info("start submit task : {}, instance id:{}, state: {}", - taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); + taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); //submit to db TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if (task == null) { logger.error("end submit task to db error, task name:{}, process id:{} state: {} ", - taskInstance.getName(), taskInstance.getProcessInstance(), 
processInstance.getState()); + taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); return task; } if (!task.getState().typeIsFinished()) { @@ -1146,7 +1115,7 @@ public class ProcessService { * set map {parent instance id, task instance id, 0(child instance id)} * * @param parentInstance parentInstance - * @param parentTask parentTask + * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) { @@ -1175,7 +1144,7 @@ public class ProcessService { * find previous task work process map. * * @param parentProcessInstance parentProcessInstance - * @param parentTask parentTask + * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, @@ -1193,7 +1162,7 @@ public class ProcessService { } } logger.info("sub process instance is not found,parent task:{},parent instance:{}", - parentTask.getId(), parentProcessInstance.getId()); + parentTask.getId(), parentProcessInstance.getId()); return null; } @@ -1201,7 +1170,7 @@ public class ProcessService { * create sub work process command * * @param parentProcessInstance parentProcessInstance - * @param task task + * @param task task */ public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) { if (!task.isSubProcess()) { @@ -1267,8 +1236,11 @@ public class ProcessService { TaskInstance task) { CommandType commandType = getSubCommandType(parentProcessInstance, childInstance); Map subProcessParam = JSONUtils.toMap(task.getTaskParams()); - int childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID)); - ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(childDefineId); + long childDefineCode = 0L; + if (subProcessParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE)) { + childDefineCode = 
Long.parseLong(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE)); + } + ProcessDefinition subProcessDefinition = processDefineMapper.queryByCode(childDefineCode); Object localParams = subProcessParam.get(Constants.LOCAL_PARAMS); List allParam = JSONUtils.toList(JSONUtils.toJsonString(localParams), Property.class); @@ -1280,21 +1252,23 @@ public class ProcessService { } } String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance, fatherParams); - + int subProcessInstanceId = childInstance == null ? 0 : childInstance.getId(); return new Command( - commandType, - TaskDependType.TASK_POST, - parentProcessInstance.getFailureStrategy(), - parentProcessInstance.getExecutorId(), - processDefinition.getCode(), - processParam, - parentProcessInstance.getWarningType(), - parentProcessInstance.getWarningGroupId(), - parentProcessInstance.getScheduleTime(), - task.getWorkerGroup(), - task.getEnvironmentCode(), - parentProcessInstance.getProcessInstancePriority(), - parentProcessInstance.getDryRun() + commandType, + TaskDependType.TASK_POST, + parentProcessInstance.getFailureStrategy(), + parentProcessInstance.getExecutorId(), + subProcessDefinition.getCode(), + processParam, + parentProcessInstance.getWarningType(), + parentProcessInstance.getWarningGroupId(), + parentProcessInstance.getScheduleTime(), + task.getWorkerGroup(), + task.getEnvironmentCode(), + parentProcessInstance.getProcessInstancePriority(), + parentProcessInstance.getDryRun(), + subProcessInstanceId, + subProcessDefinition.getVersion() ); } @@ -1327,11 +1301,11 @@ public class ProcessService { * update sub process definition * * @param parentProcessInstance parentProcessInstance - * @param childDefinitionCode childDefinitionId + * @param childDefinitionCode childDefinitionId */ private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, long childDefinitionCode) { ProcessDefinition fatherDefinition = 
this.findProcessDefinition(parentProcessInstance.getProcessDefinitionCode(), - parentProcessInstance.getProcessDefinitionVersion()); + parentProcessInstance.getProcessDefinitionVersion()); ProcessDefinition childDefinition = this.findProcessDefinitionByCode(childDefinitionCode); if (childDefinition != null && fatherDefinition != null) { childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId()); @@ -1342,37 +1316,21 @@ public class ProcessService { /** * submit task to mysql * - * @param taskInstance taskInstance + * @param taskInstance taskInstance * @param processInstance processInstance * @return task instance */ public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) { ExecutionStatus processInstanceState = processInstance.getState(); - - if (taskInstance.getState().typeIsFailure()) { - if (taskInstance.isSubProcess()) { - taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); - } else { - if (processInstanceState != ExecutionStatus.READY_STOP - && processInstanceState != ExecutionStatus.READY_PAUSE) { - // failure task set invalid - taskInstance.setFlag(Flag.NO); - updateTaskInstance(taskInstance); - // crate new task instance - if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) { - taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1); - } - taskInstance.setSubmitTime(null); - taskInstance.setStartTime(null); - taskInstance.setEndTime(null); - taskInstance.setFlag(Flag.YES); - taskInstance.setHost(null); - taskInstance.setId(0); - } - } + if (processInstanceState.typeIsFinished() || processInstanceState == ExecutionStatus.READY_STOP) { + logger.warn("processInstance {} was {}, skip submit task", processInstance.getProcessDefinitionCode(), processInstanceState); + return null; + } + if (processInstanceState == ExecutionStatus.READY_PAUSE) { + taskInstance.setState(ExecutionStatus.PAUSE); } - taskInstance.setExecutorId(processInstance.getExecutorId()); 
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); + taskInstance.setExecutorId(processInstance.getExecutorId()); taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); if (taskInstance.getSubmitTime() == null) { taskInstance.setSubmitTime(new Date()); @@ -1395,7 +1353,7 @@ public class ProcessService { * return stop if work process state is ready stop * if all of above are not satisfied, return submit success * - * @param taskInstance taskInstance + * @param taskInstance taskInstance * @param processInstanceState processInstanceState * @return process instance state */ @@ -1405,9 +1363,9 @@ public class ProcessService { // the task already exists in task queue // return state if ( - state == ExecutionStatus.RUNNING_EXECUTION - || state == ExecutionStatus.DELAY_EXECUTION - || state == ExecutionStatus.KILL + state == ExecutionStatus.RUNNING_EXECUTION + || state == ExecutionStatus.DELAY_EXECUTION + || state == ExecutionStatus.KILL ) { return state; } @@ -1416,7 +1374,7 @@ public class ProcessService { if (processInstanceState == ExecutionStatus.READY_PAUSE) { state = ExecutionStatus.PAUSE; } else if (processInstanceState == ExecutionStatus.READY_STOP - || !checkProcessStrategy(taskInstance)) { + || !checkProcessStrategy(taskInstance)) { state = ExecutionStatus.KILL; } else { state = ExecutionStatus.SUBMITTED_SUCCESS; @@ -1440,7 +1398,7 @@ public class ProcessService { for (TaskInstance task : taskInstances) { if (task.getState() == ExecutionStatus.FAILURE - && task.getRetryTimes() >= task.getMaxRetryTimes()) { + && task.getRetryTimes() >= task.getMaxRetryTimes()) { return false; } } @@ -1492,6 +1450,11 @@ public class ProcessService { } } + public boolean updateHostAndSubmitTimeById(int id, String host, Date date) { + int count = taskInstanceMapper.updateHostAndSubmitTimeById(id, host, date); + return count > 0; + } + /** * insert task instance * @@ -1511,6 +1474,7 @@ public class ProcessService { */ public 
boolean updateTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.updateById(taskInstance); + logger.debug("updateTaskInstance, task instance id:{}, state;{}", taskInstance.getId(), taskInstance.getState()); return count > 0; } @@ -1518,12 +1482,25 @@ public class ProcessService { * find task instance by id * * @param taskId task id - * @return task intance + * @return task instance */ public TaskInstance findTaskInstanceById(Integer taskId) { return taskInstanceMapper.selectById(taskId); } + /** + * find task instance list by ids + * + * @param taskIds task id list + * @return task instance list + */ + public List findTaskInstanceListByIds(Set taskIds) { + if (CollectionUtils.isEmpty(taskIds)) { + return new ArrayList<>(); + } + return taskInstanceMapper.queryTaskInstanceListByIds(taskIds); + } + /** * package task instance,associate processInstance and processDefine * @@ -1569,7 +1546,8 @@ public class ProcessService { private void updateTaskDefinitionResources(TaskDefinition taskDefinition) { Map taskParameters = JSONUtils.parseObject( taskDefinition.getTaskParams(), - new TypeReference>() { }); + new TypeReference>() { + }); if (taskParameters != null) { // if contains mainJar field, query resource from database // Flink, Spark, MR @@ -1632,13 +1610,36 @@ public class ProcessService { * get id list by task state * * @param instanceId instanceId - * @param state state + * @param state state * @return task instance states */ public List findTaskIdByInstanceState(int instanceId, ExecutionStatus state) { return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); } + /** + * get id list by task state list + * + * @param instanceId instanceId + * @param stateList stateList + * @return task instance ids + */ + public List findLastTaskIdByStateList(int instanceId, List stateList) { + List validTaskInstanceList = this.findValidTaskListByProcessId(instanceId); + Map validTaskInstanceMap = new HashMap<>(); + for (TaskInstance 
instance : validTaskInstanceList) { + validTaskInstanceMap.compute(instance.getTaskCode(), (k, v) -> { + if (v == null || v.getId() < instance.getId()) { + return instance; + } else { + return v; + } + }); + } + return validTaskInstanceMap.values().stream().filter(t -> stateList.contains(t.getState())) + .map(TaskInstance::getId).collect(Collectors.toList()); + } + /** * find valid task list by process definition id * @@ -1687,7 +1688,7 @@ public class ProcessService { * find work process map by parent process id and parent task id. * * @param parentWorkProcessId parentWorkProcessId - * @param parentTaskId parentTaskId + * @param parentTaskId parentTaskId * @return process instance map */ public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) { @@ -1709,7 +1710,7 @@ public class ProcessService { * find sub process instance * * @param parentProcessId parentProcessId - * @param parentTaskId parentTaskId + * @param parentTaskId parentTaskId * @return process instance */ public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) { @@ -1741,23 +1742,24 @@ public class ProcessService { /** * change task state * - * @param state state - * @param startTime startTime - * @param host host + * @param state state + * @param startTime startTime + * @param host host * @param executePath executePath - * @param logPath logPath - * @param taskInstId taskInstId - */ - public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, - String executePath, - String logPath, - int taskInstId) { + * @param logPath logPath + * @param taskInstId taskInstId + * @reutrn + */ + public boolean changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, + String executePath, + String logPath, + int taskInstId) { taskInstance.setState(state); taskInstance.setStartTime(startTime); taskInstance.setHost(host); 
taskInstance.setExecutePath(executePath); taskInstance.setLogPath(logPath); - saveTaskInstance(taskInstance); + return saveTaskInstance(taskInstance); } /** @@ -1773,30 +1775,29 @@ public class ProcessService { /** * change task state * - * @param state state - * @param endTime endTime + * @param state state + * @param endTime endTime * @param taskInstId taskInstId - * @param varPool varPool - */ - public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, - Date endTime, - int processId, - String appIds, - int taskInstId, - String varPool) { + * @param varPool varPool + * @return + */ + public boolean changeTaskState(TaskInstance taskInstance, ExecutionStatus state, + Date endTime, + int processId, + String appIds, + int taskInstId, + String varPool) { taskInstance.setPid(processId); taskInstance.setAppLink(appIds); taskInstance.setState(state); taskInstance.setEndTime(endTime); taskInstance.setVarPool(varPool); changeOutParam(taskInstance); - saveTaskInstance(taskInstance); + return saveTaskInstance(taskInstance); } /** * for show in page of taskInstance - * - * @param taskInstance */ public void changeOutParam(TaskInstance taskInstance) { if (StringUtils.isEmpty(taskInstance.getVarPool())) { @@ -1807,7 +1808,8 @@ public class ProcessService { return; } //if the result more than one line,just get the first . 
- Map taskParams = JSONUtils.parseObject(taskInstance.getTaskParams(), new TypeReference>() {}); + Map taskParams = JSONUtils.parseObject(taskInstance.getTaskParams(), new TypeReference>() { + }); Object localParams = taskParams.get(LOCAL_PARAMS); if (localParams == null) { return; @@ -1876,6 +1878,10 @@ public class ProcessService { return processInstanceMapper.queryByHostAndStatus(host, stateArray); } + public List queryNeedFailoverProcessInstanceHost() { + return processInstanceMapper.queryNeedFailoverProcessInstanceHost(stateArray); + } + /** * process need failover process instance * @@ -1892,6 +1898,8 @@ public class ProcessService { //2 insert into recover command Command cmd = new Command(); cmd.setProcessDefinitionCode(processDefinition.getCode()); + cmd.setProcessDefinitionVersion(processDefinition.getVersion()); + cmd.setProcessInstanceId(processInstance.getId()); cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); cmd.setExecutorId(processInstance.getExecutorId()); cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); @@ -1906,7 +1914,7 @@ public class ProcessService { */ public List queryNeedFailoverTaskInstances(String host) { return taskInstanceMapper.queryByHostAndStatus(host, - stateArray); + stateArray); } /** @@ -1923,7 +1931,7 @@ public class ProcessService { * update process instance state by id * * @param processInstanceId processInstanceId - * @param executionStatus executionStatus + * @param executionStatus executionStatus * @return update process result */ public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { @@ -1959,7 +1967,7 @@ public class ProcessService { /** * find tenant code by resource name * - * @param resName resource name + * @param resName resource name * @param resourceType resource type * @return tenant code */ @@ -1976,7 +1984,7 @@ public class ProcessService { if (Objects.isNull(user)) { return 
StringUtils.EMPTY; } - Tenant tenant = tenantMapper.selectById(user.getTenantId()); + Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (Objects.isNull(tenant)) { return StringUtils.EMPTY; } @@ -1993,45 +2001,67 @@ public class ProcessService { return scheduleMapper.selectAllByProcessDefineArray(codes); } + /** + * find last task instance in the date interval + * + * @param taskCode taskCode + * @param dateInterval dateInterval + * @return task instance + */ + public TaskInstance findLastTaskInstanceInterval(long taskCode, DateInterval dateInterval) { + return taskInstanceMapper.queryLastTaskInstance(taskCode, dateInterval.getStartTime(), dateInterval.getEndTime()); + } + + /** + * find last task instance list in the date interval + * + * @param taskCodes taskCode list + * @param dateInterval dateInterval + * @return task instance + */ + public List findLastTaskInstanceListInterval(Set taskCodes, DateInterval dateInterval) { + return taskInstanceMapper.queryLastTaskInstanceList(taskCodes, dateInterval.getStartTime(), dateInterval.getEndTime()); + } + /** * find last scheduler process instance in the date interval * * @param definitionCode definitionCode - * @param dateInterval dateInterval + * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastSchedulerProcessInterval(Long definitionCode, DateInterval dateInterval) { return processInstanceMapper.queryLastSchedulerProcess(definitionCode, - dateInterval.getStartTime(), - dateInterval.getEndTime()); + dateInterval.getStartTime(), + dateInterval.getEndTime()); } /** * find last manual process instance interval * * @param definitionCode process definition code - * @param dateInterval dateInterval + * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastManualProcessInterval(Long definitionCode, DateInterval dateInterval) { return processInstanceMapper.queryLastManualProcess(definitionCode, - dateInterval.getStartTime(), - 
dateInterval.getEndTime()); + dateInterval.getStartTime(), + dateInterval.getEndTime()); } /** * find last running process instance * * @param definitionCode process definition code - * @param startTime start time - * @param endTime end time + * @param startTime start time + * @param endTime end time * @return process instance */ public ProcessInstance findLastRunningProcess(Long definitionCode, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionCode, - startTime, - endTime, - stateArray); + startTime, + endTime, + stateArray); } /** @@ -2109,7 +2139,7 @@ public class ProcessService { /** * list unauthorized udf function * - * @param userId user id + * @param userId user id * @param needChecks data source id array * @return unauthorized udf function list */ @@ -2211,7 +2241,7 @@ public class ProcessService { if (result > 0) { result = switchProcessTaskRelationVersion(processDefinitionLog); if (result <= 0) { - return Constants.DEFINITION_FAILURE; + return Constants.EXIT_CODE_FAILURE; } } return result; @@ -2223,7 +2253,36 @@ public class ProcessService { processTaskRelationMapper.deleteByCode(processDefinition.getProjectCode(), processDefinition.getCode()); } List processTaskRelationLogList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinition.getCode(), processDefinition.getVersion()); - return processTaskRelationMapper.batchInsert(processTaskRelationLogList); + int batchInsert = processTaskRelationMapper.batchInsert(processTaskRelationLogList); + if (batchInsert == 0) { + return Constants.EXIT_CODE_FAILURE; + } else { + int result = 0; + for (ProcessTaskRelationLog taskRelationLog : processTaskRelationLogList) { + int switchResult = switchTaskDefinitionVersion(taskRelationLog.getPostTaskCode(), taskRelationLog.getPostTaskVersion()); + if (switchResult != Constants.EXIT_CODE_FAILURE) { + result++; + } + } + return result; + } + } + + public int switchTaskDefinitionVersion(long taskCode, int 
taskVersion) { + TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); + if (taskDefinition == null) { + return Constants.EXIT_CODE_FAILURE; + } + if (taskDefinition.getVersion() == taskVersion) { + return Constants.EXIT_CODE_SUCCESS; + } + TaskDefinitionLog taskDefinitionUpdate = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskCode, taskVersion); + if (taskDefinitionUpdate == null) { + return Constants.EXIT_CODE_FAILURE; + } + taskDefinitionUpdate.setUpdateTime(new Date()); + taskDefinitionUpdate.setId(taskDefinition.getId()); + return taskDefinitionMapper.updateById(taskDefinitionUpdate); } /** @@ -2237,10 +2296,10 @@ public class ProcessService { AbstractParameters params = TaskParametersUtils.getParameters(taskDefinition.getTaskType(), taskDefinition.getTaskParams()); if (params != null && CollectionUtils.isNotEmpty(params.getResourceFilesList())) { resourceIds = params.getResourceFilesList(). - stream() - .filter(t -> t.getId() != 0) - .map(ResourceInfo::getId) - .collect(Collectors.toSet()); + stream() + .filter(t -> t.getId() != 0) + .map(ResourceInfo::getId) + .collect(Collectors.toSet()); } if (CollectionUtils.isEmpty(resourceIds)) { return StringUtils.EMPTY; @@ -2248,7 +2307,7 @@ public class ProcessService { return StringUtils.join(resourceIds, ","); } - public int saveTaskDefine(User operator, long projectCode, List taskDefinitionLogs) { + public int saveTaskDefine(User operator, long projectCode, List taskDefinitionLogs, Boolean syncDefine) { Date now = new Date(); List newTaskDefinitionLogs = new ArrayList<>(); List updateTaskDefinitionLogs = new ArrayList<>(); @@ -2260,7 +2319,7 @@ public class ProcessService { taskDefinitionLog.setResourceIds(getResourceIds(taskDefinitionLog)); if (taskDefinitionLog.getCode() > 0 && taskDefinitionLog.getVersion() > 0) { TaskDefinitionLog definitionCodeAndVersion = taskDefinitionLogMapper - .queryByDefinitionCodeAndVersion(taskDefinitionLog.getCode(), taskDefinitionLog.getVersion()); 
+ .queryByDefinitionCodeAndVersion(taskDefinitionLog.getCode(), taskDefinitionLog.getVersion()); if (definitionCodeAndVersion != null) { if (!taskDefinitionLog.equals(definitionCodeAndVersion)) { taskDefinitionLog.setUserId(definitionCodeAndVersion.getUserId()); @@ -2277,8 +2336,8 @@ public class ProcessService { taskDefinitionLog.setCreateTime(now); if (taskDefinitionLog.getCode() == 0) { try { - taskDefinitionLog.setCode(SnowFlakeUtils.getInstance().nextId()); - } catch (SnowFlakeException e) { + taskDefinitionLog.setCode(CodeGenerateUtils.getInstance().genCode()); + } catch (CodeGenerateException e) { logger.error("Task code get error, ", e); return Constants.DEFINITION_FAILURE; } @@ -2293,13 +2352,21 @@ public class ProcessService { newTaskDefinitionLogs.add(taskDefinitionToUpdate); } else { insertResult += taskDefinitionLogMapper.insert(taskDefinitionToUpdate); - taskDefinitionToUpdate.setId(task.getId()); - updateResult += taskDefinitionMapper.updateById(taskDefinitionToUpdate); + if (Boolean.TRUE.equals(syncDefine)) { + taskDefinitionToUpdate.setId(task.getId()); + updateResult += taskDefinitionMapper.updateById(taskDefinitionToUpdate); + } else { + updateResult++; + } } } if (!newTaskDefinitionLogs.isEmpty()) { - updateResult += taskDefinitionMapper.batchInsert(newTaskDefinitionLogs); insertResult += taskDefinitionLogMapper.batchInsert(newTaskDefinitionLogs); + if (Boolean.TRUE.equals(syncDefine)) { + updateResult += taskDefinitionMapper.batchInsert(newTaskDefinitionLogs); + } else { + updateResult += newTaskDefinitionLogs.size(); + } } return (insertResult & updateResult) > 0 ? 
1 : Constants.EXIT_CODE_SUCCESS; } @@ -2307,21 +2374,23 @@ public class ProcessService { /** * save processDefinition (including create or update processDefinition) */ - public int saveProcessDefine(User operator, ProcessDefinition processDefinition, Boolean isFromProcessDefine) { + public int saveProcessDefine(User operator, ProcessDefinition processDefinition, Boolean syncDefine, Boolean isFromProcessDefine) { ProcessDefinitionLog processDefinitionLog = new ProcessDefinitionLog(processDefinition); Integer version = processDefineLogMapper.queryMaxVersionForDefinition(processDefinition.getCode()); int insertVersion = version == null || version == 0 ? Constants.VERSION_FIRST : version + 1; processDefinitionLog.setVersion(insertVersion); - processDefinitionLog.setReleaseState(isFromProcessDefine ? ReleaseState.OFFLINE : ReleaseState.ONLINE); + processDefinitionLog.setReleaseState(!isFromProcessDefine || processDefinitionLog.getReleaseState() == ReleaseState.ONLINE ? ReleaseState.ONLINE : ReleaseState.OFFLINE); processDefinitionLog.setOperator(operator.getId()); processDefinitionLog.setOperateTime(processDefinition.getUpdateTime()); int insertLog = processDefineLogMapper.insert(processDefinitionLog); - int result; - if (0 == processDefinition.getId()) { - result = processDefineMapper.insert(processDefinitionLog); - } else { - processDefinitionLog.setId(processDefinition.getId()); - result = processDefineMapper.updateById(processDefinitionLog); + int result = 1; + if (Boolean.TRUE.equals(syncDefine)) { + if (0 == processDefinition.getId()) { + result = processDefineMapper.insert(processDefinitionLog); + } else { + processDefinitionLog.setId(processDefinition.getId()); + result = processDefineMapper.updateById(processDefinitionLog); + } } return (insertLog & result) > 0 ? 
insertVersion : 0; } @@ -2330,11 +2399,15 @@ public class ProcessService { * save task relations */ public int saveTaskRelation(User operator, long projectCode, long processDefinitionCode, int processDefinitionVersion, - List taskRelationList, List taskDefinitionLogs) { + List taskRelationList, List taskDefinitionLogs, + Boolean syncDefine) { + if (taskRelationList.isEmpty()) { + return Constants.EXIT_CODE_SUCCESS; + } Map taskDefinitionLogMap = null; if (CollectionUtils.isNotEmpty(taskDefinitionLogs)) { taskDefinitionLogMap = taskDefinitionLogs.stream() - .collect(Collectors.toMap(TaskDefinition::getCode, taskDefinitionLog -> taskDefinitionLog)); + .collect(Collectors.toMap(TaskDefinition::getCode, taskDefinitionLog -> taskDefinitionLog)); } Date now = new Date(); for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) { @@ -2342,38 +2415,45 @@ public class ProcessService { processTaskRelationLog.setProcessDefinitionCode(processDefinitionCode); processTaskRelationLog.setProcessDefinitionVersion(processDefinitionVersion); if (taskDefinitionLogMap != null) { - TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMap.get(processTaskRelationLog.getPreTaskCode()); - if (taskDefinitionLog != null) { - processTaskRelationLog.setPreTaskVersion(taskDefinitionLog.getVersion()); + TaskDefinitionLog preTaskDefinitionLog = taskDefinitionLogMap.get(processTaskRelationLog.getPreTaskCode()); + if (preTaskDefinitionLog != null) { + processTaskRelationLog.setPreTaskVersion(preTaskDefinitionLog.getVersion()); + } + TaskDefinitionLog postTaskDefinitionLog = taskDefinitionLogMap.get(processTaskRelationLog.getPostTaskCode()); + if (postTaskDefinitionLog != null) { + processTaskRelationLog.setPostTaskVersion(postTaskDefinitionLog.getVersion()); } - processTaskRelationLog.setPostTaskVersion(taskDefinitionLogMap.get(processTaskRelationLog.getPostTaskCode()).getVersion()); } processTaskRelationLog.setCreateTime(now); processTaskRelationLog.setUpdateTime(now); 
processTaskRelationLog.setOperator(operator.getId()); processTaskRelationLog.setOperateTime(now); } - List processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); - if (!processTaskRelationList.isEmpty()) { - Set processTaskRelationSet = processTaskRelationList.stream().map(ProcessTaskRelation::hashCode).collect(toSet()); - Set taskRelationSet = taskRelationList.stream().map(ProcessTaskRelationLog::hashCode).collect(toSet()); - if (CollectionUtils.isEqualCollection(processTaskRelationSet, taskRelationSet)) { - return Constants.EXIT_CODE_SUCCESS; + int insert = taskRelationList.size(); + if (Boolean.TRUE.equals(syncDefine)) { + List processTaskRelationList = processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + if (!processTaskRelationList.isEmpty()) { + Set processTaskRelationSet = processTaskRelationList.stream().map(ProcessTaskRelation::hashCode).collect(toSet()); + Set taskRelationSet = taskRelationList.stream().map(ProcessTaskRelationLog::hashCode).collect(toSet()); + boolean result = CollectionUtils.isEqualCollection(processTaskRelationSet, taskRelationSet); + if (result) { + return Constants.EXIT_CODE_SUCCESS; + } + processTaskRelationMapper.deleteByCode(projectCode, processDefinitionCode); } - processTaskRelationMapper.deleteByCode(projectCode, processDefinitionCode); + insert = processTaskRelationMapper.batchInsert(taskRelationList); } - int result = processTaskRelationMapper.batchInsert(taskRelationList); int resultLog = processTaskRelationLogMapper.batchInsert(taskRelationList); - return (result & resultLog) > 0 ? Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; + return (insert & resultLog) > 0 ? 
Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; } public boolean isTaskOnline(long taskCode) { List processTaskRelationList = processTaskRelationMapper.queryByTaskCode(taskCode); if (!processTaskRelationList.isEmpty()) { Set processDefinitionCodes = processTaskRelationList - .stream() - .map(ProcessTaskRelation::getProcessDefinitionCode) - .collect(Collectors.toSet()); + .stream() + .map(ProcessTaskRelation::getProcessDefinitionCode) + .collect(Collectors.toSet()); List processDefinitionList = processDefineMapper.queryByCodes(processDefinitionCodes); // check process definition is already online for (ProcessDefinition processDefinition : processDefinitionList) { @@ -2387,14 +2467,15 @@ public class ProcessService { /** * Generate the DAG Graph based on the process definition id + * Use temporarily before refactoring taskNode * * @param processDefinition process definition * @return dag graph */ public DAG genDagGraph(ProcessDefinition processDefinition) { - List processTaskRelations = processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()); - List taskNodeList = transformTask(processTaskRelations, Lists.newArrayList()); - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList, new ArrayList<>(processTaskRelations)); + List taskRelations = this.findRelationByCode(processDefinition.getCode(), processDefinition.getVersion()); + List taskNodeList = transformTask(taskRelations, Lists.newArrayList()); + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList, new ArrayList<>(taskRelations)); // Generate concrete Dag to be executed return DagHelper.buildDagGraph(processDag); } @@ -2403,12 +2484,10 @@ public class ProcessService { * generate DagData */ public DagData genDagData(ProcessDefinition processDefinition) { - List processTaskRelations = processTaskRelationMapper.queryByProcessCode(processDefinition.getProjectCode(), processDefinition.getCode()); - List taskDefinitionLogList = 
genTaskDefineList(processTaskRelations); - List taskDefinitions = taskDefinitionLogList.stream() - .map(taskDefinitionLog -> JSONUtils.parseObject(JSONUtils.toJsonString(taskDefinitionLog), TaskDefinition.class)) - .collect(Collectors.toList()); - return new DagData(processDefinition, processTaskRelations, taskDefinitions); + List taskRelations = this.findRelationByCode(processDefinition.getCode(), processDefinition.getVersion()); + List taskDefinitionLogList = genTaskDefineList(taskRelations); + List taskDefinitions = taskDefinitionLogList.stream().map(t -> (TaskDefinition) t).collect(Collectors.toList()); + return new DagData(processDefinition, taskRelations, taskDefinitions); } public List genTaskDefineList(List processTaskRelations) { @@ -2421,9 +2500,29 @@ public class ProcessService { taskDefinitionSet.add(new TaskDefinition(processTaskRelation.getPostTaskCode(), processTaskRelation.getPostTaskVersion())); } } + if (taskDefinitionSet.isEmpty()) { + return Lists.newArrayList(); + } return taskDefinitionLogMapper.queryByTaskDefinitions(taskDefinitionSet); } + public List getTaskDefineLogListByRelation(List processTaskRelations) { + List taskDefinitionLogs = new ArrayList<>(); + Map taskCodeVersionMap = new HashMap<>(); + for (ProcessTaskRelation processTaskRelation : processTaskRelations) { + if (processTaskRelation.getPreTaskCode() > 0) { + taskCodeVersionMap.put(processTaskRelation.getPreTaskCode(), processTaskRelation.getPreTaskVersion()); + } + if (processTaskRelation.getPostTaskCode() > 0) { + taskCodeVersionMap.put(processTaskRelation.getPostTaskCode(), processTaskRelation.getPostTaskVersion()); + } + } + taskCodeVersionMap.forEach((code, version) -> { + taskDefinitionLogs.add((TaskDefinitionLog) this.findTaskDefinition(code, version)); + }); + return taskDefinitionLogs; + } + /** * find task definition by code and version */ @@ -2432,17 +2531,25 @@ public class ProcessService { } /** - * find process task relation list by projectCode and 
processDefinitionCode + * find task definition by code + */ + public TaskDefinition findTaskDefinitionByCode(long taskCode) { + return taskDefinitionMapper.queryByCode(taskCode); + } + + /** + * find process task relation list by process */ - public List findRelationByCode(long projectCode, long processDefinitionCode) { - return processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); + public List findRelationByCode(long processDefinitionCode, int processDefinitionVersion) { + List processTaskRelationLogList = processTaskRelationLogMapper.queryByProcessCodeAndVersion(processDefinitionCode, processDefinitionVersion); + return processTaskRelationLogList.stream().map(r -> (ProcessTaskRelation) r).collect(Collectors.toList()); } /** * add authorized resources * * @param ownResources own resources - * @param userId userId + * @param userId userId */ private void addAuthorizedResources(List ownResources, int userId) { List relationResourceIds = resourceUserMapper.queryResourcesIdListByUserIdAndPerm(userId, 7); @@ -2470,7 +2577,7 @@ public class ProcessService { taskDefinitionLogs = genTaskDefineList(taskRelationList); } Map taskDefinitionLogMap = taskDefinitionLogs.stream() - .collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog)); + .collect(Collectors.toMap(TaskDefinitionLog::getCode, taskDefinitionLog -> taskDefinitionLog)); List taskNodeList = new ArrayList<>(); for (Entry> code : taskCodeMap.entrySet()) { TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMap.get(code.getKey()); @@ -2495,17 +2602,17 @@ public class ProcessService { taskNode.setWorkerGroup(taskDefinitionLog.getWorkerGroup()); taskNode.setEnvironmentCode(taskDefinitionLog.getEnvironmentCode()); taskNode.setTimeout(JSONUtils.toJsonString(new TaskTimeoutParameter(taskDefinitionLog.getTimeoutFlag() == TimeoutFlag.OPEN, - taskDefinitionLog.getTimeoutNotifyStrategy(), - taskDefinitionLog.getTimeout()))); + 
taskDefinitionLog.getTimeoutNotifyStrategy(), + taskDefinitionLog.getTimeout()))); taskNode.setDelayTime(taskDefinitionLog.getDelayTime()); - taskNode.setPreTasks(JSONUtils.toJsonString(code.getValue().stream().map(taskDefinitionLogMap::get).map(TaskDefinition::getName).collect(Collectors.toList()))); + taskNode.setPreTasks(JSONUtils.toJsonString(code.getValue().stream().map(taskDefinitionLogMap::get).map(TaskDefinition::getCode).collect(Collectors.toList()))); taskNodeList.add(taskNode); } } return taskNodeList; } - public Map notifyProcessList(int processId, int taskId) { + public Map notifyProcessList(int processId) { HashMap processTaskMap = new HashMap<>(); //find sub tasks ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(processId); @@ -2520,4 +2627,37 @@ public class ProcessService { } return processTaskMap; } + + /** + * whether the graph has a ring + * + * @param taskNodeResponseList task node response list + * @return if graph has cycle flag + */ + public boolean graphHasCycle(List taskNodeResponseList) { + DAG graph = new DAG<>(); + // Fill the vertices + for (TaskNode taskNodeResponse : taskNodeResponseList) { + graph.addNode(Long.toString(taskNodeResponse.getCode()), taskNodeResponse); + } + // Fill edge relations + for (TaskNode taskNodeResponse : taskNodeResponseList) { + List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class); + if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(preTasks)) { + for (String preTask : preTasks) { + if (!graph.addEdge(preTask, Long.toString(taskNodeResponse.getCode()))) { + return true; + } + } + } + } + return graph.hasCycle(); + } + + private void deleteCommandWithCheck(int commandId) { + int delete = this.commandMapper.deleteById(commandId); + if (delete != 1) { + throw new ServiceException("delete command fail, id:" + commandId); + } + } } diff --git 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/HikariConnectionProvider.java similarity index 78% rename from dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/HikariConnectionProvider.java index 3ac6ccaedcb9affcbb9b21319afc5a5ddfa64a7e..ec064538a1d7a24c8ae3eefb42c094f15d9b9832 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/HikariConnectionProvider.java @@ -14,24 +14,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.service.quartz; -import com.alibaba.druid.pool.DruidDataSource; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.quartz.utils.ConnectionProvider; import java.sql.Connection; import java.sql.SQLException; +import org.quartz.utils.ConnectionProvider; + +import com.zaxxer.hikari.HikariDataSource; + /** - * druid connection provider + * hikari connection provider */ -public class DruidConnectionProvider implements ConnectionProvider { +public class HikariConnectionProvider implements ConnectionProvider { - private final DruidDataSource dataSource; + private final HikariDataSource dataSource; - public DruidConnectionProvider(){ - this.dataSource = SpringApplicationContext.getBean(DruidDataSource.class); + public HikariConnectionProvider() { + this.dataSource = SpringApplicationContext.getBean(HikariDataSource.class); } @Override @@ -40,7 +43,7 @@ public class DruidConnectionProvider implements ConnectionProvider { } @Override - public void shutdown() throws 
SQLException { + public void shutdown() { dataSource.close(); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java index 1de5c56fd013606347198b1619932557f09754b8..3c9f165e4799c8a1fbede1a143563b61a34259c1 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java @@ -101,6 +101,7 @@ public class ProcessScheduleJob implements Job { command.setWorkerGroup(workerGroup); command.setWarningType(schedule.getWarningType()); command.setProcessInstancePriority(schedule.getProcessInstancePriority()); + command.setProcessDefinitionVersion(processDefinition.getVersion()); getProcessService().createCommand(command); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java index 1588907abe68e53f2381dc6058763fe75d402c5b..86d73b62683642f714b5724f02f603773ca05cbf 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.service.quartz; -import static org.apache.dolphinscheduler.common.Constants.DATASOURCE_PROPERTIES; import static org.apache.dolphinscheduler.common.Constants.ORG_POSTGRESQL_DRIVER; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK; @@ -29,6 +28,7 @@ import static 
org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_I import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_TABLEPREFIX; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_JOBSTORE_USEPROPERTIES; +import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_SCHEDULER_INSTANCEID; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_SCHEDULER_INSTANCENAME; import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON; @@ -38,6 +38,7 @@ import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_THREADPOOL import static org.apache.dolphinscheduler.common.Constants.ORG_QUARTZ_THREADPOOL_THREADPRIORITY; import static org.apache.dolphinscheduler.common.Constants.PROJECT_ID; import static org.apache.dolphinscheduler.common.Constants.QUARTZ_ACQUIRETRIGGERSWITHINLOCK; +import static org.apache.dolphinscheduler.common.Constants.QUARTZ_BATCHTRIGGERACQUISTITIONMAXCOUNT; import static org.apache.dolphinscheduler.common.Constants.QUARTZ_CLUSTERCHECKININTERVAL; import static org.apache.dolphinscheduler.common.Constants.QUARTZ_DATASOURCE; import static org.apache.dolphinscheduler.common.Constants.QUARTZ_INSTANCEID; @@ -55,7 +56,6 @@ import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRI import static org.apache.dolphinscheduler.common.Constants.STRING_FALSE; import static org.apache.dolphinscheduler.common.Constants.STRING_TRUE; import static org.apache.dolphinscheduler.common.Constants.UNDERLINE; - import static org.quartz.CronScheduleBuilder.cronSchedule; import static org.quartz.JobBuilder.newJob; import static org.quartz.TriggerBuilder.newTrigger; @@ -150,7 +150,6 @@ public class QuartzExecutors { */ private 
void init() { try { - PropertyUtils.loadPropertyFile(DATASOURCE_PROPERTIES); StdSchedulerFactory schedulerFactory = new StdSchedulerFactory(); Properties properties = new Properties(); @@ -175,7 +174,9 @@ public class QuartzExecutors { properties.setProperty(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL, conf.getString(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL, QUARTZ_CLUSTERCHECKININTERVAL)); properties.setProperty(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK, conf.getString(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK, QUARTZ_ACQUIRETRIGGERSWITHINLOCK)); properties.setProperty(ORG_QUARTZ_JOBSTORE_DATASOURCE, conf.getString(ORG_QUARTZ_JOBSTORE_DATASOURCE, QUARTZ_DATASOURCE)); - properties.setProperty(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, conf.getString(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, DruidConnectionProvider.class.getName())); + properties.setProperty(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, conf.getString(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS, HikariConnectionProvider.class.getName())); + properties.setProperty(ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT, + conf.getString(ORG_QUARTZ_SCHEDULER_BATCHTRIGGERACQUISTITIONMAXCOUNT, QUARTZ_BATCHTRIGGERACQUISTITIONMAXCOUNT)); schedulerFactory.initialize(properties); scheduler = schedulerFactory.getScheduler(); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java index 60c862340bfe4ea6894e851ab12f6f1294b88253..b00f1476abf0b66fb918003cc970a7bb7808fbbc 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java @@ -173,6 +173,16 @@ public abstract class AbstractCycle { FieldExpression dayOfWeekFieldExpression = 
dayOfWeekField.getExpression(); return (dayOfWeekFieldExpression instanceof Every || dayOfWeekFieldExpression instanceof Always); } + + /** + * whether the year field has a value of every or always + * + * @return if year field has a value of every or always return true,else return false + */ + protected boolean yearFieldIsEvery() { + FieldExpression yearFieldExpression = yearField.getExpression(); + return (yearFieldExpression instanceof Every || yearFieldExpression instanceof Always); + } /** * get cycle enum diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java index ab9a97b7ca538920469e18db60729269cba63228..d2e1479427ab39f78f3d62ab9e93d07d46674596 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java @@ -22,16 +22,18 @@ import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.hour; import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.min; import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.month; import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.week; +import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.year; import static com.cronutils.model.CronType.QUARTZ; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CycleEnum; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.commons.collections.CollectionUtils; + import java.text.ParseException; import java.util.ArrayList; 
import java.util.Calendar; @@ -89,7 +91,7 @@ public class CronUtils { * @return CycleEnum */ public static CycleEnum getMaxCycle(Cron cron) { - return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getCycle(); + return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).addCycle(year(cron)).getCycle(); } /** @@ -99,7 +101,7 @@ public class CronUtils { * @return CycleEnum */ public static CycleEnum getMiniCycle(Cron cron) { - return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).getMiniCycle(); + return min(cron).addCycle(hour(cron)).addCycle(day(cron)).addCycle(week(cron)).addCycle(month(cron)).addCycle(year(cron)).getMiniCycle(); } /** @@ -185,8 +187,10 @@ public class CronUtils { */ public static List getSelfFireDateList(final Date startTime, final Date endTime, final List schedules) { List result = new ArrayList<>(); + + // support left closed and right closed time interval (startDate <= N <= endDate) Date from = new Date(startTime.getTime() - Constants.SECOND_TIME_MILLIS); - Date to = new Date(endTime.getTime() - Constants.SECOND_TIME_MILLIS); + Date to = new Date(endTime.getTime() + Constants.SECOND_TIME_MILLIS); List listSchedule = new ArrayList<>(); listSchedule.addAll(schedules); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java index 1f807dce7f785380a1da1583069d2bbf84dd9851..9f931d20bbefd51224b80507da49f4cd1802abba 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java @@ -72,6 +72,15 @@ public class CycleFactory { public static AbstractCycle month(Cron cron) { return new 
MonthCycle(cron); } + + /** + * year + * @param cron cron + * @return AbstractCycle + */ + public static AbstractCycle year(Cron cron) { + return new YearCycle(cron); + } /** * day cycle @@ -275,4 +284,52 @@ public class CycleFactory { return null; } } + + /** + * year cycle + */ + public static class YearCycle extends AbstractCycle { + public YearCycle(Cron cron) { + super(cron); + } + + /** + * get cycle + * @return CycleEnum + */ + @Override + protected CycleEnum getCycle() { + boolean flag = (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthFieldIsSetAll() + && dayOfWeekField.getExpression() instanceof QuestionMark + && monthFieldIsSetAll()) + && yearFieldIsEvery() || + (minFiledIsSetAll() + && hourFiledIsSetAll() + && dayOfMonthField.getExpression() instanceof QuestionMark + && dayofWeekFieldIsSetAll() + && monthFieldIsSetAll() + && yearFieldIsEvery()); + + if (flag) { + return CycleEnum.YEAR; + } + + return null; + } + + /** + * get mini cycle + * @return CycleEnum + */ + @Override + protected CycleEnum getMiniCycle() { + if (yearFieldIsEvery()) { + return CycleEnum.YEAR; + } + + return null; + } + } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java index 59a0fe229c78deebe5c14fe1eb551ba5810c6e14..7502607bcf24910d6ceff5a7513049c5fafc3b3d 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java @@ -104,6 +104,14 @@ public class PeerTaskInstancePriorityQueue implements TaskPriorityQueue iterator = this.queue.iterator(); while (iterator.hasNext()) { TaskInstance taskInstance = iterator.next(); - if (taskId == taskInstance.getId()) { + if (taskCode == 
taskInstance.getTaskCode() + && taskVersion == taskInstance.getTaskDefinitionVersion()) { return true; } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java index 8d630beeb07bd357629109ead03d61e0e252c525..40914d3e20861c7d18a90f9c06511e4b42e00dfc 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java @@ -18,21 +18,25 @@ package org.apache.dolphinscheduler.service.queue; import org.apache.dolphinscheduler.service.exceptions.TaskPriorityQueueException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.TimeUnit; -import org.springframework.stereotype.Service; - /** * A singleton of a task queue implemented using PriorityBlockingQueue */ @Service public class TaskPriorityQueueImpl implements TaskPriorityQueue { + + private static final Logger logger = LoggerFactory.getLogger(TaskPriorityQueueImpl.class); + /** * queue size */ - private static final Integer QUEUE_MAX_SIZE = 3000; + private static final Integer QUEUE_MAX_SIZE = 10000; /** * queue @@ -47,7 +51,12 @@ public class TaskPriorityQueueImpl implements TaskPriorityQueue { */ @Override public void put(TaskPriority taskPriorityInfo) throws TaskPriorityQueueException { - queue.put(taskPriorityInfo); + if (!queue.contains(taskPriorityInfo)) { + queue.put(taskPriorityInfo); + } else { + logger.warn("the priorityBlockingQueue contain the task already, taskId: {}, processInstanceId: {}", + taskPriorityInfo.getTaskId(), taskPriorityInfo.getProcessInstanceId()); + } } /** diff --git 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java index 609566ab5266b984623e3ba7059970e693145034..ba56eec4511c6a76fad074117f3d773afddcba34 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java @@ -61,6 +61,12 @@ public class TaskExecutionContext implements Serializable { @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date startTime; + /** + * endTime + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date endTime; + /** * task type */ @@ -287,6 +293,14 @@ public class TaskExecutionContext implements Serializable { this.startTime = startTime; } + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + public String getTaskType() { return taskType; } @@ -573,6 +587,7 @@ public class TaskExecutionContext implements Serializable { + ", currentExecutionStatus=" + currentExecutionStatus + ", firstSubmitTime=" + firstSubmitTime + ", startTime=" + startTime + + ", endTime=" + endTime + ", taskType='" + taskType + '\'' + ", host='" + host + '\'' + ", executePath='" + executePath + '\'' diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryCenter.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryCenter.java deleted file mode 100644 index ba74f88afb5961adeec2b678442336ac27161dcb..0000000000000000000000000000000000000000 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryCenter.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.service.registry; - -import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS; -import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS; -import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; - -import org.apache.dolphinscheduler.common.IStoppable; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginLoader; -import org.apache.dolphinscheduler.spi.plugin.DolphinPluginManagerConfig; -import org.apache.dolphinscheduler.spi.register.Registry; -import org.apache.dolphinscheduler.spi.register.RegistryConnectListener; -import org.apache.dolphinscheduler.spi.register.RegistryException; -import org.apache.dolphinscheduler.spi.register.RegistryPluginManager; -import org.apache.dolphinscheduler.spi.register.SubscribeListener; - -import org.apache.commons.lang.StringUtils; - -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import 
com.google.common.collect.ImmutableList; - -/** - * All business parties use this class to access the registry - */ -public class RegistryCenter { - - private static final Logger logger = LoggerFactory.getLogger(RegistryCenter.class); - - private final AtomicBoolean isStarted = new AtomicBoolean(false); - - private Registry registry; - - private IStoppable stoppable; - - /** - * nodes namespace - */ - protected static String NODES; - - private RegistryPluginManager registryPluginManager; - - protected static final String EMPTY = ""; - - private static final String REGISTRY_PREFIX = "registry"; - - private static final String REGISTRY_PLUGIN_BINDING = "registry.plugin.binding"; - - private static final String REGISTRY_PLUGIN_DIR = "registry.plugin.dir"; - - private static final String MAVEN_LOCAL_REPOSITORY = "maven.local.repository"; - - private static final String REGISTRY_PLUGIN_NAME = "plugin.name"; - - /** - * default registry plugin dir - */ - private static final String REGISTRY_PLUGIN_PATH = "lib/plugin/registry"; - - private static final String REGISTRY_CONFIG_FILE_PATH = "/registry.properties"; - - /** - * init node persist - */ - public void init() { - if (isStarted.compareAndSet(false, true)) { - PropertyUtils.loadPropertyFile(REGISTRY_CONFIG_FILE_PATH); - Map registryConfig = PropertyUtils.getPropertiesByPrefix(REGISTRY_PREFIX); - - if (null == registryConfig || registryConfig.isEmpty()) { - throw new RegistryException("registry config param is null"); - } - if (null == registryPluginManager) { - installRegistryPlugin(registryConfig.get(REGISTRY_PLUGIN_NAME)); - registry = registryPluginManager.getRegistry(); - } - - registry.init(registryConfig); - initNodes(); - - } - } - - /** - * init nodes - */ - private void initNodes() { - persist(REGISTRY_DOLPHINSCHEDULER_MASTERS, EMPTY); - persist(REGISTRY_DOLPHINSCHEDULER_WORKERS, EMPTY); - persist(REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS, EMPTY); - } - - /** - * install registry plugin - */ - private void 
installRegistryPlugin(String registryPluginName) { - DolphinPluginManagerConfig registryPluginManagerConfig = new DolphinPluginManagerConfig(); - registryPluginManagerConfig.setPlugins(PropertyUtils.getString(REGISTRY_PLUGIN_BINDING)); - if (StringUtils.isNotBlank(PropertyUtils.getString(REGISTRY_PLUGIN_DIR))) { - registryPluginManagerConfig.setInstalledPluginsDir(PropertyUtils.getString(REGISTRY_PLUGIN_DIR, REGISTRY_PLUGIN_PATH).trim()); - } - - if (StringUtils.isNotBlank(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY))) { - registryPluginManagerConfig.setMavenLocalRepository(PropertyUtils.getString(MAVEN_LOCAL_REPOSITORY).trim()); - } - - registryPluginManager = new RegistryPluginManager(registryPluginName); - - DolphinPluginLoader registryPluginLoader = new DolphinPluginLoader(registryPluginManagerConfig, ImmutableList.of(registryPluginManager)); - try { - registryPluginLoader.loadPlugins(); - } catch (Exception e) { - throw new RuntimeException("Load registry Plugin Failed !", e); - } - } - - /** - * close - */ - public void close() { - if (isStarted.compareAndSet(true, false) && registry != null) { - registry.close(); - } - } - - public void persist(String key, String value) { - registry.persist(key, value); - } - - public void persistEphemeral(String key, String value) { - registry.persistEphemeral(key, value); - } - - public void remove(String key) { - registry.remove(key); - } - - public void update(String key, String value) { - registry.update(key, value); - } - - public String get(String key) { - return registry.get(key); - } - - public void subscribe(String path, SubscribeListener subscribeListener) { - registry.subscribe(path, subscribeListener); - } - - public void addConnectionStateListener(RegistryConnectListener registryConnectListener) { - registry.addConnectionStateListener(registryConnectListener); - } - - public boolean isExisted(String key) { - return registry.isExisted(key); - } - - public boolean getLock(String key) { - return 
registry.acquireLock(key); - } - - public boolean releaseLock(String key) { - return registry.releaseLock(key); - } - - /** - * @return get dead server node parent path - */ - public String getDeadZNodeParentPath() { - return REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS; - } - - public void setStoppable(IStoppable stoppable) { - this.stoppable = stoppable; - } - - public IStoppable getStoppable() { - return stoppable; - } - - /** - * whether master path - * - * @param path path - * @return result - */ - public boolean isMasterPath(String path) { - return path != null && path.contains(REGISTRY_DOLPHINSCHEDULER_MASTERS); - } - - /** - * get worker group path - * - * @param workerGroup workerGroup - * @return worker group path - */ - public String getWorkerGroupPath(String workerGroup) { - return REGISTRY_DOLPHINSCHEDULER_WORKERS + "/" + workerGroup; - } - - /** - * whether worker path - * - * @param path path - * @return result - */ - public boolean isWorkerPath(String path) { - return path != null && path.contains(REGISTRY_DOLPHINSCHEDULER_WORKERS); - } - - /** - * get children nodes - * - * @param key key - * @return children nodes - */ - public List getChildrenKeys(final String key) { - return registry.getChildren(key); - } - -} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryClient.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryClient.java index c6b1eb8936e3192acb58f3e7beff932c428f9894..775fc10798659a7c1d5647feaec1f6f340933135 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryClient.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/registry/RegistryClient.java @@ -22,58 +22,72 @@ import static org.apache.dolphinscheduler.common.Constants.COLON; import static org.apache.dolphinscheduler.common.Constants.DELETE_OP; import static 
org.apache.dolphinscheduler.common.Constants.DIVISION_STRING; import static org.apache.dolphinscheduler.common.Constants.MASTER_TYPE; +import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS; import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS; import static org.apache.dolphinscheduler.common.Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; import static org.apache.dolphinscheduler.common.Constants.SINGLE_SLASH; import static org.apache.dolphinscheduler.common.Constants.UNDERLINE; import static org.apache.dolphinscheduler.common.Constants.WORKER_TYPE; +import static com.google.common.base.Preconditions.checkArgument; + import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.ResInfo; - -import org.apache.commons.lang.StringUtils; - +import org.apache.dolphinscheduler.common.utils.HeartBeat; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.registry.api.ConnectionListener; +import org.apache.dolphinscheduler.registry.api.Registry; +import org.apache.dolphinscheduler.registry.api.RegistryException; +import org.apache.dolphinscheduler.registry.api.RegistryFactory; +import org.apache.dolphinscheduler.registry.api.RegistryFactoryLoader; +import org.apache.dolphinscheduler.registry.api.SubscribeListener; + +import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; +import java.util.Collection; +import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import 
javax.annotation.PostConstruct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; -/** - * registry client singleton - */ -public class RegistryClient extends RegistryCenter { +import com.google.common.base.Strings; +@Component +public class RegistryClient { private static final Logger logger = LoggerFactory.getLogger(RegistryClient.class); - private static RegistryClient registryClient = new RegistryClient(); - - private RegistryClient() { - super.init(); - } - - public static RegistryClient getInstance() { - return registryClient; + private static final String EMPTY = ""; + private static final String REGISTRY_PREFIX = "registry"; + private static final String REGISTRY_PLUGIN_NAME = "plugin.name"; + private static final String REGISTRY_CONFIG_FILE_PATH = "/registry.properties"; + private final AtomicBoolean isStarted = new AtomicBoolean(false); + private Registry registry; + private IStoppable stoppable; + + @PostConstruct + public void afterConstruct() { + start(); + initNodes(); } - /** - * get active master num - * - * @return active master number - */ public int getActiveMasterNum() { - List childrenList = new ArrayList<>(); + Collection childrenList = new ArrayList<>(); try { // read master node parent path from conf - if (isExisted(getNodeParentPath(NodeType.MASTER))) { - childrenList = getChildrenKeys(getNodeParentPath(NodeType.MASTER)); + if (exists(rootNodePath(NodeType.MASTER))) { + childrenList = getChildrenKeys(rootNodePath(NodeType.MASTER)); } } catch (Exception e) { logger.error("getActiveMasterNum error", e); @@ -81,22 +95,23 @@ public class RegistryClient extends RegistryCenter { return childrenList.size(); } - /** - * get server list. 
- * - * @param nodeType zookeeper node type - * @return server list - */ public List getServerList(NodeType nodeType) { - Map serverMaps = getServerMaps(nodeType); - String parentPath = getNodeParentPath(nodeType); + Map serverMaps = getServerMaps(nodeType, false); + String parentPath = rootNodePath(nodeType); List serverList = new ArrayList<>(); for (Map.Entry entry : serverMaps.entrySet()) { - Server server = ResInfo.parseHeartbeatForRegistryInfo(entry.getValue()); - if (server == null) { + HeartBeat heartBeat = HeartBeat.decodeHeartBeat(entry.getValue()); + if (heartBeat == null) { continue; } + + Server server = new Server(); + server.setResInfo(JSONUtils.toJsonString(heartBeat)); + server.setCreateTime(new Date(heartBeat.getStartupTime())); + server.setLastHeartbeatTime(new Date(heartBeat.getReportTime())); + server.setId(heartBeat.getProcessId()); + String key = entry.getKey(); server.setZkDirectory(parentPath + "/" + key); // set host and port @@ -110,40 +125,11 @@ public class RegistryClient extends RegistryCenter { return serverList; } - /** - * get server nodes. - * - * @param nodeType registry node type - * @return result : list - */ - public List getServerNodes(NodeType nodeType) { - String path = getNodeParentPath(nodeType); - List serverList = getChildrenKeys(path); - if (nodeType == NodeType.WORKER) { - List workerList = new ArrayList<>(); - for (String group : serverList) { - List groupServers = getChildrenKeys(path + SINGLE_SLASH + group); - for (String groupServer : groupServers) { - workerList.add(group + SINGLE_SLASH + groupServer); - } - } - serverList = workerList; - } - return serverList; - } - - /** - * get server list map. 
- * - * @param nodeType zookeeper node type - * @param hostOnly host only - * @return result : {host : resource info} - */ public Map getServerMaps(NodeType nodeType, boolean hostOnly) { Map serverMap = new HashMap<>(); try { - String path = getNodeParentPath(nodeType); - List serverList = getServerNodes(nodeType); + String path = rootNodePath(nodeType); + Collection serverList = getServerNodes(nodeType); for (String server : serverList) { String host = server; if (nodeType == NodeType.WORKER && hostOnly) { @@ -158,298 +144,194 @@ public class RegistryClient extends RegistryCenter { return serverMap; } - /** - * get server list map. - * - * @param nodeType zookeeper node type - * @return result : {host : resource info} - */ - public Map getServerMaps(NodeType nodeType) { - return getServerMaps(nodeType, false); + public boolean checkNodeExists(String host, NodeType nodeType) { + return getServerMaps(nodeType, true).keySet() + .stream() + .anyMatch(it -> it.contains(host)); } - /** - * get server node set. - * - * @param nodeType zookeeper node type - * @param hostOnly host only - * @return result : set - */ - public Set getServerNodeSet(NodeType nodeType, boolean hostOnly) { - Set serverSet = new HashSet<>(); - try { - List serverList = getServerNodes(nodeType); - for (String server : serverList) { - String host = server; - if (nodeType == NodeType.WORKER && hostOnly) { - host = server.split(SINGLE_SLASH)[1]; - } - serverSet.add(host); + public void handleDeadServer(Collection nodes, NodeType nodeType, String opType) { + nodes.forEach(node -> { + final String host = getHostByEventDataPath(node); + final String type = nodeType == NodeType.MASTER ? 
MASTER_TYPE : WORKER_TYPE; + + if (opType.equals(DELETE_OP)) { + removeDeadServerByHost(host, type); + } else if (opType.equals(ADD_OP)) { + String deadServerPath = REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS + SINGLE_SLASH + type + UNDERLINE + host; + // Add dead server info to zk dead server path : /dead-servers/ + registry.put(deadServerPath, type + UNDERLINE + host, false); + logger.info("{} server dead , and {} added to zk dead server path success", nodeType, node); } - } catch (Exception e) { - logger.error("get server node set failed", e); - } - return serverSet; + }); } - /** - * get server node list. - * - * @param nodeType zookeeper node type - * @param hostOnly host only - * @return result : list - */ - public List getServerNodeList(NodeType nodeType, boolean hostOnly) { - Set serverSet = getServerNodeSet(nodeType, hostOnly); - List serverList = new ArrayList<>(serverSet); - Collections.sort(serverList); - return serverList; - } + public boolean checkIsDeadServer(String node, String serverType) { + // ip_sequence_no + String[] zNodesPath = node.split("/"); + String ipSeqNo = zNodesPath[zNodesPath.length - 1]; + String deadServerPath = REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS + SINGLE_SLASH + serverType + UNDERLINE + ipSeqNo; - /** - * check the zookeeper node already exists - * - * @param host host - * @param nodeType zookeeper node type - * @return true if exists - */ - public boolean checkNodeExists(String host, NodeType nodeType) { - String path = getNodeParentPath(nodeType); - if (StringUtils.isEmpty(path)) { - logger.error("check zk node exists error, host:{}, zk node type:{}", - host, nodeType); - return false; - } - Map serverMaps = getServerMaps(nodeType, true); - for (String hostKey : serverMaps.keySet()) { - if (hostKey.contains(host)) { - return true; - } - } - return false; + return !exists(node) || exists(deadServerPath); } - /** - * @return get worker node parent path - */ - protected String getWorkerNodeParentPath() { - return 
Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; + public Collection getMasterNodesDirectly() { + return getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_MASTERS); } - /** - * @return get master node parent path - */ - protected String getMasterNodeParentPath() { - return Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS; + public Collection getWorkerGroupDirectly() { + return getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_WORKERS); } - /** - * @return get dead server node parent path - */ - protected String getDeadNodeParentPath() { - return Constants.REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS; + public Collection getWorkerGroupNodesDirectly(String workerGroup) { + return getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_WORKERS + "/" + workerGroup); } /** - * @return get master lock path + * get host ip:port, path format: parentPath/ip:port + * + * @param path path + * @return host ip:port, string format: parentPath/ip:port */ - public String getMasterLockPath() { - return Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_MASTERS; + public String getHostByEventDataPath(String path) { + checkArgument(!Strings.isNullOrEmpty(path), "path cannot be null or empty"); + + final String[] pathArray = path.split(SINGLE_SLASH); + + checkArgument(pathArray.length >= 1, "cannot parse path: %s", path); + + return pathArray[pathArray.length - 1]; } - /** - * @param nodeType zookeeper node type - * @return get zookeeper node parent path - */ - public String getNodeParentPath(NodeType nodeType) { - String path = ""; - switch (nodeType) { - case MASTER: - return getMasterNodeParentPath(); - case WORKER: - return getWorkerNodeParentPath(); - case DEAD_SERVER: - return getDeadNodeParentPath(); - default: - break; + public void close() throws IOException { + if (isStarted.compareAndSet(true, false) && registry != null) { + registry.close(); } - return path; } - /** - * @return get master start up lock path - */ - public String getMasterStartUpLockPath() { - return 
Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS; + public void persistEphemeral(String key, String value) { + registry.put(key, value, true); } - /** - * @return get master failover lock path - */ - public String getMasterFailoverLockPath() { - return Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS; + public void remove(String key) { + registry.delete(key); } - /** - * @return get worker failover lock path - */ - public String getWorkerFailoverLockPath() { - return Constants.REGISTRY_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS; + public String get(String key) { + return registry.get(key); } - /** - * opType(add): if find dead server , then add to zk deadServerPath - * opType(delete): delete path from zk - * - * @param node node path - * @param nodeType master or worker - * @param opType delete or add - * @throws Exception errors - */ - public void handleDeadServer(String node, NodeType nodeType, String opType) throws Exception { - String host = getHostByEventDataPath(node); - String type = (nodeType == NodeType.MASTER) ? 
MASTER_TYPE : WORKER_TYPE; - - //check server restart, if restart , dead server path in zk should be delete - if (opType.equals(DELETE_OP)) { - removeDeadServerByHost(host, type); - - } else if (opType.equals(ADD_OP)) { - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + host; - if (!isExisted(deadServerPath)) { - //add dead server info to zk dead server path : /dead-servers/ + public void subscribe(String path, SubscribeListener listener) { + registry.subscribe(path, listener); + } - persist(deadServerPath, (type + UNDERLINE + host)); + public void addConnectionStateListener(ConnectionListener listener) { + registry.addConnectionStateListener(listener); + } - logger.info("{} server dead , and {} added to zk dead server path success", - nodeType, node); - } - } + public boolean exists(String key) { + return registry.exists(key); + } + public boolean getLock(String key) { + return registry.acquireLock(key); } - /** - * check dead server or not , if dead, stop self - * - * @param node node path - * @param serverType master or worker prefix - * @return true if not exists - * @throws Exception errors - */ - public boolean checkIsDeadServer(String node, String serverType) throws Exception { - // ip_sequence_no - String[] zNodesPath = node.split("\\/"); - String ipSeqNo = zNodesPath[zNodesPath.length - 1]; - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + serverType + UNDERLINE + ipSeqNo; + public boolean releaseLock(String key) { + return registry.releaseLock(key); + } - return !isExisted(node) || isExisted(deadServerPath); + public void setStoppable(IStoppable stoppable) { + this.stoppable = stoppable; } - /** - * get master nodes directly - * - * @return master nodes - */ - public Set getMasterNodesDirectly() { - List masters = getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_MASTERS); - return new HashSet<>(masters); + public IStoppable getStoppable() { + return stoppable; } - /** - * get worker nodes directly - * - * 
@return master nodes - */ - public Set getWorkerNodesDirectly() { - List workers = getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_WORKERS); - return new HashSet<>(workers); + public boolean isMasterPath(String path) { + return path != null && path.startsWith(REGISTRY_DOLPHINSCHEDULER_MASTERS); } - /** - * get worker group directly - * - * @return worker group nodes - */ - public Set getWorkerGroupDirectly() { - List workers = getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_WORKERS); - return new HashSet<>(workers); + public boolean isWorkerPath(String path) { + return path != null && path.startsWith(REGISTRY_DOLPHINSCHEDULER_WORKERS); } - /** - * get worker group nodes - */ - public Set getWorkerGroupNodesDirectly(String workerGroup) { - List workers = getChildrenKeys(getWorkerGroupPath(workerGroup)); - return new HashSet<>(workers); + public Collection getChildrenKeys(final String key) { + return registry.children(key); } - /** - * opType(add): if find dead server , then add to zk deadServerPath - * opType(delete): delete path from zk - * - * @param nodeSet node path set - * @param nodeType master or worker - * @param opType delete or add - * @throws Exception errors - */ - public void handleDeadServer(Set nodeSet, NodeType nodeType, String opType) throws Exception { + public Set getServerNodeSet(NodeType nodeType, boolean hostOnly) { + try { + return getServerNodes(nodeType).stream().map(server -> { + if (nodeType == NodeType.WORKER && hostOnly) { + return server.split(SINGLE_SLASH)[1]; + } + return server; + }).collect(Collectors.toSet()); + } catch (Exception e) { + throw new RegistryException("Failed to get server node: " + nodeType, e); + } + } - String type = (nodeType == NodeType.MASTER) ? 
MASTER_TYPE : WORKER_TYPE; - for (String node : nodeSet) { - String host = getHostByEventDataPath(node); - //check server restart, if restart , dead server path in zk should be delete - if (opType.equals(DELETE_OP)) { - removeDeadServerByHost(host, type); + private void start() { + if (isStarted.compareAndSet(false, true)) { + PropertyUtils.loadPropertyFile(REGISTRY_CONFIG_FILE_PATH); + final Map registryConfig = PropertyUtils.getPropertiesByPrefix(REGISTRY_PREFIX); - } else if (opType.equals(ADD_OP)) { - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + host; - if (!isExisted(deadServerPath)) { - //add dead server info to zk dead server path : /dead-servers/ - persist(deadServerPath, (type + UNDERLINE + host)); - logger.info("{} server dead , and {} added to registry dead server path success", - nodeType, node); - } + if (null == registryConfig || registryConfig.isEmpty()) { + throw new RegistryException("registry config param is null"); } - + final String pluginName = registryConfig.get(REGISTRY_PLUGIN_NAME); + final Map factories = RegistryFactoryLoader.load(); + if (!factories.containsKey(pluginName)) { + throw new RegistryException("No such registry plugin: " + pluginName); + } + registry = factories.get(pluginName).create(); + registry.start(registryConfig); } + } + private void initNodes() { + registry.put(REGISTRY_DOLPHINSCHEDULER_MASTERS, EMPTY, false); + registry.put(REGISTRY_DOLPHINSCHEDULER_WORKERS, EMPTY, false); + registry.put(REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS, EMPTY, false); } - /** - * get host ip:port, string format: parentPath/ip:port - * - * @param path path - * @return host ip:port, string format: parentPath/ip:port - */ - public String getHostByEventDataPath(String path) { - if (StringUtils.isEmpty(path)) { - logger.error("empty path!"); - return ""; - } - String[] pathArray = path.split(SINGLE_SLASH); - if (pathArray.length < 1) { - logger.error("parse ip error: {}", path); - return ""; + private String 
rootNodePath(NodeType type) { + switch (type) { + case MASTER: + return Constants.REGISTRY_DOLPHINSCHEDULER_MASTERS; + case WORKER: + return Constants.REGISTRY_DOLPHINSCHEDULER_WORKERS; + case DEAD_SERVER: + return Constants.REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS; + default: + throw new IllegalStateException("Should not reach here"); } - return pathArray[pathArray.length - 1]; + } + private Collection getServerNodes(NodeType nodeType) { + final String path = rootNodePath(nodeType); + final Collection serverList = getChildrenKeys(path); + if (nodeType != NodeType.WORKER) { + return serverList; + } + return serverList.stream().flatMap(group -> + getChildrenKeys(path + SINGLE_SLASH + group) + .stream() + .map(it -> group + SINGLE_SLASH + it) + ).collect(Collectors.toList()); } - /** - * remove dead server by host - * - * @param host host - * @param serverType serverType - */ - public void removeDeadServerByHost(String host, String serverType) { - List deadServers = getChildrenKeys(getDeadZNodeParentPath()); + private void removeDeadServerByHost(String host, String serverType) { + Collection deadServers = getChildrenKeys(REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS); for (String serverPath : deadServers) { if (serverPath.startsWith(serverType + UNDERLINE + host)) { - String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath; + String server = REGISTRY_DOLPHINSCHEDULER_DEAD_SERVERS + SINGLE_SLASH + serverPath; remove(server); - logger.info("{} server {} deleted from zk dead server path success", serverType, host); + logger.info("{} server {} deleted from zk dead server path:{} success", serverType, host,server); } } } - } diff --git a/dolphinscheduler-service/src/main/resources/quartz.properties b/dolphinscheduler-service/src/main/resources/quartz.properties index 93ee71c6a3dcef6d5f14e533ac65085ee939cb6c..5420755fe70d4873aeafd5abe12482cfc28a4bea 100644 --- a/dolphinscheduler-service/src/main/resources/quartz.properties +++ 
b/dolphinscheduler-service/src/main/resources/quartz.properties @@ -51,4 +51,4 @@ #============================================================================ # Configure Datasources #============================================================================ -#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider +#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.HikariConnectionProvider diff --git a/dolphinscheduler-service/src/main/resources/registry.properties b/dolphinscheduler-service/src/main/resources/registry.properties index 4da2ec3e5532a89054e6448d41d3ae251234a5bc..f8e06de4ce9f9a4312b824c8510a353e70338242 100644 --- a/dolphinscheduler-service/src/main/resources/registry.properties +++ b/dolphinscheduler-service/src/main/resources/registry.properties @@ -15,13 +15,13 @@ # limitations under the License. # -#registry.plugin.dir config the Registry Plugin dir. -registry.plugin.dir=lib/plugin/registry - registry.plugin.name=zookeeper registry.servers=127.0.0.1:2181 - -#maven.local.repository=/usr/local/localRepository - -#registry.plugin.binding config the Registry Plugin need be load when development and run in IDE -#registry.plugin.binding=./dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml +registry.namespace=dolphinscheduler +registry.base.sleep.time.ms=60 +registry.max.sleep.ms=300 +registry.max.retries=5 +registry.session.timeout.ms=30000 +registry.connection.timeout.ms=7500 +registry.block.until.connected.wait=600 +registry.digest= diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtils.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/cache/CacheNotifyServiceTest.java similarity index 33% rename from dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtils.java rename to 
dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/cache/CacheNotifyServiceTest.java index 71e0456583eb7aa8aa410b3f07fb06cd05f6eae1..a3dafb677b326e9fe134c20e168f3114f76b2223 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/RegistryCenterUtils.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/cache/CacheNotifyServiceTest.java @@ -15,68 +15,72 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.api.utils; +package org.apache.dolphinscheduler.service.cache; +import org.apache.dolphinscheduler.common.enums.CacheType; import org.apache.dolphinscheduler.common.enums.NodeType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.CacheExpireCommand; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.service.cache.impl.CacheNotifyServiceImpl; import org.apache.dolphinscheduler.service.registry.RegistryClient; import java.util.ArrayList; import java.util.List; -import java.util.Map; + +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; /** - * monitor zookeeper info todo registry-spi - * fixme Some of the information obtained in the api belongs to the unique information of zk. - * I am not sure whether there is a good abstraction method. This is related to whether the specific plug-in is provided. 
+ * tenant cache proxy test */ -public class RegistryCenterUtils { +@RunWith(MockitoJUnitRunner.Silent.class) +public class CacheNotifyServiceTest { - private static RegistryClient registryClient = RegistryClient.getInstance(); + @Rule + public final ExpectedException exception = ExpectedException.none(); - /** - * @return zookeeper info list - */ - public static List zookeeperInfoList() { - return null; - } + @InjectMocks + private CacheNotifyServiceImpl cacheNotifyService; - /** - * get master servers - * - * @return master server information - */ - public static List getMasterServers() { - return registryClient.getServerList(NodeType.MASTER); - } + @Mock + private RegistryClient registryClient; - /** - * master construct is the same with worker, use the master instead - * - * @return worker server informations - */ - public static List getWorkerServers() { - return registryClient.getServerList(NodeType.WORKER); - } + @Test + public void testNotifyMaster() { + User user1 = new User(); + user1.setId(100); + Command cacheExpireCommand = new CacheExpireCommand(CacheType.USER, "100").convert2Command(); - public static Map getServerMaps(NodeType nodeType, boolean hostOnly) { - return registryClient.getServerMaps(nodeType, hostOnly); - } + NettyServerConfig serverConfig = new NettyServerConfig(); - public static List getServerNodeList(NodeType nodeType, boolean hostOnly) { - return registryClient.getServerNodeList(nodeType, hostOnly); - } + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(CommandType.CACHE_EXPIRE, (channel, command) -> { + Assert.assertEquals(cacheExpireCommand, command); + }); + nettyRemotingServer.start(); - public static boolean isNodeExisted(String key) { - return registryClient.isExisted(key); - } + List serverList = new ArrayList<>(); + Server server = new Server(); + server.setHost("127.0.0.1"); + server.setPort(serverConfig.getListenPort()); + serverList.add(server); - 
public static List getChildrenNodes(final String key) { - return registryClient.getChildrenKeys(key); - } + Mockito.when(registryClient.getServerList(NodeType.MASTER)).thenReturn(serverList); + + cacheNotifyService.notifyMaster(cacheExpireCommand); - public static String getNodeData(String key) { - return registryClient.get(key); + nettyRemotingServer.close(); } } diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java index e13558dbc1d8abff54c03ebd114ebd07543c03c5..4cfab170b24ee70c4247c4f0cfeef4ef603ae07e 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java @@ -19,7 +19,8 @@ package org.apache.dolphinscheduler.service.process; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; -import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE; + import static org.mockito.ArgumentMatchers.any; import org.apache.dolphinscheduler.common.Constants; @@ -82,7 +83,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.Lists; /** * process service test @@ -126,7 +126,7 @@ public class ProcessServiceTest { parentInstance.setWarningGroupId(0); TaskInstance task = new TaskInstance(); - task.setTaskParams("{\"processDefinitionId\":100}}"); + task.setTaskParams("{\"processDefinitionCode\":10}}"); task.setId(10); task.setTaskCode(1L); task.setTaskDefinitionVersion(1); @@ -141,8 +141,8 @@ public class 
ProcessServiceTest { parentInstance.setHistoryCmd("START_PROCESS"); parentInstance.setCommandType(CommandType.START_PROCESS); ProcessDefinition processDefinition = new ProcessDefinition(); - processDefinition.setCode(1L); - Mockito.when(processDefineMapper.queryByDefineId(100)).thenReturn(processDefinition); + processDefinition.setCode(10L); + Mockito.when(processDefineMapper.queryByCode(10L)).thenReturn(processDefinition); command = processService.createSubProcessCommand(parentInstance, childInstance, instanceMap, task); Assert.assertEquals(CommandType.START_PROCESS, command.getCommandType()); @@ -240,56 +240,74 @@ public class ProcessServiceTest { //cannot construct process instance, return null; String host = "127.0.0.1"; - int validThreadNum = 1; Command command = new Command(); command.setProcessDefinitionCode(222); command.setCommandType(CommandType.REPEAT_RUNNING); command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"111\",\"" - + CMD_PARAM_SUB_PROCESS_DEFINE_ID + "\":\"222\"}"); - Assert.assertNull(processService.handleCommand(logger, host, validThreadNum, command)); + + CMD_PARAM_SUB_PROCESS_DEFINE_CODE + "\":\"222\"}"); + Assert.assertNull(processService.handleCommand(logger, host, command)); + int definitionVersion = 1; + long definitionCode = 123; + int processInstanceId = 222; //there is not enough thread for this command Command command1 = new Command(); - command1.setProcessDefinitionCode(123); + command1.setId(1); + command1.setProcessDefinitionCode(definitionCode); + command1.setProcessDefinitionVersion(definitionVersion); command1.setCommandParam("{\"ProcessInstanceId\":222}"); command1.setCommandType(CommandType.START_PROCESS); + Mockito.when(commandMapper.deleteById(1)).thenReturn(1); + ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setId(123); processDefinition.setName("test"); - processDefinition.setVersion(1); - processDefinition.setCode(11L); + 
processDefinition.setVersion(definitionVersion); + processDefinition.setCode(definitionCode); processDefinition.setGlobalParams("[{\"prop\":\"startParam1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]"); ProcessInstance processInstance = new ProcessInstance(); - processInstance.setId(222); - processInstance.setProcessDefinitionCode(11L); - processInstance.setProcessDefinitionVersion(1); - Mockito.when(processDefineMapper.queryByCode(command1.getProcessDefinitionCode())).thenReturn(processDefinition); + processInstance.setId(processInstanceId); + processInstance.setProcessDefinitionCode(definitionCode); + processInstance.setProcessDefinitionVersion(definitionVersion); Mockito.when(processDefineLogMapper.queryByDefinitionCodeAndVersion(processInstance.getProcessDefinitionCode(), - processInstance.getProcessDefinitionVersion())).thenReturn(new ProcessDefinitionLog(processDefinition)); + processInstance.getProcessDefinitionVersion())).thenReturn(new ProcessDefinitionLog(processDefinition)); Mockito.when(processInstanceMapper.queryDetailById(222)).thenReturn(processInstance); - Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command1)); + Assert.assertNotNull(processService.handleCommand(logger, host, command1)); Command command2 = new Command(); + command2.setId(2); command2.setCommandParam("{\"ProcessInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}"); - command2.setProcessDefinitionCode(123); + command2.setProcessDefinitionCode(definitionCode); + command2.setProcessDefinitionVersion(definitionVersion); command2.setCommandType(CommandType.RECOVER_SUSPENDED_PROCESS); - - Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command2)); + command2.setProcessInstanceId(processInstanceId); + Mockito.when(commandMapper.deleteById(2)).thenReturn(1); + Assert.assertNotNull(processService.handleCommand(logger, host, command2)); Command command3 = new Command(); - command3.setProcessDefinitionCode(123); + 
command3.setId(3); + command3.setProcessDefinitionCode(definitionCode); + command3.setProcessDefinitionVersion(definitionVersion); + command3.setProcessInstanceId(processInstanceId); command3.setCommandParam("{\"WaitingThreadInstanceId\":222}"); command3.setCommandType(CommandType.START_FAILURE_TASK_PROCESS); - Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command3)); + Mockito.when(commandMapper.deleteById(3)).thenReturn(1); + Assert.assertNotNull(processService.handleCommand(logger, host, command3)); Command command4 = new Command(); - command4.setProcessDefinitionCode(123); + command4.setId(4); + command4.setProcessDefinitionCode(definitionCode); + command4.setProcessDefinitionVersion(definitionVersion); command4.setCommandParam("{\"WaitingThreadInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}"); command4.setCommandType(CommandType.REPEAT_RUNNING); - Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command4)); + command4.setProcessInstanceId(processInstanceId); + Mockito.when(commandMapper.deleteById(4)).thenReturn(1); + Assert.assertNotNull(processService.handleCommand(logger, host, command4)); Command command5 = new Command(); - command5.setProcessDefinitionCode(123); + command5.setId(5); + command5.setProcessDefinitionCode(definitionCode); + command5.setProcessDefinitionVersion(definitionVersion); HashMap startParams = new HashMap<>(); startParams.put("startParam1", "testStartParam1"); HashMap commandParams = new HashMap<>(); @@ -297,7 +315,9 @@ public class ProcessServiceTest { command5.setCommandParam(JSONUtils.toJsonString(commandParams)); command5.setCommandType(CommandType.START_PROCESS); command5.setDryRun(Constants.DRY_RUN_FLAG_NO); - ProcessInstance processInstance1 = processService.handleCommand(logger, host, validThreadNum, command5); + Mockito.when(commandMapper.deleteById(5)).thenReturn(1); + + ProcessInstance processInstance1 = processService.handleCommand(logger, host, command5); 
Assert.assertTrue(processInstance1.getGlobalParams().contains("\"testStartParam1\"")); } @@ -336,7 +356,6 @@ public class ProcessServiceTest { ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setCode(parentProcessDefineCode); processDefinition.setVersion(parentProcessDefineVersion); - Mockito.when(processDefineMapper.selectById(parentProcessDefineId)).thenReturn(processDefinition); long postTaskCode = 2L; int postTaskVersion = 2; @@ -346,19 +365,19 @@ public class ProcessServiceTest { processTaskRelationLog.setPostTaskCode(postTaskCode); processTaskRelationLog.setPostTaskVersion(postTaskVersion); relationLogList.add(processTaskRelationLog); + Mockito.when(processDefineMapper.queryByCode(parentProcessDefineCode)).thenReturn(processDefinition); Mockito.when(processTaskRelationLogMapper.queryByProcessCodeAndVersion(parentProcessDefineCode - , parentProcessDefineVersion)).thenReturn(relationLogList); + , parentProcessDefineVersion)).thenReturn(relationLogList); List taskDefinitionLogs = new ArrayList<>(); TaskDefinitionLog taskDefinitionLog1 = new TaskDefinitionLog(); - taskDefinitionLog1.setTaskParams("{\"processDefinitionId\": 123}"); + taskDefinitionLog1.setTaskParams("{\"processDefinitionCode\": 123L}"); taskDefinitionLogs.add(taskDefinitionLog1); - Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(Mockito.anySet())).thenReturn(taskDefinitionLogs); - List ids = new ArrayList<>(); - processService.recurseFindSubProcessId(parentProcessDefineId, ids); + List ids = new ArrayList<>(); + processService.recurseFindSubProcess(parentProcessDefineCode, ids); - Assert.assertEquals(1, ids.size()); + Assert.assertEquals(0, ids.size()); } @Test @@ -383,14 +402,14 @@ public class ProcessServiceTest { operator.setUserType(UserType.GENERAL_USER); long projectCode = 751485690568704L; String taskJson = "[{\"code\":751500437479424,\"name\":\"aa\",\"version\":1,\"description\":\"\",\"delayTime\":0," - + 
"\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"sleep 1s\\necho 11\"," - + "\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"waitStartTimeout\":{}}," - + "\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"yarn\",\"failRetryTimes\":0,\"failRetryInterval\":1," - + "\"timeoutFlag\":\"OPEN\",\"timeoutNotifyStrategy\":\"FAILED\",\"timeout\":1,\"environmentCode\":751496815697920}," - + "{\"code\":751516889636864,\"name\":\"bb\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[]," - + "\"localParams\":[],\"rawScript\":\"echo 22\",\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]}," - + "\"waitStartTimeout\":{}},\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":\"0\"," - + "\"failRetryInterval\":\"1\",\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"\",\"timeout\":0,\"delayTime\":\"0\",\"environmentCode\":-1}]"; + + "\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"sleep 1s\\necho 11\"," + + "\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"waitStartTimeout\":{}}," + + "\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"yarn\",\"failRetryTimes\":0,\"failRetryInterval\":1," + + "\"timeoutFlag\":\"OPEN\",\"timeoutNotifyStrategy\":\"FAILED\",\"timeout\":1,\"environmentCode\":751496815697920}," + + "{\"code\":751516889636864,\"name\":\"bb\",\"description\":\"\",\"taskType\":\"SHELL\",\"taskParams\":{\"resourceList\":[]," + + "\"localParams\":[],\"rawScript\":\"echo 22\",\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]}," + + "\"waitStartTimeout\":{}},\"flag\":\"YES\",\"taskPriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"failRetryTimes\":\"0\"," + + 
"\"failRetryInterval\":\"1\",\"timeoutFlag\":\"CLOSE\",\"timeoutNotifyStrategy\":\"\",\"timeout\":0,\"delayTime\":\"0\",\"environmentCode\":-1}]"; List taskDefinitionLogs = JSONUtils.toList(taskJson, TaskDefinitionLog.class); TaskDefinitionLog taskDefinition = new TaskDefinitionLog(); taskDefinition.setCode(751500437479424L); @@ -404,7 +423,7 @@ public class ProcessServiceTest { Mockito.when(taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(taskDefinition.getCode(), taskDefinition.getVersion())).thenReturn(taskDefinition); Mockito.when(taskDefinitionLogMapper.queryMaxVersionForDefinition(taskDefinition.getCode())).thenReturn(1); Mockito.when(taskDefinitionMapper.queryByCode(taskDefinition.getCode())).thenReturn(taskDefinition); - int result = processService.saveTaskDefine(operator, projectCode, taskDefinitionLogs); + int result = processService.saveTaskDefine(operator, projectCode, taskDefinitionLogs, Boolean.TRUE); Assert.assertEquals(0, result); } @@ -417,7 +436,7 @@ public class ProcessServiceTest { processDefinition.setVersion(1); processDefinition.setCode(11L); - ProcessTaskRelation processTaskRelation = new ProcessTaskRelation(); + ProcessTaskRelationLog processTaskRelation = new ProcessTaskRelationLog(); processTaskRelation.setName("def 1"); processTaskRelation.setProcessDefinitionVersion(1); processTaskRelation.setProjectCode(1L); @@ -426,7 +445,7 @@ public class ProcessServiceTest { processTaskRelation.setPreTaskCode(2L); processTaskRelation.setUpdateTime(new Date()); processTaskRelation.setCreateTime(new Date()); - List list = new ArrayList<>(); + List list = new ArrayList<>(); list.add(processTaskRelation); TaskDefinitionLog taskDefinition = new TaskDefinitionLog(); @@ -454,7 +473,7 @@ public class ProcessServiceTest { taskDefinitionLogs.add(td2); Mockito.when(taskDefinitionLogMapper.queryByTaskDefinitions(any())).thenReturn(taskDefinitionLogs); - Mockito.when(processTaskRelationMapper.queryByProcessCode(Mockito.anyLong(), 
Mockito.anyLong())).thenReturn(list); + Mockito.when(processTaskRelationLogMapper.queryByProcessCodeAndVersion(Mockito.anyLong(), Mockito.anyInt())).thenReturn(list); DAG stringTaskNodeTaskNodeRelationDAG = processService.genDagGraph(processDefinition); Assert.assertEquals(1, stringTaskNodeTaskNodeRelationDAG.getNodesCount()); @@ -481,10 +500,10 @@ public class ProcessServiceTest { processInstance.setId(62); taskInstance.setVarPool("[{\"direct\":\"OUT\",\"prop\":\"test1\",\"type\":\"VARCHAR\",\"value\":\"\"}]"); taskInstance.setTaskParams("{\"type\":\"MYSQL\",\"datasource\":1,\"sql\":\"select id from tb_test limit 1\"," - + "\"udfs\":\"\",\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\"," - + "\"groupId\":null,\"localParams\":[{\"prop\":\"test1\",\"direct\":\"OUT\",\"type\":\"VARCHAR\",\"value\":\"12\"}]," - + "\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[],\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"]," - + "\\\"failedNode\\\":[\\\"\\\"]}\",\"dependence\":\"{}\"}"); + + "\"udfs\":\"\",\"sqlType\":\"0\",\"sendEmail\":false,\"displayRows\":10,\"title\":\"\"," + + "\"groupId\":null,\"localParams\":[{\"prop\":\"test1\",\"direct\":\"OUT\",\"type\":\"VARCHAR\",\"value\":\"12\"}]," + + "\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[],\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"]," + + "\\\"failedNode\\\":[\\\"\\\"]}\",\"dependence\":\"{}\"}"); processService.changeOutParam(taskInstance); } @@ -492,65 +511,65 @@ public class ProcessServiceTest { public void testUpdateTaskDefinitionResources() throws Exception { TaskDefinition taskDefinition = new TaskDefinition(); String taskParameters = "{\n" - + " \"mainClass\": \"org.apache.dolphinscheduler.SparkTest\",\n" - + " \"mainJar\": {\n" - + " \"id\": 1\n" - + " },\n" - + " \"deployMode\": \"cluster\",\n" - + " \"resourceList\": [\n" - + " {\n" - + " \"id\": 3\n" - + " },\n" - + " {\n" - + " \"id\": 4\n" - + " }\n" - + " ],\n" - + " \"localParams\": [],\n" - 
+ " \"driverCores\": 1,\n" - + " \"driverMemory\": \"512M\",\n" - + " \"numExecutors\": 2,\n" - + " \"executorMemory\": \"2G\",\n" - + " \"executorCores\": 2,\n" - + " \"appName\": \"\",\n" - + " \"mainArgs\": \"\",\n" - + " \"others\": \"\",\n" - + " \"programType\": \"JAVA\",\n" - + " \"sparkVersion\": \"SPARK2\",\n" - + " \"dependence\": {},\n" - + " \"conditionResult\": {\n" - + " \"successNode\": [\n" - + " \"\"\n" - + " ],\n" - + " \"failedNode\": [\n" - + " \"\"\n" - + " ]\n" - + " },\n" - + " \"waitStartTimeout\": {}\n" - + "}"; + + " \"mainClass\": \"org.apache.dolphinscheduler.SparkTest\",\n" + + " \"mainJar\": {\n" + + " \"id\": 1\n" + + " },\n" + + " \"deployMode\": \"cluster\",\n" + + " \"resourceList\": [\n" + + " {\n" + + " \"id\": 3\n" + + " },\n" + + " {\n" + + " \"id\": 4\n" + + " }\n" + + " ],\n" + + " \"localParams\": [],\n" + + " \"driverCores\": 1,\n" + + " \"driverMemory\": \"512M\",\n" + + " \"numExecutors\": 2,\n" + + " \"executorMemory\": \"2G\",\n" + + " \"executorCores\": 2,\n" + + " \"appName\": \"\",\n" + + " \"mainArgs\": \"\",\n" + + " \"others\": \"\",\n" + + " \"programType\": \"JAVA\",\n" + + " \"sparkVersion\": \"SPARK2\",\n" + + " \"dependence\": {},\n" + + " \"conditionResult\": {\n" + + " \"successNode\": [\n" + + " \"\"\n" + + " ],\n" + + " \"failedNode\": [\n" + + " \"\"\n" + + " ]\n" + + " },\n" + + " \"waitStartTimeout\": {}\n" + + "}"; taskDefinition.setTaskParams(taskParameters); Map resourceMap = - Stream.of(1, 3, 4) - .map(i -> { - Resource resource = new Resource(); - resource.setId(i); - resource.setFileName("file" + i); - resource.setFullName("/file" + i); - return resource; - }) - .collect( - Collectors.toMap( - Resource::getId, - resource -> resource) - ); + Stream.of(1, 3, 4) + .map(i -> { + Resource resource = new Resource(); + resource.setId(i); + resource.setFileName("file" + i); + resource.setFullName("/file" + i); + return resource; + }) + .collect( + Collectors.toMap( + Resource::getId, + resource -> 
resource) + ); for (Integer integer : Arrays.asList(1, 3, 4)) { Mockito.when(resourceMapper.selectById(integer)) - .thenReturn(resourceMap.get(integer)); + .thenReturn(resourceMap.get(integer)); } Whitebox.invokeMethod(processService, - "updateTaskDefinitionResources", - taskDefinition); + "updateTaskDefinitionResources", + taskDefinition); String taskParams = taskDefinition.getTaskParams(); SparkParameters sparkParameters = JSONUtils.parseObject(taskParams, SparkParameters.class); @@ -576,15 +595,15 @@ public class ProcessServiceTest { // test if input is null ResourceInfo resourceInfoNull = null; ResourceInfo updatedResourceInfo1 = Whitebox.invokeMethod(processService, - "updateResourceInfo", - resourceInfoNull); + "updateResourceInfo", + resourceInfoNull); Assert.assertNull(updatedResourceInfo1); // test if resource id less than 1 ResourceInfo resourceInfoVoid = new ResourceInfo(); ResourceInfo updatedResourceInfo2 = Whitebox.invokeMethod(processService, - "updateResourceInfo", - resourceInfoVoid); + "updateResourceInfo", + resourceInfoVoid); Assert.assertNull(updatedResourceInfo2); // test normal situation @@ -596,8 +615,8 @@ public class ProcessServiceTest { resource.setFullName("/test.txt"); Mockito.when(resourceMapper.selectById(1)).thenReturn(resource); ResourceInfo updatedResourceInfo3 = Whitebox.invokeMethod(processService, - "updateResourceInfo", - resourceInfoNormal); + "updateResourceInfo", + resourceInfoNormal); Assert.assertEquals(1, updatedResourceInfo3.getId()); Assert.assertEquals("test.txt", updatedResourceInfo3.getRes()); diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtilsTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtilsTest.java index 55cc19d338df94c98ac8ef2bb98e610e18a08d85..4fbcd8f9c081ac4ae8fbbbd99bd38187da41609f 100644 --- 
a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtilsTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtilsTest.java @@ -95,6 +95,20 @@ public class CronUtilsTest { CycleEnum cycleEnum3 = CronUtils.getMiniCycle(CronUtils.parse2Cron("0 * * * * ? *")); Assert.assertEquals("MINUTE", cycleEnum3.name()); + + CycleEnum cycleEnum4 = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 0 7 * 1 ? *")); + Assert.assertEquals("YEAR", cycleEnum4.name()); + cycleEnum4 = CronUtils.getMiniCycle(CronUtils.parse2Cron("0 0 7 * 1 ? *")); + Assert.assertEquals("DAY", cycleEnum4.name()); + + CycleEnum cycleEnum5 = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 0 7 * 1/1 ? *")); + Assert.assertEquals("MONTH", cycleEnum5.name()); + + CycleEnum cycleEnum6 = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 0 7 * 1-2 ? *")); + Assert.assertEquals("YEAR", cycleEnum6.name()); + + CycleEnum cycleEnum7 = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 0 7 * 1,2 ? *")); + Assert.assertEquals("YEAR", cycleEnum7.name()); } /** @@ -113,7 +127,7 @@ public class CronUtilsTest { .instance(); // minute cycle String[] cronArayy = new String[]{"* * * * * ? *","* 0 * * * ? *", - "* 5 * * 3/5 ? *","0 0 * * * ? *"}; + "* 5 * * 3/5 ? *","0 0 * * * ? *", "0 0 7 * 1 ? *", "0 0 7 * 1/1 ? *", "0 0 7 * 1-2 ? *" , "0 0 7 * 1,2 ? 
*"}; for(String minCrontab:cronArayy){ if (!org.quartz.CronExpression.isValidExpression(minCrontab)) { throw new RuntimeException(minCrontab+" verify failure, cron expression not valid"); @@ -155,6 +169,14 @@ public class CronUtilsTest { logger.info("dayOfWeekField instanceof On:"+(dayOfWeekField.getExpression() instanceof On)); logger.info("dayOfWeekField instanceof And:"+(dayOfWeekField.getExpression() instanceof And)); logger.info("dayOfWeekField instanceof QuestionMark:"+(dayOfWeekField.getExpression() instanceof QuestionMark)); + + CronField yearField = cron.retrieve(CronFieldName.YEAR); + logger.info("yearField instanceof Between:"+(yearField.getExpression() instanceof Between)); + logger.info("yearField instanceof Always:"+(yearField.getExpression() instanceof Always)); + logger.info("yearField instanceof Every:"+(yearField.getExpression() instanceof Every)); + logger.info("yearField instanceof On:"+(yearField.getExpression() instanceof On)); + logger.info("yearField instanceof And:"+(yearField.getExpression() instanceof And)); + logger.info("yearField instanceof QuestionMark:"+(yearField.getExpression() instanceof QuestionMark)); CycleEnum cycleEnum = CronUtils.getMaxCycle(minCrontab); if(cycleEnum !=null){ @@ -204,4 +226,4 @@ public class CronUtilsTest { expirationTime = CronUtils.getExpirationTime(startTime, CycleEnum.YEAR); Assert.assertEquals("2020-02-07 18:30:00", DateUtils.dateToString(expirationTime)); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-spi/pom.xml b/dolphinscheduler-spi/pom.xml index 4755b828b2c226d1cd3b1f40ec7f5ffd1dcd4798..934338c27abde50cb47b200e053320ccfa6f8636 100644 --- a/dolphinscheduler-spi/pom.xml +++ b/dolphinscheduler-spi/pom.xml @@ -20,90 +20,45 @@ org.apache.dolphinscheduler dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT dolphinscheduler-spi ${project.artifactId} - - UTF-8 - - com.fasterxml.jackson.core jackson-annotations - com.fasterxml.jackson.core jackson-databind - - com.fasterxml.jackson.core - 
jackson-core - - org.apache.commons commons-collections4 - provided - - - commons-beanutils - commons-beanutils - provided - - - commons-codec - commons-codec - provided org.slf4j slf4j-api - provided - - junit - junit - test - - - org.jacoco - org.jacoco.agent - runtime - test + com.baomidou + mybatis-plus-annotation + ${mybatis-plus.version} + provided com.google.guava guava - provided - - com.google.code.findbugs - jsr305 - + + com.google.code.findbugs + jsr305 + - - org.sonatype.aether - aether-api - provided - - - org.ow2.asm - asm - provided - - - io.airlift.resolver - resolver - provided - - - \ No newline at end of file + diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannelFactory.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannelFactory.java deleted file mode 100644 index 8ac27ab3094046ff06492e523eeac57b629d0330..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/alert/AlertChannelFactory.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.alert; - -import org.apache.dolphinscheduler.spi.common.UiChannelFactory; - -/** - * Each AlertPlugin need implement this interface - */ -public interface AlertChannelFactory extends UiChannelFactory { - - /** - * The parameters configured in the alert / xxx.properties file will be in the config map - * - * @return AlertChannel - */ - AlertChannel create(); -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/classloader/ThreadContextClassLoader.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/classloader/ThreadContextClassLoader.java deleted file mode 100644 index b905ef72b7a6caa8fa3ca6f1c267a4bca8541cac..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/classloader/ThreadContextClassLoader.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.classloader; - -import java.io.Closeable; - -public class ThreadContextClassLoader - implements Closeable { - private final ClassLoader threadContextClassLoader; - - public ThreadContextClassLoader(ClassLoader newThreadContextClassLoader) { - this.threadContextClassLoader = Thread.currentThread().getContextClassLoader(); - Thread.currentThread().setContextClassLoader(newThreadContextClassLoader); - } - - @Override - public void close() { - Thread.currentThread().setContextClassLoader(threadContextClassLoader); - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseConnectionParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/BaseConnectionParam.java similarity index 67% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseConnectionParam.java rename to dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/BaseConnectionParam.java index ab6e4a84ab4502da338bb5e17982237e54925fe1..b1df15c39786a9ce08deaef12c002a48c9576c80 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/BaseConnectionParam.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/BaseConnectionParam.java @@ -15,24 +15,14 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.spi.datasource; + +import java.util.HashMap; +import java.util.Map; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; -/** - * The base model of connection param - *

- * {@link org.apache.dolphinscheduler.common.datasource.clickhouse.ClickhouseConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.db2.Db2ConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.hive.HiveConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.mysql.MysqlConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.oracle.OracleConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.postgresql.PostgreSqlConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.presto.PrestoConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.spark.SparkConnectionParam} - * {@link org.apache.dolphinscheduler.common.datasource.sqlserver.SqlServerConnectionParam} - */ @JsonInclude(Include.NON_NULL) public abstract class BaseConnectionParam implements ConnectionParam { @@ -46,8 +36,16 @@ public abstract class BaseConnectionParam implements ConnectionParam { protected String jdbcUrl; + protected String driverLocation; + + protected String driverClassName; + + protected String validationQuery; + protected String other; + private Map props = new HashMap<>(); + public String getUser() { return user; } @@ -88,6 +86,30 @@ public abstract class BaseConnectionParam implements ConnectionParam { this.jdbcUrl = jdbcUrl; } + public String getDriverLocation() { + return driverLocation; + } + + public void setDriverLocation(String driverLocation) { + this.driverLocation = driverLocation; + } + + public String getDriverClassName() { + return driverClassName; + } + + public void setDriverClassName(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getValidationQuery() { + return validationQuery; + } + + public void setValidationQuery(String validationQuery) { + this.validationQuery = validationQuery; + } + public String getOther() { return other; } @@ -95,4 +117,12 @@ public abstract class BaseConnectionParam implements 
ConnectionParam { public void setOther(String other) { this.other = other; } + + public Map getProps() { + return props; + } + + public void setProps(Map props) { + this.props = props; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/ConnectionParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/ConnectionParam.java similarity index 94% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/ConnectionParam.java rename to dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/ConnectionParam.java index d4ec697751a75b49f434d45adf6e3bb9d0afd4c4..b3eb903dc76a86f6ea4ff0ada4079a8e6d6fe79f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/datasource/ConnectionParam.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/ConnectionParam.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.dolphinscheduler.common.datasource; +package org.apache.dolphinscheduler.spi.datasource; import java.io.Serializable; diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannel.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..98b67352f70d57827905029c176bb1448f2a1824 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannel.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.datasource; + +import org.apache.dolphinscheduler.spi.enums.DbType; + +public interface DataSourceChannel { + + DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType); +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryFactory.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannelFactory.java similarity index 80% rename from dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryFactory.java rename to dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannelFactory.java index 244c0f437a079a13a178215d5dbd16082c32bf2a..c947c3a647ffcc94a4ddb607a927b8a862320a36 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryFactory.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceChannelFactory.java @@ -15,20 +15,16 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.spi.register; - -/** - * Registry the component factory, all registry must implement this interface - */ -public interface RegistryFactory { +package org.apache.dolphinscheduler.spi.datasource; +public interface DataSourceChannelFactory { /** - * get registry component name + * get datasource client */ - String getName(); + DataSourceChannel create(); /** - * get registry + * get registry component name */ - Registry create(); + String getName(); } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceClient.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceClient.java new file mode 100644 index 0000000000000000000000000000000000000000..879d198284daf3e7716a776b9a7b1955871d7223 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/datasource/DataSourceClient.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.datasource; + +import java.sql.Connection; + +public interface DataSourceClient { + + void checkClient(); + + void close(); + + Connection getConnection(); +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java index 0c88407a02aa80e9379de5a79fe157ee1955f3d3..3809c52269c525fde27ef4a7ca7556461c5efe1b 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java @@ -22,31 +22,38 @@ import static java.util.stream.Collectors.toMap; import java.util.Arrays; import java.util.Map; +import com.baomidou.mybatisplus.annotation.EnumValue; import com.google.common.base.Functions; public enum DbType { + MYSQL(0, "mysql"), + POSTGRESQL(1, "postgresql"), + HIVE(2, "hive"), + SPARK(3, "spark"), + CLICKHOUSE(4, "clickhouse"), + ORACLE(5, "oracle"), + SQLSERVER(6, "sqlserver"), + DB2(7, "db2"), + PRESTO(8, "presto"), + H2(9, "h2"); - MYSQL(0), - POSTGRESQL(1), - HIVE(2), - SPARK(3), - CLICKHOUSE(4), - ORACLE(5), - SQLSERVER(6), - DB2(7), - PRESTO(8), - H2(9); - - DbType(int code) { + @EnumValue + private final int code; + private final String descp; + + DbType(int code, String descp) { this.code = code; + this.descp = descp; } - private final int code; - public int getCode() { return code; } + public String getDescp() { + return descp; + } + private static final Map DB_TYPE_MAP = Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, Functions.identity())); @@ -56,4 +63,5 @@ public enum DbType { } return null; } + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResourceType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java similarity index 91% rename from 
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResourceType.java rename to dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java index 043402c2ae4aed614556af66291d87d86815592a..8e80802b857b53c398096a8d61e8253b3c71cdb1 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ResourceType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/ResourceType.java @@ -14,22 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.enums; + +package org.apache.dolphinscheduler.spi.enums; import com.baomidou.mybatisplus.annotation.EnumValue; /** * resource type */ -public enum ResourceType { +public enum ResourceType { /** * 0 file, 1 udf */ FILE(0, "file"), UDF(1, "udf"); - - ResourceType(int code, String descp){ + ResourceType(int code, String descp) { this.code = code; this.descp = descp; } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/exception/PluginNotFoundException.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/exception/PluginNotFoundException.java deleted file mode 100644 index 2153299f17a00e7a95c95f5f11127a44323f61f1..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/exception/PluginNotFoundException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.exception; - -public class PluginNotFoundException extends RuntimeException { - - private static final long serialVersionUID = -5487812425126112159L; - - public PluginNotFoundException(String message, Throwable cause) { - super(message, cause); - } - - public PluginNotFoundException(String message) { - super(message); - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/AbstractDolphinPluginManager.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/AbstractDolphinPluginManager.java deleted file mode 100644 index b1d9592e68a5361f243d2c6998a1dbb939c6fce4..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/AbstractDolphinPluginManager.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.plugin; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; - -public abstract class AbstractDolphinPluginManager { - - public abstract void installPlugin(DolphinSchedulerPlugin dolphinSchedulerPlugin); -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginClassLoader.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginClassLoader.java deleted file mode 100644 index 55b7b410ce51eaf35cc582b0d3479a3ced077501..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginClassLoader.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.plugin; - -import static java.util.Objects.requireNonNull; - -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.Enumeration; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -import com.google.common.collect.ImmutableList; - -class DolphinPluginClassLoader - extends URLClassLoader { - private static final ClassLoader PLATFORM_CLASS_LOADER = findPlatformClassLoader(); - - private final ClassLoader spiClassLoader; - private final List spiPackages; - private final List spiResources; - - DolphinPluginClassLoader( - List urls, - ClassLoader spiClassLoader, - Iterable spiPackages) { - this(urls, - spiClassLoader, - spiPackages, - StreamSupport.stream(spiPackages.spliterator(), false).map(DolphinPluginClassLoader::classNameToResource).collect(Collectors.toList())); - } - - private DolphinPluginClassLoader( - List urls, - ClassLoader spiClassLoader, - Iterable spiPackages, - Iterable spiResources) { - // plugins should not have access to the system (application) class loader - super(urls.toArray(new URL[0]), PLATFORM_CLASS_LOADER); - this.spiClassLoader = requireNonNull(spiClassLoader, "spiClassLoader is null"); - this.spiPackages = ImmutableList.copyOf(spiPackages); - this.spiResources = ImmutableList.copyOf(spiResources); - } - - @Override - protected Class loadClass(String name, boolean resolve) - throws ClassNotFoundException { - // grab the magic lock - synchronized (getClassLoadingLock(name)) { - // Check if class is in the loaded classes cache - Class cachedClass = findLoadedClass(name); - if (cachedClass != null) { - return resolveClass(cachedClass, resolve); - } - - // If this is an SPI class, only check SPI class loader - if (isSpiClass(name)) { - return resolveClass(spiClassLoader.loadClass(name), resolve); - } - - // Look for 
class locally - return super.loadClass(name, resolve); - } - } - - private Class resolveClass(Class clazz, boolean resolve) { - if (resolve) { - resolveClass(clazz); - } - return clazz; - } - - @Override - public URL getResource(String name) { - // If this is an SPI resource, only check SPI class loader - if (isSpiResource(name)) { - return spiClassLoader.getResource(name); - } - - // Look for resource locally - return super.getResource(name); - } - - @Override - public Enumeration getResources(String name) - throws IOException { - // If this is an SPI resource, use SPI resources - if (isSpiClass(name)) { - return spiClassLoader.getResources(name); - } - - // Use local resources - return super.getResources(name); - } - - private boolean isSpiClass(String name) { - return spiPackages.stream().anyMatch(name::startsWith); - } - - private boolean isSpiResource(String name) { - return spiResources.stream().anyMatch(name::startsWith); - } - - private static String classNameToResource(String className) { - return className.replace('.', '/'); - } - - @SuppressWarnings("JavaReflectionMemberAccess") - private static ClassLoader findPlatformClassLoader() { - try { - // use platform class loader on Java 9 - Method method = ClassLoader.class.getMethod("getPlatformClassLoader"); - return (ClassLoader) method.invoke(null); - } catch (NoSuchMethodException ignored) { - // use null class loader on Java 8 - return null; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new AssertionError(e); - } - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginDiscovery.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginDiscovery.java deleted file mode 100644 index c09bf71da20f4f032408aab55fd2941c09a08fc5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginDiscovery.java +++ /dev/null @@ -1,150 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.plugin; - -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.nio.file.Files.createDirectories; -import static java.nio.file.Files.walkFileTree; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStreamWriter; -import java.io.UncheckedIOException; -import java.io.Writer; -import java.nio.file.FileVisitResult; -import java.nio.file.Path; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.List; -import java.util.Set; -import java.util.StringJoiner; -import java.util.stream.Collectors; - -import org.objectweb.asm.ClassReader; -import org.sonatype.aether.artifact.Artifact; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.common.io.ByteStreams; - -/** - * The role of this class is to load the plugin class during development - */ -final class DolphinPluginDiscovery { - // Windows: \target\classes, Unix-like: /target/classes 
- private static final String ARTIFACT_DIR = new StringJoiner(File.separator, File.separator, "") - .add("target").add("classes").toString(); - private static final String JAVA_CLASS_FILE_SUFFIX = ".class"; - - // Windows: "META-INF\services\" + DolphinSchedulerPlugin.class.getName() - // Unix-like: "META-INF/services/" + DolphinSchedulerPlugin.class.getName() - private static final String PLUGIN_SERVICES_FILE = String.join(File.separator, "META-INF", - "services", DolphinSchedulerPlugin.class.getName()); - - private DolphinPluginDiscovery() { - } - - static Set discoverPluginsFromArtifact(Artifact artifact, ClassLoader classLoader) - throws IOException { - if (!artifact.getExtension().equals("dolphinscheduler-plugin")) { - throw new RuntimeException("Unexpected extension for main artifact: " + artifact); - } - - File file = artifact.getFile(); - if (!file.getPath().endsWith(ARTIFACT_DIR)) { - throw new RuntimeException("Unexpected file for main artifact: " + file); - } - if (!file.isDirectory()) { - throw new RuntimeException("Main artifact file is not a directory: " + file); - } - - if (new File(file, PLUGIN_SERVICES_FILE).exists()) { - return ImmutableSet.of(); - } - - return listClasses(file.toPath()).stream() - .filter(name -> classInterfaces(name, classLoader).contains(DolphinSchedulerPlugin.class.getName())) - .collect(Collectors.toSet()); - } - - static void writePluginServices(Iterable plugins, File root) - throws IOException { - Path path = root.toPath().resolve(PLUGIN_SERVICES_FILE); - createDirectories(path.getParent()); - try (Writer out = new OutputStreamWriter(new FileOutputStream(path.toFile()), UTF_8)) { - for (String plugin : plugins) { - out.write(plugin + "\n"); - } - } - } - - private static List listClasses(Path base) - throws IOException { - ImmutableList.Builder list = ImmutableList.builder(); - walkFileTree(base, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attributes) { - if 
(file.getFileName().toString().endsWith(JAVA_CLASS_FILE_SUFFIX)) { - String name = file.subpath(base.getNameCount(), file.getNameCount()).toString(); - list.add(convertClassName(name.substring(0, name.length() - JAVA_CLASS_FILE_SUFFIX.length()))); - } - return FileVisitResult.CONTINUE; - } - }); - return list.build(); - } - - private static List classInterfaces(String name, ClassLoader classLoader) { - ImmutableList.Builder list = ImmutableList.builder(); - ClassReader reader = readClass(name, classLoader); - for (String binaryName : reader.getInterfaces()) { - list.add(javaName(binaryName)); - } - if (reader.getSuperName() != null) { - list.addAll(classInterfaces(javaName(reader.getSuperName()), classLoader)); - } - return list.build(); - } - - private static ClassReader readClass(String name, ClassLoader classLoader) { - try (InputStream in = classLoader.getResourceAsStream(binaryName(name) + JAVA_CLASS_FILE_SUFFIX)) { - if (in == null) { - throw new RuntimeException("Failed to read class: " + name); - } - return new ClassReader(ByteStreams.toByteArray(in)); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - private static String binaryName(String javaName) { - return javaName.replace('.', '/'); - } - - private static String javaName(String binaryName) { - return binaryName.replace('/', '.'); - } - - private static String convertClassName(String pathName) { - return pathName.replace(File.separatorChar, '.'); - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginLoader.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginLoader.java deleted file mode 100644 index 448da7f7994a759ec9e3a568c222bde2ae4bb1fa..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginLoader.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one 
or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.plugin; - -import static java.lang.String.format; -import static java.util.Objects.requireNonNull; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.classloader.ThreadContextClassLoader; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.ServiceLoader; -import java.util.Set; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.sonatype.aether.artifact.Artifact; - -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Ordering; - -import io.airlift.resolver.ArtifactResolver; - -/** - * Plugin Loader - * Load Plugin from pom when development and run server in IDE - * Load Plugin from the plugin directory when running on the server - */ -public class DolphinPluginLoader { - private static final Logger logger = LoggerFactory.getLogger(DolphinPluginLoader.class); - - /** - * All third-party jar packages used in the classes which in spi 
package need to be add - */ - private static final ImmutableList DOLPHIN_SPI_PACKAGES = ImmutableList.builder() - .add("org.apache.dolphinscheduler.spi.") - .add("com.fasterxml.jackson.") - .add("org.slf4j") - .build(); - - private final File installedPluginsDir; - private final List configPlugins; - private ArtifactResolver resolver = null; - private final List dolphinPluginManagerList; - - public DolphinPluginLoader(DolphinPluginManagerConfig config, List dolphinPluginManagerList) { - installedPluginsDir = config.getInstalledPluginsDir(); - if (config.getPlugins() == null) { - this.configPlugins = ImmutableList.of(); - } else { - this.configPlugins = ImmutableList.copyOf(config.getPlugins()); - } - - this.dolphinPluginManagerList = requireNonNull(dolphinPluginManagerList, "dolphinPluginManagerList is null"); - if (configPlugins != null && configPlugins.size() > 0) { - this.resolver = new ArtifactResolver(config.getMavenLocalRepository(), config.getMavenRemoteRepository()); - } - } - - public void loadPlugins() - throws Exception { - for (File file : listPluginInstanceDirs(installedPluginsDir)) { - if (file.isDirectory()) { - loadPlugin(file.getAbsolutePath()); - } - } - - for (String plugin : configPlugins) { - loadPlugin(plugin); - } - } - - private void loadPlugin(String plugin) - throws Exception { - logger.info("-- Loading plugin {} --", plugin); - URLClassLoader pluginClassLoader = buildPluginClassLoader(plugin); - try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(pluginClassLoader)) { - loadPlugin(pluginClassLoader); - } - logger.info("-- Finished loading plugin {} --", plugin); - } - - private void loadPlugin(URLClassLoader pluginClassLoader) { - ServiceLoader serviceLoader = ServiceLoader.load(DolphinSchedulerPlugin.class, pluginClassLoader); - List plugins = ImmutableList.copyOf(serviceLoader); - Preconditions.checkState(!plugins.isEmpty(), "No service providers the plugin %s", DolphinSchedulerPlugin.class.getName()); - for 
(DolphinSchedulerPlugin plugin : plugins) { - logger.info("Installing {}", plugin.getClass().getName()); - for (AbstractDolphinPluginManager dolphinPluginManager : dolphinPluginManagerList) { - dolphinPluginManager.installPlugin(plugin); - } - } - } - - private URLClassLoader buildPluginClassLoader(String plugin) - throws Exception { - File file = new File(plugin); - - if (!file.isDirectory() && (file.getName().equals("pom.xml") || file.getName().endsWith(".pom"))) { - return buildPluginClassLoaderFromPom(file); - } - if (file.isDirectory()) { - return buildPluginClassLoaderFromDirectory(file); - } else { - throw new IllegalArgumentException(format("plugin must be a pom file or directory %s .", plugin)); - } - } - - private URLClassLoader buildPluginClassLoaderFromPom(File pomFile) - throws Exception { - List artifacts = resolver.resolvePom(pomFile); - URLClassLoader classLoader = createClassLoader(artifacts, pomFile.getPath()); - - Artifact artifact = artifacts.get(0); - Set plugins = DolphinPluginDiscovery.discoverPluginsFromArtifact(artifact, classLoader); - if (!plugins.isEmpty()) { - DolphinPluginDiscovery.writePluginServices(plugins, artifact.getFile()); - } - - return classLoader; - } - - private URLClassLoader buildPluginClassLoaderFromDirectory(File dir) - throws Exception { - logger.info("Classpath for {}:", dir.getName()); - List urls = new ArrayList<>(); - for (File file : listPluginInstanceJars(dir)) { - logger.info(" {}", file); - urls.add(file.toURI().toURL()); - } - return createClassLoader(urls); - } - - private URLClassLoader createClassLoader(List artifacts, String name) - throws IOException { - logger.info("Classpath for {}:", name); - List urls = new ArrayList<>(); - for (Artifact artifact : sortArtifacts(artifacts)) { - if (artifact.getFile() == null) { - throw new RuntimeException("Could not resolve artifact: " + artifact); - } - File file = artifact.getFile().getCanonicalFile(); - logger.info(" {}", file); - urls.add(file.toURI().toURL()); - 
} - return createClassLoader(urls); - } - - private URLClassLoader createClassLoader(List urls) { - ClassLoader parent = getClass().getClassLoader(); - return new DolphinPluginClassLoader(urls, parent, DOLPHIN_SPI_PACKAGES); - } - - private static List listPluginInstanceDirs(File installedPluginsDir) { - if (installedPluginsDir != null && installedPluginsDir.isDirectory()) { - File[] files = installedPluginsDir.listFiles(); - if (files != null) { - Optional isNotDir = Arrays.stream(files).filter(file -> !file.isDirectory()).findAny(); - if (isNotDir.isPresent()) { - return ImmutableList.of(installedPluginsDir); - } else { - Arrays.sort(files); - return ImmutableList.copyOf(files); - } - } - } - return ImmutableList.of(); - } - - private static List listPluginInstanceJars(File installedPluginsDir) { - if (installedPluginsDir != null && installedPluginsDir.isDirectory()) { - File[] files = installedPluginsDir.listFiles(); - if (files != null) { - return ImmutableList.copyOf(Arrays.stream(files).filter(file -> file.isFile() && file.getName().endsWith(".jar")) - .collect(Collectors.toList())); - } - } - return ImmutableList.of(); - } - - private static List sortArtifacts(List artifacts) { - List list = new ArrayList<>(artifacts); - list.sort(Ordering.natural().nullsLast().onResultOf(Artifact::getFile)); - return list; - } - -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginManagerConfig.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginManagerConfig.java deleted file mode 100644 index 518f90e810232f2a728351413b7b6afe6aca648a..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/plugin/DolphinPluginManagerConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.plugin; - -import static java.lang.String.format; -import static java.util.Objects.requireNonNull; - -import java.io.File; -import java.util.List; - -import com.google.common.base.Splitter; -import com.google.common.collect.ImmutableList; - -/** - * Dolphin Scheduler Plugin Manager Config - */ -public class DolphinPluginManagerConfig { - - /** - * The dir of the Alert Plugin in. - * When AlertServer is running on the server, it will load the Alert Plugin from this directory. - */ - private File installedPluginsDir; - - /** - * The plugin should be load. - * The installedPluginsDir is empty when we development and run server in IDEA. Then we can config which plugin should be load by param name alert.plugin.binding in the alert.properties file - */ - private List plugins; - - /** - * Development, When AlertServer is running on IDE, AlertPluginLoad can load Alert Plugin from local Repository. 
- */ - private String mavenLocalRepository = System.getProperty("user.home") + "/.m2/repository"; - private List mavenRemoteRepository = ImmutableList.of("http://repo1.maven.org/maven2/"); - - File getInstalledPluginsDir() { - return installedPluginsDir; - } - - /** - * @param pluginDir plugin directory - */ - public void setInstalledPluginsDir(String pluginDir) { - requireNonNull(pluginDir, "pluginDir can not be null"); - File pluginDirFile = new File(pluginDir); - if (!pluginDirFile.exists()) { - throw new IllegalArgumentException(format("plugin dir not exists ! %s", pluginDirFile.getPath())); - } - this.installedPluginsDir = pluginDirFile; - } - - public List getPlugins() { - return plugins; - } - - public DolphinPluginManagerConfig setPlugins(List plugins) { - this.plugins = plugins; - return this; - } - - /** - * When development and run server in IDE, this method can set plugins in alert.properties . - * Then when you start AlertServer in IDE, the plugin can be load. - * eg: - * file: alert.properties - * alert.plugin=\ - * ../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml, \ - * ../dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml - * - * @param plugins plugins - * @return DolphinPluginManagerConfig - */ - public DolphinPluginManagerConfig setPlugins(String plugins) { - if (plugins == null) { - this.plugins = null; - } else { - this.plugins = ImmutableList.copyOf(Splitter.on(',').omitEmptyStrings().trimResults().split(plugins)); - } - return this; - } - - String getMavenLocalRepository() { - return mavenLocalRepository; - } - - public void setMavenLocalRepository(String mavenLocalRepository) { - this.mavenLocalRepository = mavenLocalRepository; - } - - List getMavenRemoteRepository() { - return mavenRemoteRepository; - } - - public DolphinPluginManagerConfig setMavenRemoteRepository(List mavenRemoteRepository) { - this.mavenRemoteRepository = mavenRemoteRepository; - return this; - } - - public DolphinPluginManagerConfig 
setMavenRemoteRepository(String mavenRemoteRepository) { - this.mavenRemoteRepository = ImmutableList.copyOf(Splitter.on(',').omitEmptyStrings().trimResults().split(mavenRemoteRepository)); - return this; - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ListenerManager.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ListenerManager.java deleted file mode 100644 index ee134058f08cc001e624644eac71006681b3c419..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/ListenerManager.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.register; - -import java.util.HashMap; - -/** - * The registry node monitors subscriptions - */ -public class ListenerManager { - - /** - * All message subscriptions must be subscribed uniformly at startup. 
- * A node path only supports one listener - */ - private static HashMap listeners = new HashMap<>(); - - /** - * Check whether the key has been monitored - */ - public static boolean checkHasListeners(String path) { - return null != listeners.get(path); - } - - /** - * add listener(A node can only be monitored by one listener) - */ - public static void addListener(String path, SubscribeListener listener) { - listeners.put(path, listener); - } - - /** - * remove listener - */ - public static void removeListener(String path) { - listeners.remove(path); - } - - /** - * - *After the data changes, it is distributed to the corresponding listener for processing - */ - public static void dataChange(String key,String path, DataChangeEvent dataChangeEvent) { - SubscribeListener notifyListener = listeners.get(key); - if (null == notifyListener) { - return; - } - notifyListener.notify(path,dataChangeEvent); - } - -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/Registry.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/Registry.java deleted file mode 100644 index 11fe25a891ea2d5d76b54759c3ab374fe01a1d57..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/Registry.java +++ /dev/null @@ -1,102 +0,0 @@ -package org.apache.dolphinscheduler.spi.register;/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import java.util.List; -import java.util.Map; - -/** - * The final display of all registry component data must follow a tree structure. - * Therefore, some registry may need to do a layer of internal conversion, such as Etcd - */ -public interface Registry { - - /** - * initialize registry center. - */ - void init(Map registerData); - - /** - * close registry - */ - void close(); - - /** - * subscribe registry data change, a path can only be monitored by one listener - */ - boolean subscribe(String path, SubscribeListener subscribeListener); - - /** - * unsubscribe - */ - void unsubscribe(String path); - - /** - * Registry status monitoring, globally unique. Only one is allowed to subscribe. - */ - void addConnectionStateListener(RegistryConnectListener registryConnectListener); - - /** - * get key - */ - String get(String key); - - /** - * delete - */ - void remove(String key); - - /** - * persist data - */ - void persist(String key, String value); - - /** - *persist ephemeral data - */ - void persistEphemeral(String key, String value); - - /** - * update data - */ - void update(String key, String value); - - /** - * get children keys - */ - List getChildren(String path); - - /** - * Judge node is exist or not. 
- */ - boolean isExisted(String key); - - /** - * delete kay - */ - boolean delete(String key); - - /** - * Obtain a distributed lock - * todo It is best to add expiration time, and automatically release the lock after expiration - */ - boolean acquireLock(String key); - - /** - * release key - */ - boolean releaseLock(String key); -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectState.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectState.java deleted file mode 100644 index e085e6d0910157e33dc787851dbf277eb19854cb..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryConnectState.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.register; - -/** - * All registry connection status must be converted to this - */ -public enum RegistryConnectState { - CONNECTED("connected", 1), - RECONNECTED("reconnected", 2), - SUSPENDED("suspended", 3), - LOST("lost", 4); - - private String description; - - private int state; - - RegistryConnectState(String description, int state) { - this.description = description; - this.state = state; - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryException.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryException.java deleted file mode 100644 index 884f0059107ed2724422ffb8fbe5bef86402972c..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/RegistryException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.spi.register; - -/** - * registry exception - */ -public class RegistryException extends RuntimeException { - - public RegistryException(String message, Throwable cause) { - super(message, cause); - } - - public RegistryException(String message) { - super(message); - } -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/SubscribeListener.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/SubscribeListener.java deleted file mode 100644 index 6a2f3d1b6ef47eaca1ce74b23b11a8531916a149..0000000000000000000000000000000000000000 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/register/SubscribeListener.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.spi.register; - -/** - * Registration center subscription. 
All listeners must implement this interface - */ -public interface SubscribeListener { - - /** - * Processing logic when the subscription node changes - */ - void notify(String path, DataChangeEvent dataChangeEvent); - -} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractParameters.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractParameters.java index 55f5203967d55f2d2ba8ccbbec5c0260207a7ebb..2da6a2f5b60a23eb28f84901af380f4bec9c6ce2 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractParameters.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/AbstractParameters.java @@ -17,19 +17,13 @@ package org.apache.dolphinscheduler.spi.task; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import org.apache.commons.collections4.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; +import java.util.*; /** * job params related class @@ -140,9 +134,8 @@ public abstract class AbstractParameters implements IParameters { public List> getListMapByString(String json) { List> allParams = new ArrayList<>(); ArrayNode paramsByJson = JSONUtils.parseArray(json); - Iterator listIterator = paramsByJson.iterator(); - while (listIterator.hasNext()) { - Map param = JSONUtils.toMap(listIterator.next().toString(), String.class, String.class); + for (JsonNode jsonNode : paramsByJson) { + Map param = JSONUtils.toMap(jsonNode.toString()); allParams.add(param); } return allParams; diff 
--git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/ExecutionStatus.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/ExecutionStatus.java index 836c0cd3d9d454256d59702f6920ff664ccc1513..6f9503ed302022aa69a15b8fb7587daa7235b719 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/ExecutionStatus.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/ExecutionStatus.java @@ -96,8 +96,7 @@ public enum ExecutionStatus { * @return status */ public boolean typeIsFinished() { - return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause() - || typeIsStop(); + return typeIsSuccess() || typeIsFailure() || typeIsCancel(); } /** @@ -142,7 +141,7 @@ public enum ExecutionStatus { * @return status */ public boolean typeIsCancel() { - return this == KILL || this == STOP; + return this == KILL || this == STOP || this == PAUSE; } public int getCode() { diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java index 4e1002e7258e62b50a267f0bcd7a0d3a74382820..a3a1f922aaa472a45143d72f8c0c12d69f8351f3 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java @@ -124,9 +124,27 @@ public class TaskConstants { public static final String RWXR_XR_X = "rwxr-xr-x"; /** - * task log info format + * Task Logger Thread's name + */ + public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; + + /** + * task logger thread name format + */ + public static final String TASK_LOGGER_THREAD_NAME_FORMAT = TASK_LOGGER_THREAD_NAME + "-%s"; + + public static final String MASTER_COMMON_TASK_LOGGER_THREAD_NAME = "CommonTask"; + + public static final String 
MASTER_COMMON_TASK_LOGGER_THREAD_NAME_FORMAT = MASTER_COMMON_TASK_LOGGER_THREAD_NAME + "-%s"; + /** + * task log logger name + */ + public static final String TASK_LOG_LOGGER_NAME = "TaskLogLogger"; + + /** + * task log logger name format */ - public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s"; + public static final String TASK_LOG_LOGGER_NAME_FORMAT = TASK_LOG_LOGGER_NAME + "-%s"; /** * date format of yyyyMMdd @@ -259,7 +277,7 @@ public class TaskConstants { * driver */ public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver"; - public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; + public static final String COM_MYSQL_CJ_JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver"; public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver"; public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver"; @@ -320,8 +338,12 @@ public class TaskConstants { */ public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; + public static final String GET_OUTPUT_LOG_SERVICE = "-getOutputLogService"; + /** - * Task Logger Thread's name + * hdfs/s3 configuration + * resource.upload.path */ - public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; -} + public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path"; + +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskExecutionContextCacheManager.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskExecutionContextCacheManager.java index c4347d6beea257efd8d93c2e51a06ae254b988a3..2b5df36e2c5b5d166a392ae800857aa043376a3d 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskExecutionContextCacheManager.java +++ 
b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskExecutionContextCacheManager.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.spi.task; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; +import java.util.Collection; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -66,4 +67,16 @@ public class TaskExecutionContextCacheManager { taskRequestContextCache.computeIfPresent(request.getTaskInstanceId(), (k, v) -> request); return taskRequestContextCache.containsKey(request.getTaskInstanceId()); } + + public static Collection getAllTaskRequestList() { + return taskRequestContextCache.values(); + } + + public static boolean statusIsStop(Integer taskInstanceId) { + TaskRequest taskRequest = taskRequestContextCache.get(taskInstanceId); + if (taskRequest == null || taskRequest.getCurrentExecutionStatus() == null) { + return false; + } + return taskRequest.getCurrentExecutionStatus().typeIsStop(); + } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java index b35598d42eaaa7c3b2b8dbea34d6ae0d72381e67..56ee124866db8cba494e033965cd929249c97963 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/BusinessTimeUtils.java @@ -26,6 +26,7 @@ import static org.apache.dolphinscheduler.spi.utils.DateUtils.addDays; import static org.apache.dolphinscheduler.spi.utils.DateUtils.format; import org.apache.dolphinscheduler.spi.enums.CommandType; +import org.apache.dolphinscheduler.spi.utils.DateUtils; import java.util.Date; import java.util.HashMap; @@ -47,9 +48,10 @@ public class BusinessTimeUtils { * @return business time */ public static Map getBusinessTime(CommandType commandType, Date 
runTime) { - Date businessDate = runTime; + Date businessDate; switch (commandType) { case COMPLEMENT_DATA: + businessDate = DateUtils.addDays(runTime, -1); break; case START_PROCESS: case START_CURRENT_TASK_PROCESS: diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java index 069f941ff8182fec4f85d5d2f9da82e381172ed1..dc869c9cfd0044821dce7efc837c7a9c6cca6b71 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/ParamUtils.java @@ -19,7 +19,9 @@ package org.apache.dolphinscheduler.spi.task.paramparser; import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_TASK_EXECUTE_PATH; import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_TASK_INSTANCE_ID; - +import static org.apache.dolphinscheduler.spi.utils.Constants.GLOBAL_PARAMS_PREFIX; +import static org.apache.dolphinscheduler.spi.utils.Constants.START_UP_PARAMS_PREFIX; +import org.apache.commons.collections4.MapUtils; import org.apache.dolphinscheduler.spi.enums.CommandType; import org.apache.dolphinscheduler.spi.enums.DataType; import org.apache.dolphinscheduler.spi.task.AbstractParameters; @@ -60,12 +62,14 @@ public class ParamUtils { CommandType commandType = CommandType.of(taskExecutionContext.getCmdTypeIfComplement()); Date scheduleTime = taskExecutionContext.getScheduleTime(); + Map convertedParams = new HashMap<>(); + // combining local and global parameters Map localParams = parameters.getLocalParametersMap(); Map varParams = parameters.getVarPoolMap(); - if (globalParams == null && localParams == null) { + if (MapUtils.isEmpty(globalParams) && MapUtils.isEmpty(localParams) && MapUtils.isEmpty(varParams)) { return null; } // if it is a complement, @@ -75,8 +79,7 @@ 
public class ParamUtils { .getBusinessTime(commandType, scheduleTime); - if (globalParamsMap != null) { - + if (MapUtils.isNotEmpty(globalParamsMap)) { params.putAll(globalParamsMap); } @@ -87,12 +90,21 @@ public class ParamUtils { if (globalParams != null && localParams != null) { globalParams.putAll(localParams); + for (Map.Entry entry : localParams.entrySet()) { + convertedParams.put(entry.getKey(), entry.getValue()); + } } else if (globalParams == null && localParams != null) { globalParams = localParams; + convertedParams = localParams; } if (varParams != null) { - varParams.putAll(globalParams); + if (globalParams != null) { + varParams.putAll(globalParams); + } globalParams = varParams; + for (Map.Entry entry : varParams.entrySet()) { + convertedParams.put(entry.getKey(), entry.getValue()); + } } Iterator> iter = globalParams.entrySet().iterator(); while (iter.hasNext()) { @@ -111,9 +123,20 @@ public class ParamUtils { val = ParameterUtils.convertParameterPlaceholders(val, params); property.setValue(val); } + + if (property.getProp().startsWith(START_UP_PARAMS_PREFIX)) { + property.setProp(property.getProp().replaceFirst(START_UP_PARAMS_PREFIX, "")); + convertedParams.put(property.getProp(), property); + } else if (property.getProp().startsWith(GLOBAL_PARAMS_PREFIX)) { + String prop = property.getProp().replaceFirst(GLOBAL_PARAMS_PREFIX, ""); + if (!convertedParams.containsKey(prop)) { + property.setProp(prop); + convertedParams.put(prop, property); + } + } } - return globalParams; + return convertedParams; } /** diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java index 90ee18311a72c8ed1ca3491edba824f23af0004b..89ca56faf97a9775ab23dcd93b274018e72536b0 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java +++ 
b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/paramparser/PlaceholderUtils.java @@ -17,6 +17,9 @@ package org.apache.dolphinscheduler.spi.task.paramparser; +import static org.apache.dolphinscheduler.spi.utils.Constants.GLOBAL_PARAMS_PREFIX; +import static org.apache.dolphinscheduler.spi.utils.Constants.START_UP_PARAMS_PREFIX; + import java.util.Map; import org.slf4j.Logger; @@ -92,7 +95,9 @@ public class PlaceholderUtils { @Override public String resolvePlaceholder(String placeholderName) { try { - return paramsMap.get(placeholderName); + String startUpPlaceholderName = START_UP_PARAMS_PREFIX + placeholderName; + String globalPlaceholderName = GLOBAL_PARAMS_PREFIX + placeholderName; + return paramsMap.getOrDefault(startUpPlaceholderName, paramsMap.getOrDefault(placeholderName, paramsMap.getOrDefault(globalPlaceholderName, null))); } catch (Exception ex) { logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); return null; diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskExecutionContext.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskExecutionContext.java index b712b50a7f5c79ac60897fafc34e60c52bdb2e23..c02cd4361c8a8a1720e5fd3d422cdec33ee034e1 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskExecutionContext.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/SQLTaskExecutionContext.java @@ -17,12 +17,13 @@ package org.apache.dolphinscheduler.spi.task.request; -import org.apache.dolphinscheduler.spi.task.UdfFuncBean.UdfFuncDeserializer; +import org.apache.dolphinscheduler.spi.task.request.UdfFuncRequest.UdfFuncDeserializer; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import java.io.Serializable; import java.util.Map; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; + /** * SQL Task 
ExecutionContext */ @@ -44,6 +45,10 @@ public class SQLTaskExecutionContext implements Serializable { @JsonDeserialize(keyUsing = UdfFuncDeserializer.class) private Map udfFuncTenantCodeMap; + /** + * DefaultFS + */ + private String defaultFS; public int getWarningGroupId() { return warningGroupId; @@ -69,12 +74,20 @@ public class SQLTaskExecutionContext implements Serializable { this.connectionParams = connectionParams; } + public String getDefaultFS() { + return defaultFS; + } + + public void setDefaultFS(String defaultFS) { + this.defaultFS = defaultFS; + } + @Override public String toString() { - return "SQLTaskExecutionContext{" + - "warningGroupId=" + warningGroupId + - ", connectionParams='" + connectionParams + '\'' + - ", udfFuncTenantCodeMap=" + udfFuncTenantCodeMap + - '}'; + return "SQLTaskExecutionContext{" + + "warningGroupId=" + warningGroupId + + ", connectionParams='" + connectionParams + '\'' + + ", udfFuncTenantCodeMap=" + udfFuncTenantCodeMap + + ", defaultFS='" + defaultFS + '\'' + '}'; } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java index 3f0f901454702825ac15d02376636cb53e1bda98..76cbcf8b08a70178f02053cbb28f78037c3835cf 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/TaskRequest.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.spi.task.request; import org.apache.dolphinscheduler.spi.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.spi.task.ExecutionStatus; import org.apache.dolphinscheduler.spi.task.Property; import java.util.Date; @@ -148,6 +149,11 @@ public class TaskRequest { */ private String envFile; + /** + * environmentConfig + */ + private String environmentConfig; + /** * definedParams */ @@ -178,6 
+184,11 @@ public class TaskRequest { */ private int delayTime; + /** + * current execution status + */ + private ExecutionStatus currentExecutionStatus; + /** * Task Logger name should be like: Task-{processDefinitionId}-{processInstanceId}-{taskInstanceId} */ @@ -412,6 +423,14 @@ public class TaskRequest { this.envFile = envFile; } + public String getEnvironmentConfig() { + return environmentConfig; + } + + public void setEnvironmentConfig(String config) { + this.environmentConfig = config; + } + public Map getDefinedParams() { return definedParams; } @@ -460,6 +479,14 @@ public class TaskRequest { this.delayTime = delayTime; } + public ExecutionStatus getCurrentExecutionStatus() { + return currentExecutionStatus; + } + + public void setCurrentExecutionStatus(ExecutionStatus currentExecutionStatus) { + this.currentExecutionStatus = currentExecutionStatus; + } + public SQLTaskExecutionContext getSqlTaskExecutionContext() { return sqlTaskExecutionContext; } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java index d6e148174e744f9b0badcb2b990f72a31ef225d6..8c848b4e29cd2f14e973952bd557956b6262a528 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java @@ -17,18 +17,9 @@ package org.apache.dolphinscheduler.spi.utils; -import org.apache.commons.beanutils.BeanMap; - -import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; /** * Provides utility methods and decorators for {@link Collection} instances. 
@@ -48,118 +39,6 @@ public class CollectionUtils { throw new UnsupportedOperationException("Construct CollectionUtils"); } - /** - * The load factor used when none specified in constructor. - */ - static final float DEFAULT_LOAD_FACTOR = 0.75f; - - /** - * Returns a new {@link Collection} containing a minus a subset of - * b. Only the elements of b that satisfy the predicate - * condition, p are subtracted from a. - * - *

The cardinality of each element e in the returned {@link Collection} - * that satisfies the predicate condition will be the cardinality of e in a - * minus the cardinality of e in b, or zero, whichever is greater.

- *

The cardinality of each element e in the returned {@link Collection} that does not - * satisfy the predicate condition will be equal to the cardinality of e in a.

- * - * @param a the collection to subtract from, must not be null - * @param b the collection to subtract, must not be null - * @param T - * @return a new collection with the results - * @see Collection#removeAll - */ - public static Collection subtract(Set a, Set b) { - return org.apache.commons.collections4.CollectionUtils.subtract(a, b); - } - - public static boolean isNotEmpty(Collection coll) { - return !isEmpty(coll); - } - - public static boolean isEmpty(Collection coll) { - return coll == null || coll.isEmpty(); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @return string to map - */ - public static Map stringToMap(String str, String separator) { - return stringToMap(str, separator, ""); - } - - /** - * String to map - * - * @param str string - * @param separator separator - * @param keyPrefix prefix - * @return string to map - */ - public static Map stringToMap(String str, String separator, String keyPrefix) { - - Map emptyMap = new HashMap<>(0); - if (StringUtils.isEmpty(str)) { - return emptyMap; - } - if (StringUtils.isEmpty(separator)) { - return emptyMap; - } - String[] strings = str.split(separator); - int initialCapacity = (int)(strings.length / DEFAULT_LOAD_FACTOR) + 1; - Map map = new HashMap<>(initialCapacity); - for (int i = 0; i < strings.length; i++) { - String[] strArray = strings[i].split("="); - if (strArray.length != 2) { - return emptyMap; - } - //strArray[0] KEY strArray[1] VALUE - if (StringUtils.isEmpty(keyPrefix)) { - map.put(strArray[0], strArray[1]); - } else { - map.put(keyPrefix + strArray[0], strArray[1]); - } - } - return map; - } - - /** - * Transform item in collection - * - * @param collection origin collection - * @param transformFunc transform function - * @param origin item type - * @param target type - * @return transform list - */ - public static List transformToList(Collection collection, Function transformFunc) { - if (isEmpty(collection)) { - return new ArrayList<>(); - } 
- return collection.stream().map(transformFunc).collect(Collectors.toList()); - } - - /** - * Collect collection to map - * - * @param collection origin collection - * @param keyTransformFunction key transform function - * @param target k type - * @param value - * @return map - */ - public static Map collectionToMap(Collection collection, Function keyTransformFunction) { - if (isEmpty(collection)) { - return new HashMap<>(); - } - return collection.stream().collect(Collectors.toMap(keyTransformFunction, Function.identity())); - } - /** * Helper class to easily access cardinality properties of two collections. * @@ -286,35 +165,4 @@ public class CollectionUtils { return count; } - /** - * Removes certain attributes of each object in the list - * - * @param originList origin list - * @param exclusionSet exclusion set - * @param T - * @return removes certain attributes of each object in the list - */ - public static List> getListByExclusion(List originList, Set exclusionSet) { - List> instanceList = new ArrayList<>(); - if (exclusionSet == null) { - exclusionSet = new HashSet<>(); - } - if (originList == null) { - return instanceList; - } - Map instanceMap; - for (T instance : originList) { - BeanMap beanMap = new BeanMap(instance); - instanceMap = new LinkedHashMap<>(16, 0.75f, true); - for (Map.Entry entry : beanMap.entrySet()) { - if (exclusionSet.contains(entry.getKey())) { - continue; - } - instanceMap.put((String) entry.getKey(), entry.getValue()); - } - instanceList.add(instanceMap); - } - return instanceList; - } - } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java index a3a8b7756494f56d0ee0e6382e964475f2773dc3..33e34b37f8bfa06624e717e406e618d6e255d776 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java +++ 
b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java @@ -72,4 +72,136 @@ public class Constants { */ public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS"; + public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; + + public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; + + public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow"; + + /** + * java.security.krb5.conf + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java.security.krb5.conf.path + */ + public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path"; + + /** + * hadoop.security.authentication + */ + public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; + + /** + * hadoop.security.authentication + */ + public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; + + /** + * loginUserFromKeytab user + */ + public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username"; + + /** + * loginUserFromKeytab path + */ + public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path"; + + /** + * resource storage type + */ + public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type"; + + /** + * kerberos + */ + public static final String KERBEROS = "kerberos"; + + /** + * support hive datasource in one session + */ + public static final String SUPPORT_HIVE_ONE_SESSION = "support.hive.oneSession"; + + /** + * driver + */ + public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver"; + public static final String COM_MYSQL_CJ_JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; + public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; + public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = 
"org.apache.hive.jdbc.HiveDriver"; + public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver"; + public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.OracleDriver"; + public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; + public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver"; + public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver"; + + /** + * temporary parameter prefix + */ + public static final String START_UP_PARAMS_PREFIX = "startup-"; + public static final String GLOBAL_PARAMS_PREFIX = "global-"; + + /** + * validation Query + */ + public static final String POSTGRESQL_VALIDATION_QUERY = "select version()"; + public static final String MYSQL_VALIDATION_QUERY = "select 1"; + public static final String HIVE_VALIDATION_QUERY = "select 1"; + public static final String CLICKHOUSE_VALIDATION_QUERY = "select 1"; + public static final String ORACLE_VALIDATION_QUERY = "select 1 from dual"; + public static final String SQLSERVER_VALIDATION_QUERY = "select 1"; + public static final String DB2_VALIDATION_QUERY = "select 1 from sysibm.sysdummy1"; + public static final String PRESTO_VALIDATION_QUERY = "select 1"; + + /** + * jdbc url + */ + public static final String JDBC_MYSQL = "jdbc:mysql://"; + public static final String JDBC_POSTGRESQL = "jdbc:postgresql://"; + public static final String JDBC_HIVE_2 = "jdbc:hive2://"; + public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://"; + public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@"; + public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//"; + public static final String JDBC_SQLSERVER = "jdbc:sqlserver://"; + public static final String JDBC_DB2 = "jdbc:db2://"; + public static final String JDBC_PRESTO = "jdbc:presto://"; + + + public static final String ADDRESS = "address"; + public static final String DATABASE = 
"database"; + public static final String JDBC_URL = "jdbcUrl"; + public static final String PRINCIPAL = "principal"; + public static final String OTHER = "other"; + public static final String ORACLE_DB_CONNECT_TYPE = "connectType"; + public static final String KERBEROS_KRB5_CONF_PATH = "javaSecurityKrb5Conf"; + public static final String KERBEROS_KEY_TAB_USERNAME = "loginUserKeytabUsername"; + public static final String KERBEROS_KEY_TAB_PATH = "loginUserKeytabPath"; + + /** + * DOUBLE_SLASH // + */ + public static final String DOUBLE_SLASH = "//"; + + /** + * comma , + */ + public static final String COMMA = ","; + + /** + * COLON : + */ + public static final String COLON = ":"; + + /** + * AT SIGN + */ + public static final String AT_SIGN = "@"; + + /** + * datasource encryption salt + */ + public static final String DATASOURCE_ENCRYPTION_SALT_DEFAULT = "!@#$%^&*"; + public static final String DATASOURCE_ENCRYPTION_ENABLE = "datasource.encryption.enable"; + public static final String DATASOURCE_ENCRYPTION_SALT = "datasource.encryption.salt"; } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java index 9e6349864516749b5aa3519f9a727ba7b3d2b206..104e4487b3e0a087f0dce40c1f10e465a81110a4 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/DateUtils.java @@ -20,7 +20,6 @@ package org.apache.dolphinscheduler.spi.utils; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Calendar; import java.util.Date; @@ -41,7 +40,6 @@ public class DateUtils { static final long C3 = C2 * 1000L; static final long C4 = C3 * 60L; static final long C5 = C4 * 60L; - static final long C6 = C5 * 24L; /** * a 
default datetime formatter for the timestamp @@ -94,25 +92,6 @@ public class DateUtils { return Date.from(instant); } - /** - * get current date str - * - * @return date string - */ - public static String getCurrentTime() { - return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); - } - - /** - * get the date string in the specified format of the current time - * - * @param format date format - * @return date string - */ - public static String getCurrentTime(String format) { - return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); - } - /** * get the formatted date string * @@ -135,16 +114,6 @@ public class DateUtils { return localDateTime.format(DateTimeFormatter.ofPattern(format)); } - /** - * convert time to yyyy-MM-dd HH:mm:ss format - * - * @param date date - * @return date string - */ - public static String dateToString(Date date) { - return format(date, Constants.YYYY_MM_DD_HH_MM_SS); - } - /** * convert string to date and time * @@ -197,54 +166,6 @@ public class DateUtils { return Math.abs(d1.getTime() - d2.getTime()); } - /** - * get hours between two dates - * - * @param d1 date1 - * @param d2 date2 - * @return differ hours - */ - public static long diffHours(Date d1, Date d2) { - return (long) Math.ceil(diffMin(d1, d2) / 60.0); - } - - /** - * get minutes between two dates - * - * @param d1 date1 - * @param d2 date2 - * @return differ minutes - */ - public static long diffMin(Date d1, Date d2) { - return (long) Math.ceil(differSec(d1, d2) / 60.0); - } - - /** - * get the date of the specified date in the days before and after - * - * @param date date - * @param day day - * @return the date of the specified date in the days before and after - */ - public static Date getSomeDay(Date date, int day) { - Calendar calendar = Calendar.getInstance(); - calendar.setTime(date); - calendar.add(Calendar.DATE, day); - return calendar.getTime(); - } - - /** - * get the hour of day. 
- * - * @param date date - * @return hour of day - */ - public static int getHourIndex(Date date) { - Calendar calendar = Calendar.getInstance(); - calendar.setTime(date); - return calendar.get(Calendar.HOUR_OF_DAY); - } - /** * compare two dates * @@ -266,60 +187,6 @@ public class DateUtils { return stringToDate(schedule); } - /** - * format time to readable - * - * @param ms ms - * @return format time - */ - public static String format2Readable(long ms) { - - long days = MILLISECONDS.toDays(ms); - long hours = MILLISECONDS.toDurationHours(ms); - long minutes = MILLISECONDS.toDurationMinutes(ms); - long seconds = MILLISECONDS.toDurationSeconds(ms); - - return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); - - } - - /** - * format time to duration - * - * @param d1 d1 - * @param d2 d2 - * @return format time - */ - public static String format2Duration(Date d1, Date d2) { - if (d1 == null || d2 == null) { - return null; - } - return format2Duration(differMs(d1, d2)); - } - - /** - * format time to duration - * - * @param ms ms - * @return format time - */ - public static String format2Duration(long ms) { - - long days = MILLISECONDS.toDays(ms); - long hours = MILLISECONDS.toDurationHours(ms); - long minutes = MILLISECONDS.toDurationMinutes(ms); - long seconds = MILLISECONDS.toDurationSeconds(ms); - - StringBuilder strBuilder = new StringBuilder(); - strBuilder = days > 0 ? strBuilder.append(days).append("d").append(" ") : strBuilder; - strBuilder = hours > 0 ? strBuilder.append(hours).append("h").append(" ") : strBuilder; - strBuilder = minutes > 0 ? strBuilder.append(minutes).append("m").append(" ") : strBuilder; - strBuilder = seconds > 0 ? strBuilder.append(seconds).append("s") : strBuilder; - - return strBuilder.toString(); - - } - /** * get monday *

@@ -372,25 +239,6 @@ public class DateUtils { return cal.getTime(); } - /** - * get some hour of day - * - * @param date date - * @param offsetHour hours - * @return some hour of day - */ - public static Date getSomeHourOfDay(Date date, int offsetHour) { - Calendar cal = Calendar.getInstance(); - - cal.setTime(date); - cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + offsetHour); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - cal.set(Calendar.MILLISECOND, 0); - - return cal.getTime(); - } - /** * get last day of month * @@ -409,110 +257,18 @@ public class DateUtils { return cal.getTime(); } - /** - * return YYYY-MM-DD 00:00:00 - * - * @param inputDay date - * @return start day - */ - public static Date getStartOfDay(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.HOUR_OF_DAY, 0); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - cal.set(Calendar.MILLISECOND, 0); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 23:59:59 - * - * @param inputDay day - * @return end of day - */ - public static Date getEndOfDay(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.HOUR_OF_DAY, 23); - cal.set(Calendar.MINUTE, 59); - cal.set(Calendar.SECOND, 59); - cal.set(Calendar.MILLISECOND, 999); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 00:00:00 - * - * @param inputDay day - * @return start of hour - */ - public static Date getStartOfHour(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - cal.set(Calendar.MILLISECOND, 0); - return cal.getTime(); - } - - /** - * return YYYY-MM-DD 23:59:59 - * - * @param inputDay day - * @return end of hour - */ - public static Date getEndOfHour(Date inputDay) { - Calendar cal = Calendar.getInstance(); - cal.setTime(inputDay); - cal.set(Calendar.MINUTE, 59); - cal.set(Calendar.SECOND, 59); 
- cal.set(Calendar.MILLISECOND, 999); - return cal.getTime(); - } - - /** - * get current date - * - * @return current date - */ - public static Date getCurrentDate() { - return DateUtils.parse(DateUtils.getCurrentTime(), - Constants.YYYY_MM_DD_HH_MM_SS); - } - - public static Date addYears(Date date, int amount) { - return add(date, 1, amount); - } - public static Date addMonths(Date date, int amount) { return add(date, 2, amount); } - public static Date addWeeks(Date date, int amount) { - return add(date, 3, amount); - } - public static Date addDays(Date date, int amount) { return add(date, 5, amount); } - public static Date addHours(Date date, int amount) { - return add(date, 11, amount); - } - public static Date addMinutes(Date date, int amount) { return add(date, 12, amount); } - public static Date addSeconds(Date date, int amount) { - return add(date, 13, amount); - } - - public static Date addMilliseconds(Date date, int amount) { - return add(date, 14, amount); - } - /** * get date * @@ -547,33 +303,6 @@ public class DateUtils { return intervalSeconds - usedTime; } - /** - * get current time stamp : yyyyMMddHHmmssSSS - * - * @return date string - */ - public static String getCurrentTimeStamp() { - return getCurrentTime(Constants.YYYYMMDDHHMMSSSSS); - } - - /** - * transform date to target timezone date - *

e.g. - *

if input date is 2020-01-01 00:00:00 current timezone is CST - *

targetTimezoneId is MST - *

this method will return 2020-01-01 15:00:00 - */ - public static Date getTimezoneDate(Date date, String targetTimezoneId) { - if (StringUtils.isEmpty(targetTimezoneId)) { - return date; - } - - String dateToString = dateToString(date); - LocalDateTime localDateTime = LocalDateTime.parse(dateToString, DateTimeFormatter.ofPattern(Constants.YYYY_MM_DD_HH_MM_SS)); - ZonedDateTime zonedDateTime = ZonedDateTime.of(localDateTime, TimeZone.getTimeZone(targetTimezoneId).toZoneId()); - return Date.from(zonedDateTime.toInstant()); - } - /** * get timezone by timezoneId */ @@ -583,40 +312,4 @@ public class DateUtils { } return TimeZone.getTimeZone(timezoneId); } - - /** - * Time unit representing one thousandth of a second - */ - public static class MILLISECONDS { - - public static long toSeconds(long d) { - return d / (C3 / C2); - } - - public static long toMinutes(long d) { - return d / (C4 / C2); - } - - public static long toHours(long d) { - return d / (C5 / C2); - } - - public static long toDays(long d) { - return d / (C6 / C2); - } - - public static long toDurationSeconds(long d) { - return (d % (C4 / C2)) / (C3 / C2); - } - - public static long toDurationMinutes(long d) { - return (d % (C5 / C2)) / (C4 / C2); - } - - public static long toDurationHours(long d) { - return (d % (C6 / C2)) / (C5 / C2); - } - - } - } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java index ed53cb2625af0b822582c01805bcf7e71b2f8847..070e62f5a1a2f27f7eda365f88240c62df6e01ab 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/JSONUtils.java @@ -34,18 +34,14 @@ import java.util.TimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.core.JsonGenerator; import 
com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -169,47 +165,6 @@ public class JSONUtils { return Collections.emptyList(); } - /** - * check json object valid - * - * @param json json - * @return true if valid - */ - public static boolean checkJsonValid(String json) { - - if (StringUtils.isEmpty(json)) { - return false; - } - - try { - objectMapper.readTree(json); - return true; - } catch (IOException e) { - logger.error("check json object valid exception!", e); - } - - return false; - } - - /** - * Method for finding a JSON Object field with specified name in this - * node or its child nodes, and returning value it has. - * If no matching field is found in this node or its descendants, returns null. 
- * - * @param jsonNode json node - * @param fieldName Name of field to look for - * @return Value of first matching node found, if any; null if none - */ - public static String findValue(JsonNode jsonNode, String fieldName) { - JsonNode node = jsonNode.findValue(fieldName); - - if (node == null) { - return null; - } - - return node.asText(); - } - /** * json to map * {@link #toMap(String, Class, Class)} @@ -221,21 +176,6 @@ public class JSONUtils { return parseObject(json, new TypeReference>() {}); } - /** - * from the key-value generated json to get the str value no matter the real type of value - * @param json the json str - * @param nodeName key - * @return the str value of key - */ - public static String getNodeString(String json, String nodeName) { - try { - JsonNode rootNode = objectMapper.readTree(json); - return rootNode.has(nodeName) ? rootNode.get(nodeName).toString() : ""; - } catch (JsonProcessingException e) { - return ""; - } - } - /** * json to map * @@ -327,18 +267,6 @@ public class JSONUtils { } } - /** - * json serializer - */ - public static class JsonDataSerializer extends JsonSerializer { - - @Override - public void serialize(String value, JsonGenerator gen, SerializerProvider provider) throws IOException { - gen.writeRawValue(value); - } - - } - /** * json data deserializer */ diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java index 491be64d5d07bc6e561e406ce866ffa6172e4b52..8a4068c51ed7c394c25bb84e1321d52adac2e2ff 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/PropertyUtils.java @@ -21,15 +21,13 @@ import static org.apache.dolphinscheduler.spi.utils.Constants.COMMON_PROPERTIES_ import java.io.IOException; import java.io.InputStream; -import java.util.HashMap; 
-import java.util.Map; import java.util.Properties; -import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PropertyUtils { + private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); private static final Properties properties = new Properties(); @@ -78,7 +76,8 @@ public class PropertyUtils { * @return property value with upper case */ public static String getUpperCaseString(String key) { - return properties.getProperty(key.trim()).toUpperCase(); + String val = getString(key); + return StringUtils.isEmpty(val) ? val : val.toUpperCase(); } /** @@ -89,8 +88,8 @@ public class PropertyUtils { * @return property value */ public static String getString(String key, String defaultVal) { - String val = properties.getProperty(key.trim()); - return val == null ? defaultVal : val; + String val = getString(key); + return StringUtils.isEmpty(val) ? defaultVal : val; } /** @@ -110,7 +109,7 @@ public class PropertyUtils { */ public static int getInt(String key, int defaultValue) { String value = getString(key); - if (value == null) { + if (StringUtils.isEmpty(value)) { return defaultValue; } @@ -129,12 +128,7 @@ public class PropertyUtils { * @return property value */ public static boolean getBoolean(String key) { - String value = properties.getProperty(key.trim()); - if (null != value) { - return Boolean.parseBoolean(value); - } - - return false; + return getBoolean(key, false); } /** @@ -145,116 +139,49 @@ public class PropertyUtils { * @return property value */ public static Boolean getBoolean(String key, boolean defaultValue) { - String value = properties.getProperty(key.trim()); - if (null != value) { - return Boolean.parseBoolean(value); - } - - return defaultValue; + String value = getString(key); + return StringUtils.isEmpty(value) ? 
defaultValue : Boolean.parseBoolean(value); } /** * get property long value * * @param key key - * @param defaultVal default value - * @return property value - */ - public static long getLong(String key, long defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Long.parseLong(val); - } - - /** - * @param key key - * @return property value - */ - public static long getLong(String key) { - return getLong(key, -1); - } - - /** - * @param key key - * @param defaultVal default value + * @param defaultValue default value * @return property value */ - public static double getDouble(String key, double defaultVal) { - String val = getString(key); - return val == null ? defaultVal : Double.parseDouble(val); - } - - /** - * get array - * - * @param key property name - * @param splitStr separator - * @return property value through array - */ - public static String[] getArray(String key, String splitStr) { + public static long getLong(String key, long defaultValue) { String value = getString(key); - if (value == null) { - return new String[0]; + if (StringUtils.isEmpty(value)) { + return defaultValue; } + try { - String[] propertyArray = value.split(splitStr); - return propertyArray; + return Long.parseLong(value); } catch (NumberFormatException e) { logger.info(e.getMessage(), e); } - return new String[0]; + return defaultValue; } /** * @param key key - * @param type type - * @param defaultValue default value - * @param T - * @return get enum value - */ - public static > T getEnum(String key, Class type, - T defaultValue) { - String val = getString(key); - return val == null ? defaultValue : Enum.valueOf(type, val); - } - - /** - * get all properties with specified prefix, like: fs. 
- * - * @param prefix prefix to search - * @return all properties with specified prefix + * @return property value */ - public static Map getPrefixedProperties(String prefix) { - Map matchedProperties = new HashMap<>(); - for (String propName : properties.stringPropertyNames()) { - if (propName.startsWith(prefix)) { - matchedProperties.put(propName, properties.getProperty(propName)); - } - } - return matchedProperties; + public static long getLong(String key) { + return getLong(key, -1); } /** - * + * set value + * @param key key + * @param value value */ public static void setValue(String key, String value) { properties.setProperty(key, value); } - public static Map getPropertiesByPrefix(String prefix) { - if (StringUtils.isEmpty(prefix)) { - return null; - } - Set keys = properties.keySet(); - if (keys.isEmpty()) { - return null; - } - Map propertiesMap = new HashMap<>(); - keys.forEach(k -> { - if (k.toString().contains(prefix)) { - propertiesMap.put(k.toString().replaceFirst(prefix + ".", ""), properties.getProperty((String) k)); - } - }); - return propertiesMap; + public static String dumpProperties() { + return properties.toString(); } - } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java index 3d6bdfa73844bc34f9320d49cd89e9916acd1954..4c116e817a08831daa72f060ff608301494ca986 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java @@ -30,8 +30,6 @@ public class StringUtils { */ public static final String EMPTY = ""; - public static final int INDEX_NOT_FOUND = -1; - private StringUtils() { throw new UnsupportedOperationException("Construct StringUtils"); } @@ -85,16 +83,6 @@ public class StringUtils { return !isBlank(cs); } - /** - *

Replace all strings matching the regular expression \t \n \r with _

- * - * @param src the String , may be null - * @return the string that has been replaced - */ - public static String replaceNRTtoUnderline(String src) { - return isBlank(src) ? src : src.replaceAll("[\n|\r|\t]", "_"); - } - /** *

Removes control characters (char <= 32) from both * ends of this String, handling {@code null} by returning @@ -134,46 +122,6 @@ public class StringUtils { return str1 == null ? str2 == null : str1.equalsIgnoreCase(str2); } - public static String substringBefore(final String str, final String separator) { - if (isEmpty(str) || separator == null) { - return str; - } - if (separator.isEmpty()) { - return EMPTY; - } - final int pos = str.indexOf(separator); - if (pos == INDEX_NOT_FOUND) { - return str; - } - return str.substring(0, pos); - } - - public static String substringAfter(final String str, final String separator) { - if (isEmpty(str)) { - return str; - } - if (separator == null) { - return EMPTY; - } - final int pos = str.indexOf(separator); - if (pos == INDEX_NOT_FOUND) { - return EMPTY; - } - return str.substring(pos + separator.length()); - } - - public static long strDigitToLong(String str, long defaultValue) { - if (str == null) { - return defaultValue; - } else { - try { - return Long.parseLong(str); - } catch (NumberFormatException var4) { - return defaultValue; - } - } - } - /** *

Joins the elements of the provided Collection into a single String * containing the provided Collection of elements.

diff --git a/dolphinscheduler-standalone-server/pom.xml b/dolphinscheduler-standalone-server/pom.xml index 8b9efc124162868a340cd6787a361d401b309d8c..9ce552595800526f26753a529fd2e0683e12255d 100644 --- a/dolphinscheduler-standalone-server/pom.xml +++ b/dolphinscheduler-standalone-server/pom.xml @@ -15,13 +15,11 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -36,6 +34,10 @@ org.apache.dolphinscheduler dolphinscheduler-api + + org.apache.dolphinscheduler + dolphinscheduler-python + org.apache.curator curator-test @@ -49,7 +51,7 @@ org.apache.dolphinscheduler - dolphinscheduler-alert + dolphinscheduler-alert-server diff --git a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java index 94b6ca7bc271882e4b3af5d30cda4d757e251081..c216c07a53942f114e3c45117f61a7200b2bcfb1 100644 --- a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java +++ b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/server/StandaloneServer.java @@ -17,114 +17,30 @@ package org.apache.dolphinscheduler.server; -import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME; -import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_PASSWORD; -import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_URL; -import static org.apache.dolphinscheduler.common.Constants.SPRING_DATASOURCE_USERNAME; - import org.apache.dolphinscheduler.alert.AlertServer; import org.apache.dolphinscheduler.api.ApiApplicationServer; -import org.apache.dolphinscheduler.common.utils.ScriptRunner; -import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; import 
org.apache.dolphinscheduler.server.master.MasterServer; import org.apache.dolphinscheduler.server.worker.WorkerServer; import org.apache.curator.test.TestingServer; -import java.io.FileReader; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.sql.SQLException; - -import javax.sql.DataSource; - -import org.h2.tools.Server; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.context.annotation.ComponentScan; -@SpringBootApplication +@EnableAutoConfiguration +@ComponentScan public class StandaloneServer { - private static final Logger LOGGER = LoggerFactory.getLogger(StandaloneServer.class); - public static void main(String[] args) throws Exception { - Thread.currentThread().setName("Standalone-Server"); - - System.setProperty("spring.profiles.active", "api"); - - startDatabase(); - - startRegistry(); - - startAlertServer(); - - setTaskPlugin(); - - new SpringApplicationBuilder( - ApiApplicationServer.class, - MasterServer.class, - WorkerServer.class - ).run(args); - } - - private static void startAlertServer() { - final Path alertPluginPath = Paths.get( - StandaloneServer.class.getProtectionDomain().getCodeSource().getLocation().getPath(), - "../../../dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml" - ).toAbsolutePath(); - if (Files.exists(alertPluginPath)) { - System.setProperty("alert.plugin.binding", alertPluginPath.toString()); - System.setProperty("alert.plugin.dir", ""); - } - AlertServer.getInstance().start(); - } - - private static void startRegistry() throws Exception { final TestingServer server = new TestingServer(true); System.setProperty("registry.servers", server.getConnectString()); - final Path registryPath = 
Paths.get( - StandaloneServer.class.getProtectionDomain().getCodeSource().getLocation().getPath(), - "../../../dolphinscheduler-registry-plugin/dolphinscheduler-registry-zookeeper/pom.xml" - ).toAbsolutePath(); - if (Files.exists(registryPath)) { - System.setProperty("registry.plugin.binding", registryPath.toString()); - System.setProperty("registry.plugin.dir", ""); - } - } - - private static void startDatabase() throws IOException, SQLException { - final Path temp = Files.createTempDirectory("dolphinscheduler_"); - LOGGER.info("H2 database directory: {}", temp); - System.setProperty( - SPRING_DATASOURCE_DRIVER_CLASS_NAME, - org.h2.Driver.class.getName() - ); - System.setProperty( - SPRING_DATASOURCE_URL, - String.format("jdbc:h2:tcp://localhost/%s;MODE=MySQL;DATABASE_TO_LOWER=true", temp.toAbsolutePath()) - ); - System.setProperty(SPRING_DATASOURCE_USERNAME, "sa"); - System.setProperty(SPRING_DATASOURCE_PASSWORD, ""); - - Server.createTcpServer("-ifNotExists").start(); - - final DataSource ds = ConnectionFactory.getInstance().getDataSource(); - final ScriptRunner runner = new ScriptRunner(ds.getConnection(), true, true); - runner.runScript(new FileReader("sql/dolphinscheduler_h2.sql")); - } - - private static void setTaskPlugin() { - final Path taskPluginPath = Paths.get( - StandaloneServer.class.getProtectionDomain().getCodeSource().getLocation().getPath(), - "../../../dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml" - ).toAbsolutePath(); - if (Files.exists(taskPluginPath)) { - System.setProperty("task.plugin.binding", taskPluginPath.toString()); - System.setProperty("task.plugin.dir", ""); - } + new SpringApplicationBuilder( + ApiApplicationServer.class, + MasterServer.class, + WorkerServer.class, + AlertServer.class, + PythonGatewayServer.class + ).profiles("master", "worker", "api", "alert", "python-gateway", "h2", "standalone").run(args); } } diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 
b/dolphinscheduler-standalone-server/src/main/resources/application-standalone.yaml similarity index 54% rename from ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 rename to dolphinscheduler-standalone-server/src/main/resources/application-standalone.yaml index 73840b8c18109e00fa348c2c3113189eec810a95..64f47242068e6ef8bad522281a4f410d9a7ef6ae 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 +++ b/dolphinscheduler-standalone-server/src/main/resources/application-standalone.yaml @@ -15,6 +15,41 @@ # limitations under the License. # -{% for key, value in dolphin_alert_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file +spring: + application: + name: standalone-server + cache: + # default unable cache, you can enable by `type: caffeine` + type: none + cache-names: + - tenant + - user + - processDefinition + - processTaskRelation + - taskDefinition + - workerGroup + - schedule + caffeine: + spec: maximumSize=100,expireAfterWrite=300s,recordStats + +server: + port: 12345 + +management: + endpoints: + web: + exposure: + include: '*' + server: + port: 8080 + metrics: + tags: + application: ${spring.application.name} + +logging: + level: + org: + apache: + zookeeper: WARN + hbase: WARN + hadoop: WARN diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml index 02693938032a8d7efa7bbbfc56d59c93ecacc047..b94d9ab6ac3537bd3c09712c399f6836fb2cfdfa 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml @@ -15,13 +15,11 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 jar @@ -49,6 +47,10 @@ commons-codec commons-codec + + commons-lang + commons-lang + org.slf4j slf4j-api @@ -272,10 +274,11 @@ jasper-compiler tomcat + + commons-lang + commons-lang + - - dolphinscheduler-task-api-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java index 418bf1aa9f090293776db2031228f7f018f01e7b..4112e2ca7b3588ceb1eadf162658d71d65627359 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractCommandExecutor.java @@ -122,10 +122,12 @@ public abstract class AbstractCommandExecutor { // merge error information to standard output stream processBuilder.redirectErrorStream(true); - // setting up user to run commands - command.add("sudo"); - command.add("-u"); - command.add(taskRequest.getTenantCode()); + // if sudo.enable=true,setting up user to run commands + if (OSUtils.isSudoEnable()) { + command.add("sudo"); + command.add("-u"); + command.add(taskRequest.getTenantCode()); + } command.add(commandInterpreter()); command.addAll(Collections.emptyList()); command.add(commandFile); @@ -307,8 +309,9 @@ public abstract class AbstractCommandExecutor { * @param process process */ private void parseProcessOutput(Process process) { - String threadLoggerInfoName = String.format(TaskConstants.TASK_LOGGER_THREAD_NAME + "-%s", taskRequest.getTaskAppId()); - ExecutorService getOutputLogService = newDaemonSingleThreadExecutor(threadLoggerInfoName + 
"-" + "getOutputLogService"); + String threadLoggerInfoName = String.format(TaskConstants.TASK_LOGGER_THREAD_NAME_FORMAT, + taskRequest.getTaskLogName() + TaskConstants.GET_OUTPUT_LOG_SERVICE); + ExecutorService getOutputLogService = newDaemonSingleThreadExecutor(threadLoggerInfoName); getOutputLogService.submit(() -> { try (BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) { String line; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java index e57241802969b51eb593befa5bcc0680d62ebaf8..22fbc6b00f4c267b6edb73e82c3524195e092905 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTaskExecutor.java @@ -18,10 +18,15 @@ package org.apache.dolphinscheduler.plugin.task.api; import org.apache.dolphinscheduler.spi.task.AbstractTask; +import org.apache.dolphinscheduler.spi.task.TaskConstants; +import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; +import java.util.Map; import java.util.StringJoiner; import java.util.concurrent.LinkedBlockingQueue; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,8 +37,9 @@ public abstract class AbstractTaskExecutor extends AbstractTask { public static final Marker FINALIZE_SESSION_MARKER = MarkerFactory.getMarker("FINALIZE_SESSION"); - protected Logger logger; + protected final Logger logger = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); + public 
String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; /** * constructor * @@ -41,7 +47,6 @@ public abstract class AbstractTaskExecutor extends AbstractTask { */ protected AbstractTaskExecutor(TaskRequest taskRequest) { super(taskRequest); - logger = LoggerFactory.getLogger(taskRequest.getTaskLogName()); } /** @@ -61,4 +66,35 @@ public abstract class AbstractTaskExecutor extends AbstractTask { logger.info(" -> {}", joiner); } } + + /** + * regular expressions match the contents between two specified strings + * + * @param content content + * @param rgex rgex + * @param sqlParamsMap sql params map + * @param paramsPropsMap params props map + */ + public void setSqlParamsMap(String content, String rgex, Map sqlParamsMap, + Map paramsPropsMap,int taskInstanceId) { + Pattern pattern = Pattern.compile(rgex); + Matcher m = pattern.matcher(content); + int index = 1; + while (m.find()) { + + String paramName = m.group(1); + Property prop = paramsPropsMap.get(paramName); + + if (prop == null) { + logger.error("setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance" + + " with id: {}. 
So couldn't put Property in sqlParamsMap.", paramName, taskInstanceId); + } else { + sqlParamsMap.put(index, prop); + index++; + logger.info("setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content); + } + + } + } + } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java index 2f94003fbb3ff1df5a39d0fce4a3c49014c86f25..2a728cdadbcd05799772752cb9afba5833fe96a9 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ProcessUtils.java @@ -43,7 +43,7 @@ public final class ProcessUtils { /** * Expression of PID recognition in Windows scene */ - private static final Pattern WINDOWSATTERN = Pattern.compile("(\\d+)"); + private static final Pattern WINDOWSATTERN = Pattern.compile("\\w+\\((\\d+)\\)"); /** * kill tasks according to different task types. 
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java index 5272c0e21866081716b86292c96b7e5458cda922..2912395413777767e9220391b5c4000c2d4af46a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/ShellCommandExecutor.java @@ -21,13 +21,13 @@ import org.apache.dolphinscheduler.plugin.task.util.OSUtils; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; import org.apache.commons.io.FileUtils; +import org.apache.commons.lang.StringUtils; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; -import java.util.List; import java.util.concurrent.LinkedBlockingQueue; import java.util.function.Consumer; @@ -94,18 +94,25 @@ public class ShellCommandExecutor extends AbstractCommandExecutor { if (OSUtils.isWindows()) { sb.append("@echo off\n"); sb.append("cd /d %~dp0\n"); - if (taskRequest.getEnvFile() != null) { - sb.append("call ").append(taskRequest.getEnvFile()).append("\n"); + if (StringUtils.isNotBlank(taskRequest.getEnvironmentConfig())) { + sb.append(taskRequest.getEnvironmentConfig()).append("\n"); + } else { + if (taskRequest.getEnvFile() != null) { + sb.append("call ").append(taskRequest.getEnvFile()).append("\n"); + } } } else { sb.append("#!/bin/sh\n"); sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); sb.append("cd $BASEDIR\n"); - if (taskRequest.getEnvFile() != null) { - sb.append("source ").append(taskRequest.getEnvFile()).append("\n"); + if (StringUtils.isNotBlank(taskRequest.getEnvironmentConfig())) { + 
sb.append(taskRequest.getEnvironmentConfig()).append("\n"); + } else { + if (taskRequest.getEnvFile() != null) { + sb.append("source ").append(taskRequest.getEnvFile()).append("\n"); + } } } - sb.append(execCommand); logger.info("command : {}", sb); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseConnectionParam.java deleted file mode 100644 index 44a5962f9291d373963255f98a8081876c00709f..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseConnectionParam.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -import org.apache.dolphinscheduler.plugin.task.datasource.clickhouse.ClickhouseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.db2.Db2ConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.hive.HiveConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.mysql.MysqlConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.oracle.OracleConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.postgresql.PostgreSqlConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.presto.PrestoConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.spark.SparkConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.sqlserver.SqlServerConnectionParam; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonInclude.Include; - -/** - * The base model of connection param - *

- * {@link ClickhouseConnectionParam} - * {@link Db2ConnectionParam} - * {@link HiveConnectionParam} - * {@link MysqlConnectionParam} - * {@link OracleConnectionParam} - * {@link PostgreSqlConnectionParam} - * {@link PrestoConnectionParam} - * {@link SparkConnectionParam} - * {@link SqlServerConnectionParam} - */ -@JsonInclude(Include.NON_NULL) -public abstract class BaseConnectionParam implements ConnectionParam { - - protected String user; - - protected String password; - - protected String address; - - protected String database; - - protected String jdbcUrl; - - protected String other; - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getAddress() { - return address; - } - - public void setAddress(String address) { - this.address = address; - } - - public String getDatabase() { - return database; - } - - public void setDatabase(String database) { - this.database = database; - } - - public String getJdbcUrl() { - return jdbcUrl; - } - - public void setJdbcUrl(String jdbcUrl) { - this.jdbcUrl = jdbcUrl; - } - - public String getOther() { - return other; - } - - public void setOther(String other) { - this.other = other; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseDataSourceParamDTO.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseDataSourceParamDTO.java deleted file mode 100644 index 5af330d38e51c661f438f0d1ec0a9b87b9d6cbdc..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseDataSourceParamDTO.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -import org.apache.dolphinscheduler.plugin.task.datasource.clickhouse.ClickHouseDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.db2.Db2DatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.hive.HiveDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.mysql.MysqlDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.oracle.OracleDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.postgresql.PostgreSqlDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.presto.PrestoDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.spark.SparkDatasourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.sqlserver.SqlServerDatasourceParamDTO; -import org.apache.dolphinscheduler.spi.enums.DbType; - -import java.io.Serializable; -import java.util.Map; - -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonTypeInfo; - -/** - * Basic datasource params submitted to api. - *

- * see {@link MysqlDatasourceParamDTO} - * see {@link PostgreSqlDatasourceParamDTO} - * see {@link HiveDataSourceParamDTO} - * see {@link SparkDatasourceParamDTO} - * see {@link ClickHouseDatasourceParamDTO} - * see {@link OracleDatasourceParamDTO} - * see {@link SqlServerDatasourceParamDTO} - * see {@link Db2DatasourceParamDTO} - * see {@link PrestoDatasourceParamDTO} - */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") -@JsonSubTypes(value = { - @JsonSubTypes.Type(value = MysqlDatasourceParamDTO.class, name = "MYSQL"), - @JsonSubTypes.Type(value = PostgreSqlDatasourceParamDTO.class, name = "POSTGRESQL"), - @JsonSubTypes.Type(value = HiveDataSourceParamDTO.class, name = "HIVE"), - @JsonSubTypes.Type(value = SparkDatasourceParamDTO.class, name = "SPARK"), - @JsonSubTypes.Type(value = ClickHouseDatasourceParamDTO.class, name = "CLICKHOUSE"), - @JsonSubTypes.Type(value = OracleDatasourceParamDTO.class, name = "ORACLE"), - @JsonSubTypes.Type(value = SqlServerDatasourceParamDTO.class, name = "SQLSERVER"), - @JsonSubTypes.Type(value = Db2DatasourceParamDTO.class, name = "DB2"), - @JsonSubTypes.Type(value = PrestoDatasourceParamDTO.class, name = "PRESTO"), -}) -public abstract class BaseDataSourceParamDTO implements Serializable { - - protected Integer id; - - protected String name; - - protected String note; - - protected String host; - - protected Integer port; - - protected String database; - - protected String userName; - - protected String password; - - protected Map other; - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getNote() { - return note; - } - - public void setNote(String note) { - this.note = note; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public Integer getPort() { - return port; - 
} - - public void setPort(Integer port) { - this.port = port; - } - - public String getDatabase() { - return database; - } - - public void setDatabase(String database) { - this.database = database; - } - - public String getUserName() { - return userName; - } - - public void setUserName(String userName) { - this.userName = userName; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public Map getOther() { - return other; - } - - public void setOther(Map other) { - this.other = other; - } - - /** - * Get the datasource type - * see{@link DbType} - * - * @return datasource type code - */ - public abstract DbType getType(); -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsConnectionParam.java deleted file mode 100644 index ab2402748708458878393ae52d764a6c365d8ab4..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/BaseHdfsConnectionParam.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -public class BaseHdfsConnectionParam extends BaseConnectionParam { - protected String principal; - protected String javaSecurityKrb5Conf; - protected String loginUserKeytabUsername; - protected String loginUserKeytabPath; - - public String getPrincipal() { - return principal; - } - - public void setPrincipal(String principal) { - this.principal = principal; - } - - public String getJavaSecurityKrb5Conf() { - return javaSecurityKrb5Conf; - } - - public void setJavaSecurityKrb5Conf(String javaSecurityKrb5Conf) { - this.javaSecurityKrb5Conf = javaSecurityKrb5Conf; - } - - public String getLoginUserKeytabUsername() { - return loginUserKeytabUsername; - } - - public void setLoginUserKeytabUsername(String loginUserKeytabUsername) { - this.loginUserKeytabUsername = loginUserKeytabUsername; - } - - public String getLoginUserKeytabPath() { - return loginUserKeytabPath; - } - - public void setLoginUserKeytabPath(String loginUserKeytabPath) { - this.loginUserKeytabPath = loginUserKeytabPath; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/ConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/ConnectionParam.java deleted file mode 100644 index 46bd979a2d86f5b6649059f2d698a052ada42cfb..0000000000000000000000000000000000000000 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/ConnectionParam.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -import java.io.Serializable; - -/** - * The model of Datasource Connection param - */ -public interface ConnectionParam extends Serializable { -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceUtil.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceUtil.java deleted file mode 100644 index d7b47f07ac06354bfd3722512e414cb1f83de213..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/DatasourceUtil.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -import org.apache.dolphinscheduler.plugin.task.datasource.clickhouse.ClickHouseDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.db2.Db2DatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.hive.HiveDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.mysql.MysqlDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.oracle.OracleDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.postgresql.PostgreSqlDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.presto.PrestoDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.spark.SparkDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.sqlserver.SqlServerDatasourceProcessor; -import org.apache.dolphinscheduler.spi.enums.DbType; - -import java.sql.Connection; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DatasourceUtil { - - private DatasourceUtil() { - } - - private static final Logger logger = LoggerFactory.getLogger(DatasourceUtil.class); - - private static final DatasourceProcessor mysqlProcessor = new 
MysqlDatasourceProcessor(); - private static final DatasourceProcessor postgreSqlProcessor = new PostgreSqlDatasourceProcessor(); - private static final DatasourceProcessor hiveProcessor = new HiveDatasourceProcessor(); - private static final DatasourceProcessor sparkProcessor = new SparkDatasourceProcessor(); - private static final DatasourceProcessor clickhouseProcessor = new ClickHouseDatasourceProcessor(); - private static final DatasourceProcessor oracleProcessor = new OracleDatasourceProcessor(); - private static final DatasourceProcessor sqlServerProcessor = new SqlServerDatasourceProcessor(); - private static final DatasourceProcessor db2PROCESSOR = new Db2DatasourceProcessor(); - private static final DatasourceProcessor prestoPROCESSOR = new PrestoDatasourceProcessor(); - - public static ConnectionParam buildConnectionParams(DbType dbType, String connectionJson) { - return getDatasourceProcessor(dbType).createConnectionParams(connectionJson); - } - - public static Connection getConnection(DbType dbType, ConnectionParam connectionParam) { - try { - return getDatasourceProcessor(dbType).getConnection(connectionParam); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public static String getJdbcUrl(DbType dbType, ConnectionParam baseConnectionParam) { - return getDatasourceProcessor(dbType).getJdbcUrl(baseConnectionParam); - } - - public static DatasourceProcessor getDatasourceProcessor(DbType dbType) { - switch (dbType) { - case MYSQL: - return mysqlProcessor; - case POSTGRESQL: - return postgreSqlProcessor; - case HIVE: - return hiveProcessor; - case SPARK: - return sparkProcessor; - case CLICKHOUSE: - return clickhouseProcessor; - case ORACLE: - return oracleProcessor; - case SQLSERVER: - return sqlServerProcessor; - case DB2: - return db2PROCESSOR; - case PRESTO: - return prestoPROCESSOR; - default: - throw new IllegalArgumentException("datasource type illegal:" + dbType); - } - } - -} diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/HiveConfUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/HiveConfUtils.java deleted file mode 100644 index 0985b7457cf5604ad80851b5de0736fe40e5ea65..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/HiveConfUtils.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * hive conf utils - */ -public class HiveConfUtils { - - private HiveConfUtils() { - throw new UnsupportedOperationException("Construct HiveConfUtils"); - } - - private static class HiveConfHandler { - private static HiveConf singleton; - - private static Map hiveConfVars; - - static { - singleton = new HiveConf(); - hiveConfVars = new HashMap<>(); - Arrays.stream(ConfVars.values()).forEach(confVar -> hiveConfVars.put(confVar.varname,confVar)); - } - } - - /** - * get HiveConf instance - * @return HiveConf hiveConf - */ - public static HiveConf getInstance() { - return HiveConfHandler.singleton; - } - - /** - * get hive conf vars - * @return - */ - public static Map getHiveConfVars() { - return HiveConfHandler.hiveConfVars; - } - - /** - * Determine if it belongs to a hive conf property - * @param conf config - * @return boolean result - */ - public static boolean isHiveConfVar(String conf) { - // the default hive conf var name - String confKey = conf.split("=")[0]; - Map hiveConfVars = HiveConfUtils.getHiveConfVars(); - if (hiveConfVars.get(confKey) != null) { - return true; - } - - // the security authorization hive conf var name - HiveConf hiveConf = HiveConfUtils.getInstance(); - String hiveAuthorizationSqlStdAuthConfigWhitelist = hiveConf.getVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST); - Pattern modWhiteListPattern = Pattern.compile(hiveAuthorizationSqlStdAuthConfigWhitelist); - Matcher matcher = modWhiteListPattern.matcher(confKey); - return matcher.matches(); - } - -} diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceProcessor.java deleted file mode 100644 index 624afee9401ea8eacba5c512e9f275ae7772e61a..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickHouseDatasourceProcessor.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource.clickhouse; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_CLICKHOUSE_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_CLICKHOUSE; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.Map; - -public class ClickHouseDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - ClickhouseConnectionParam connectionParams = (ClickhouseConnectionParam) createConnectionParams(connectionJson); - - ClickHouseDatasourceParamDTO clickHouseDatasourceParamDTO = new ClickHouseDatasourceParamDTO(); - clickHouseDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - clickHouseDatasourceParamDTO.setUserName(connectionParams.getUser()); - clickHouseDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - - String[] hostSeperator = 
connectionParams.getAddress().split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - clickHouseDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - clickHouseDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - - return clickHouseDatasourceParamDTO; - } - - @Override - public ConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - ClickHouseDatasourceParamDTO clickHouseParam = (ClickHouseDatasourceParamDTO) datasourceParam; - String address = String.format("%s%s:%s", JDBC_CLICKHOUSE, clickHouseParam.getHost(), clickHouseParam.getPort()); - String jdbcUrl = address + "/" + clickHouseParam.getDatabase(); - - ClickhouseConnectionParam clickhouseConnectionParam = new ClickhouseConnectionParam(); - clickhouseConnectionParam.setDatabase(clickHouseParam.getDatabase()); - clickhouseConnectionParam.setAddress(address); - clickhouseConnectionParam.setJdbcUrl(jdbcUrl); - clickhouseConnectionParam.setUser(clickHouseParam.getUserName()); - clickhouseConnectionParam.setPassword(encodePassword(clickHouseParam.getPassword())); - clickhouseConnectionParam.setOther(transformOther(clickHouseParam.getOther())); - return clickhouseConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, ClickhouseConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_CLICKHOUSE_JDBC_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam; - String jdbcUrl = clickhouseConnectionParam.getJdbcUrl(); - if (StringUtils.isNotEmpty(clickhouseConnectionParam.getOther())) { - jdbcUrl = String.format("%s?%s", jdbcUrl, clickhouseConnectionParam.getOther()); - } - return jdbcUrl; - } - - @Override - public Connection 
getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - ClickhouseConnectionParam clickhouseConnectionParam = (ClickhouseConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(clickhouseConnectionParam), - clickhouseConnectionParam.getUser(), decodePassword(clickhouseConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.CLICKHOUSE; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, "&"))); - return stringBuilder.toString(); - } - - private Map parseOther(String other) { - if (other == null) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - String[] configs = other.split("&"); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickhouseConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickhouseConnectionParam.java deleted file mode 100644 index c01d0005a99ec1ebbd0cb81b7ffe0dad6c23ebe2..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/clickhouse/ClickhouseConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.clickhouse; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; - -public class ClickhouseConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "ClickhouseConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2ConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2ConnectionParam.java deleted file mode 100644 index ba66c5e16ee5355e354eb9860cfe69b7a54dbca5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2ConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.db2; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; - -public class Db2ConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "Db2ConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceProcessor.java deleted file mode 100644 index ee130ebe606ab5d4dd6c01c78d83189eede19ca7..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/db2/Db2DatasourceProcessor.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.db2; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_DB2_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_DB2; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.Map; - -public class Db2DatasourceProcessor extends 
AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - Db2ConnectionParam connectionParams = (Db2ConnectionParam) createConnectionParams(connectionJson); - - Db2DatasourceParamDTO db2DatasourceParamDTO = new Db2DatasourceParamDTO(); - db2DatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - db2DatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - db2DatasourceParamDTO.setUserName(db2DatasourceParamDTO.getUserName()); - - String[] hostSeperator = connectionParams.getAddress().split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - db2DatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - db2DatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - - return db2DatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - Db2DatasourceParamDTO db2Param = (Db2DatasourceParamDTO) datasourceParam; - String address = String.format("%s%s:%s", JDBC_DB2, db2Param.getHost(), db2Param.getPort()); - String jdbcUrl = String.format("%s/%s", address, db2Param.getDatabase()); - - Db2ConnectionParam db2ConnectionParam = new Db2ConnectionParam(); - db2ConnectionParam.setAddress(address); - db2ConnectionParam.setDatabase(db2Param.getDatabase()); - db2ConnectionParam.setJdbcUrl(jdbcUrl); - db2ConnectionParam.setUser(db2Param.getUserName()); - db2ConnectionParam.setPassword(encodePassword(db2Param.getPassword())); - db2ConnectionParam.setOther(transformOther(db2Param.getOther())); - - return db2ConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, Db2ConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_DB2_JDBC_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) 
{ - Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam; - if (StringUtils.isNotEmpty(db2ConnectionParam.getOther())) { - return String.format("%s;%s", db2ConnectionParam.getJdbcUrl(), db2ConnectionParam.getOther()); - } - return db2ConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - Db2ConnectionParam db2ConnectionParam = (Db2ConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(db2ConnectionParam), - db2ConnectionParam.getUser(), decodePassword(db2ConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.DB2; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s%s", key, value, ";"))); - stringBuilder.deleteCharAt(stringBuilder.length() - 1); - return stringBuilder.toString(); - } - - private Map parseOther(String other) { - if (other == null) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - for (String config : other.split("&")) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDatasourceProcessor.java deleted file mode 100644 index 16c4ceb111557addab0cb5ff8b9552eaab5bfce4..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/hive/HiveDatasourceProcessor.java +++ /dev/null @@ -1,192 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.hive; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_HIVE_2; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.HiveConfUtils; -import org.apache.dolphinscheduler.plugin.task.util.CommonUtils; -import 
org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.Map; - -public class HiveDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - HiveDataSourceParamDTO hiveDataSourceParamDTO = new HiveDataSourceParamDTO(); - HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) createConnectionParams(connectionJson); - - hiveDataSourceParamDTO.setDatabase(hiveConnectionParam.getDatabase()); - hiveDataSourceParamDTO.setUserName(hiveConnectionParam.getUser()); - hiveDataSourceParamDTO.setOther(parseOther(hiveConnectionParam.getOther())); - hiveDataSourceParamDTO.setLoginUserKeytabUsername(hiveConnectionParam.getLoginUserKeytabUsername()); - hiveDataSourceParamDTO.setLoginUserKeytabPath(hiveConnectionParam.getLoginUserKeytabPath()); - hiveDataSourceParamDTO.setJavaSecurityKrb5Conf(hiveConnectionParam.getJavaSecurityKrb5Conf()); - - String[] tmpArray = hiveConnectionParam.getAddress().split(DOUBLE_SLASH); - StringBuilder hosts = new StringBuilder(); - String[] hostPortArray = tmpArray[tmpArray.length - 1].split(COMMA); - for (String hostPort : hostPortArray) { - hosts.append(hostPort.split(COLON)[0]).append(COMMA); - } - hosts.deleteCharAt(hosts.length() - 1); - hiveDataSourceParamDTO.setHost(hosts.toString()); - hiveDataSourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - - return hiveDataSourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - HiveDataSourceParamDTO hiveParam = (HiveDataSourceParamDTO) datasourceParam; - StringBuilder address = 
new StringBuilder(); - address.append(JDBC_HIVE_2); - for (String zkHost : hiveParam.getHost().split(",")) { - address.append(String.format("%s:%s,", zkHost, hiveParam.getPort())); - } - address.deleteCharAt(address.length() - 1); - String jdbcUrl = address.toString() + "/" + hiveParam.getDatabase(); - if (CommonUtils.getKerberosStartupState()) { - jdbcUrl += ";principal=" + hiveParam.getPrincipal(); - } - - HiveConnectionParam hiveConnectionParam = new HiveConnectionParam(); - hiveConnectionParam.setDatabase(hiveParam.getDatabase()); - hiveConnectionParam.setAddress(address.toString()); - hiveConnectionParam.setJdbcUrl(jdbcUrl); - hiveConnectionParam.setUser(hiveParam.getUserName()); - hiveConnectionParam.setPassword(encodePassword(hiveParam.getPassword())); - - if (CommonUtils.getKerberosStartupState()) { - hiveConnectionParam.setPrincipal(hiveParam.getPrincipal()); - hiveConnectionParam.setJavaSecurityKrb5Conf(hiveParam.getJavaSecurityKrb5Conf()); - hiveConnectionParam.setLoginUserKeytabPath(hiveParam.getLoginUserKeytabPath()); - hiveConnectionParam.setLoginUserKeytabUsername(hiveParam.getLoginUserKeytabUsername()); - } - hiveConnectionParam.setOther(transformOther(hiveParam.getOther())); - return hiveConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, HiveConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam; - String jdbcUrl = hiveConnectionParam.getJdbcUrl(); - String otherParams = filterOther(hiveConnectionParam.getOther()); - if (StringUtils.isNotEmpty(otherParams) && !"?".equals(otherParams.substring(0, 1))) { - jdbcUrl += ";"; - } - return jdbcUrl + otherParams; - } - - @Override - public Connection 
getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException { - HiveConnectionParam hiveConnectionParam = (HiveConnectionParam) connectionParam; - CommonUtils.loadKerberosConf(hiveConnectionParam.getJavaSecurityKrb5Conf(), - hiveConnectionParam.getLoginUserKeytabUsername(), hiveConnectionParam.getLoginUserKeytabPath()); - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(connectionParam), - hiveConnectionParam.getUser(), decodePassword(hiveConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.HIVE; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value))); - return stringBuilder.toString(); - } - - private String filterOther(String otherParams) { - if (StringUtils.isBlank(otherParams)) { - return ""; - } - - StringBuilder hiveConfListSb = new StringBuilder(); - hiveConfListSb.append("?"); - StringBuilder sessionVarListSb = new StringBuilder(); - - String[] otherArray = otherParams.split(";", -1); - - for (String conf : otherArray) { - if (HiveConfUtils.isHiveConfVar(conf)) { - hiveConfListSb.append(conf).append(";"); - } else { - sessionVarListSb.append(conf).append(";"); - } - } - - // remove the last ";" - if (sessionVarListSb.length() > 0) { - sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1); - } - - if (hiveConfListSb.length() > 0) { - hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1); - } - - return sessionVarListSb.toString() + hiveConfListSb.toString(); - } - - private Map parseOther(String other) { - if (other == null) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - String[] configs = other.split(";"); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return 
otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceProcessor.java deleted file mode 100644 index e41ccd21c573293ecab70b5464b7e4cbeaf7a4a5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/mysql/MysqlDatasourceProcessor.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource.mysql; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_MYSQL_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_MYSQL; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MysqlDatasourceProcessor extends AbstractDatasourceProcessor { - - private final Logger logger = LoggerFactory.getLogger(MysqlDatasourceProcessor.class); - - private static final String ALLOW_LOAD_LOCAL_IN_FILE_NAME = "allowLoadLocalInfile"; - - private static final String AUTO_DESERIALIZE = "autoDeserialize"; - - private static final String ALLOW_LOCAL_IN_FILE_NAME = "allowLocalInfile"; - - private static final String ALLOW_URL_IN_LOCAL_IN_FILE_NAME = "allowUrlInLocalInfile"; - - private static final String APPEND_PARAMS = 
"allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false"; - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - MysqlConnectionParam connectionParams = (MysqlConnectionParam) createConnectionParams(connectionJson); - MysqlDatasourceParamDTO mysqlDatasourceParamDTO = new MysqlDatasourceParamDTO(); - - mysqlDatasourceParamDTO.setUserName(connectionParams.getUser()); - mysqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - mysqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - - String address = connectionParams.getAddress(); - String[] hostSeperator = address.split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - mysqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - mysqlDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - - return mysqlDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) { - MysqlDatasourceParamDTO mysqlDatasourceParam = (MysqlDatasourceParamDTO) dataSourceParam; - String address = String.format("%s%s:%s", JDBC_MYSQL, mysqlDatasourceParam.getHost(), mysqlDatasourceParam.getPort()); - String jdbcUrl = String.format("%s/%s", address, mysqlDatasourceParam.getDatabase()); - - MysqlConnectionParam mysqlConnectionParam = new MysqlConnectionParam(); - mysqlConnectionParam.setJdbcUrl(jdbcUrl); - mysqlConnectionParam.setDatabase(mysqlDatasourceParam.getDatabase()); - mysqlConnectionParam.setAddress(address); - mysqlConnectionParam.setUser(mysqlDatasourceParam.getUserName()); - mysqlConnectionParam.setPassword(encodePassword(mysqlDatasourceParam.getPassword())); - mysqlConnectionParam.setOther(transformOther(mysqlDatasourceParam.getOther())); - - return mysqlConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - 
return JSONUtils.parseObject(connectionJson, MysqlConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_MYSQL_JDBC_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam; - String jdbcUrl = mysqlConnectionParam.getJdbcUrl(); - if (StringUtils.isNotEmpty(mysqlConnectionParam.getOther())) { - return String.format("%s?%s&%s", jdbcUrl, mysqlConnectionParam.getOther(), APPEND_PARAMS); - } - return String.format("%s?%s", jdbcUrl, APPEND_PARAMS); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - MysqlConnectionParam mysqlConnectionParam = (MysqlConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - String user = mysqlConnectionParam.getUser(); - if (user.contains(AUTO_DESERIALIZE)) { - logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE); - user = user.replace(AUTO_DESERIALIZE, ""); - } - String password = decodePassword(mysqlConnectionParam.getPassword()); - if (password.contains(AUTO_DESERIALIZE)) { - logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE); - password = password.replace(AUTO_DESERIALIZE, ""); - } - return DriverManager.getConnection(getJdbcUrl(connectionParam), user, password); - } - - @Override - public DbType getDbType() { - return DbType.MYSQL; - } - - private String transformOther(Map paramMap) { - if (MapUtils.isEmpty(paramMap)) { - return null; - } - Map otherMap = new HashMap<>(); - paramMap.forEach((k, v) -> { - if (!checkKeyIsLegitimate(k)) { - return; - } - otherMap.put(k, v); - }); - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value))); - return stringBuilder.toString(); - } - - 
private static boolean checkKeyIsLegitimate(String key) { - return !key.contains(ALLOW_LOAD_LOCAL_IN_FILE_NAME) - && !key.contains(AUTO_DESERIALIZE) - && !key.contains(ALLOW_LOCAL_IN_FILE_NAME) - && !key.contains(ALLOW_URL_IN_LOCAL_IN_FILE_NAME); - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - for (String config : other.split("&")) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } - -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceProcessor.java deleted file mode 100644 index bfe94960b0d3427489e7c44a434cf61562a03b2e..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/oracle/OracleDatasourceProcessor.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource.oracle; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.AT_SIGN; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_ORACLE_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_ORACLE_SERVICE_NAME; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_ORACLE_SID; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbConnectType; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -public class OracleDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - OracleConnectionParam connectionParams = (OracleConnectionParam) createConnectionParams(connectionJson); - OracleDatasourceParamDTO oracleDatasourceParamDTO = new 
OracleDatasourceParamDTO(); - - oracleDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - oracleDatasourceParamDTO.setUserName(connectionParams.getUser()); - oracleDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - - String hostSeperator = DOUBLE_SLASH; - if (DbConnectType.ORACLE_SID.equals(connectionParams.connectType)) { - hostSeperator = AT_SIGN; - } - String[] hostPort = connectionParams.getAddress().split(hostSeperator); - String[] hostPortArray = hostPort[hostPort.length - 1].split(COMMA); - oracleDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - oracleDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - - return oracleDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - OracleDatasourceParamDTO oracleParam = (OracleDatasourceParamDTO) datasourceParam; - String address; - if (DbConnectType.ORACLE_SID.equals(oracleParam.getConnectType())) { - address = String.format("%s%s:%s", - JDBC_ORACLE_SID, oracleParam.getHost(), oracleParam.getPort()); - } else { - address = String.format("%s%s:%s", - JDBC_ORACLE_SERVICE_NAME, oracleParam.getHost(), oracleParam.getPort()); - } - String jdbcUrl = address + "/" + oracleParam.getDatabase(); - - OracleConnectionParam oracleConnectionParam = new OracleConnectionParam(); - oracleConnectionParam.setUser(oracleParam.getUserName()); - oracleConnectionParam.setPassword(encodePassword(oracleParam.getPassword())); - oracleConnectionParam.setAddress(address); - oracleConnectionParam.setJdbcUrl(jdbcUrl); - oracleConnectionParam.setDatabase(oracleParam.getDatabase()); - oracleConnectionParam.setConnectType(oracleParam.getConnectType()); - oracleConnectionParam.setOther(transformOther(oracleParam.getOther())); - - return oracleConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, 
OracleConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_ORACLE_JDBC_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam; - if (StringUtils.isNotEmpty(oracleConnectionParam.getOther())) { - return String.format("%s?%s", oracleConnectionParam.getJdbcUrl(), oracleConnectionParam.getOther()); - } - return oracleConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - OracleConnectionParam oracleConnectionParam = (OracleConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(connectionParam), - oracleConnectionParam.getUser(), decodePassword(oracleConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.ORACLE; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - List list = new ArrayList<>(); - otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value))); - return String.join("&", list); - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - String[] configs = other.split("&"); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlConnectionParam.java deleted file mode 100644 index 
1d459ca2b05c668cd76455c776e5df81a2a34cd2..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource.postgresql; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; - -public class PostgreSqlConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "PostgreSqlConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceProcessor.java deleted file mode 100644 index fc705c6e41ceef19d947ea42010ad1f7a289c61b..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/postgresql/PostgreSqlDatasourceProcessor.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.postgresql; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_POSTGRESQL; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_POSTGRESQL_DRIVER; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.Map; - -public class PostgreSqlDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - PostgreSqlConnectionParam connectionParams = (PostgreSqlConnectionParam) createConnectionParams(connectionJson); - PostgreSqlDatasourceParamDTO postgreSqlDatasourceParamDTO = new PostgreSqlDatasourceParamDTO(); - postgreSqlDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - 
postgreSqlDatasourceParamDTO.setUserName(connectionParams.getUser()); - postgreSqlDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - - String address = connectionParams.getAddress(); - String[] hostSeperator = address.split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - postgreSqlDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - postgreSqlDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - - return postgreSqlDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - PostgreSqlDatasourceParamDTO postgreSqlParam = (PostgreSqlDatasourceParamDTO) datasourceParam; - String address = String.format("%s%s:%s", JDBC_POSTGRESQL, postgreSqlParam.getHost(), postgreSqlParam.getPort()); - String jdbcUrl = String.format("%s/%s", address, postgreSqlParam.getDatabase()); - - PostgreSqlConnectionParam postgreSqlConnectionParam = new PostgreSqlConnectionParam(); - postgreSqlConnectionParam.setJdbcUrl(jdbcUrl); - postgreSqlConnectionParam.setAddress(address); - postgreSqlConnectionParam.setDatabase(postgreSqlParam.getDatabase()); - postgreSqlConnectionParam.setUser(postgreSqlParam.getUserName()); - postgreSqlConnectionParam.setPassword(encodePassword(postgreSqlParam.getPassword())); - postgreSqlConnectionParam.setOther(transformOther(postgreSqlParam.getOther())); - - return postgreSqlConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, PostgreSqlConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return ORG_POSTGRESQL_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam; - if 
(StringUtils.isNotEmpty(postgreSqlConnectionParam.getOther())) { - return String.format("%s?%s", postgreSqlConnectionParam.getJdbcUrl(), postgreSqlConnectionParam.getOther()); - } - return postgreSqlConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - PostgreSqlConnectionParam postgreSqlConnectionParam = (PostgreSqlConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(postgreSqlConnectionParam), - postgreSqlConnectionParam.getUser(), decodePassword(postgreSqlConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.POSTGRESQL; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s&", key, value))); - return stringBuilder.toString(); - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - for (String config : other.split("&")) { - String[] split = config.split("="); - otherMap.put(split[0], split[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoConnectionParam.java deleted file mode 100644 index b6a226d982f7d470bf4fcb0b748256c81333571e..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.presto; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; - -public class PrestoConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "PrestoConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceProcessor.java deleted file mode 100644 index d8630bf95fcd674f7fe940992e49e2bcaab26168..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/presto/PrestoDatasourceProcessor.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or 
more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.presto; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_PRESTO_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_PRESTO; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import 
java.sql.DriverManager; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -public class PrestoDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - PrestoConnectionParam connectionParams = (PrestoConnectionParam) createConnectionParams(connectionJson); - - String[] hostSeperator = connectionParams.getAddress().split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - - PrestoDatasourceParamDTO prestoDatasourceParamDTO = new PrestoDatasourceParamDTO(); - prestoDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - prestoDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - prestoDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - prestoDatasourceParamDTO.setUserName(connectionParams.getUser()); - prestoDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - - return prestoDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - PrestoDatasourceParamDTO prestoParam = (PrestoDatasourceParamDTO) datasourceParam; - String address = String.format("%s%s:%s", JDBC_PRESTO, prestoParam.getHost(), prestoParam.getPort()); - String jdbcUrl = address + "/" + prestoParam.getDatabase(); - - PrestoConnectionParam prestoConnectionParam = new PrestoConnectionParam(); - prestoConnectionParam.setUser(prestoParam.getUserName()); - prestoConnectionParam.setPassword(encodePassword(prestoParam.getPassword())); - prestoConnectionParam.setOther(transformOther(prestoParam.getOther())); - prestoConnectionParam.setAddress(address); - prestoConnectionParam.setJdbcUrl(jdbcUrl); - prestoConnectionParam.setDatabase(prestoParam.getDatabase()); - - return prestoConnectionParam; - } - - @Override - public ConnectionParam 
createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, PrestoConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_PRESTO_JDBC_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam; - if (StringUtils.isNotEmpty(prestoConnectionParam.getOther())) { - return String.format("%s?%s", prestoConnectionParam.getJdbcUrl(), prestoConnectionParam.getOther()); - } - return prestoConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - PrestoConnectionParam prestoConnectionParam = (PrestoConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(connectionParam), - prestoConnectionParam.getUser(), decodePassword(prestoConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.PRESTO; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isNotEmpty(otherMap)) { - List list = new ArrayList<>(); - otherMap.forEach((key, value) -> list.add(String.format("%s=%s", key, value))); - return String.join("&", list); - } - return null; - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - String[] configs = other.split("&"); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkConnectionParam.java deleted file mode 100644 
index fad385e3ce6931a7555c3581e4f8acb19cfd91dd..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkConnectionParam.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.datasource.spark; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseHdfsConnectionParam; - -public class SparkConnectionParam extends BaseHdfsConnectionParam { - @Override - public String toString() { - return "SparkConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + ", principal='" + principal + '\'' - + ", javaSecurityKrb5Conf='" + javaSecurityKrb5Conf + '\'' - + ", loginUserKeytabUsername='" + loginUserKeytabUsername + '\'' - + ", loginUserKeytabPath='" + loginUserKeytabPath + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceProcessor.java deleted file mode 100644 index 00e7ecbe0de0267d8d23f3fc824dd04d9c01e5a8..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/spark/SparkDatasourceProcessor.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.spark; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_HIVE_2; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.plugin.task.util.CommonUtils; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class 
SparkDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - SparkConnectionParam connectionParams = (SparkConnectionParam) createConnectionParams(connectionJson); - - SparkDatasourceParamDTO sparkDatasourceParamDTO = new SparkDatasourceParamDTO(); - sparkDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - sparkDatasourceParamDTO.setUserName(connectionParams.getUser()); - sparkDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - sparkDatasourceParamDTO.setJavaSecurityKrb5Conf(connectionParams.getJavaSecurityKrb5Conf()); - sparkDatasourceParamDTO.setLoginUserKeytabPath(connectionParams.getLoginUserKeytabPath()); - sparkDatasourceParamDTO.setLoginUserKeytabUsername(connectionParams.getLoginUserKeytabUsername()); - - StringBuilder hosts = new StringBuilder(); - String[] tmpArray = connectionParams.getAddress().split(DOUBLE_SLASH); - String[] hostPortArray = tmpArray[tmpArray.length - 1].split(COMMA); - Arrays.stream(hostPortArray).forEach(hostPort -> hosts.append(hostPort.split(COLON)[0]).append(COMMA)); - hosts.deleteCharAt(hosts.length() - 1); - - sparkDatasourceParamDTO.setHost(hosts.toString()); - sparkDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - - return sparkDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO dataSourceParam) { - StringBuilder address = new StringBuilder(); - SparkDatasourceParamDTO sparkDatasourceParam = (SparkDatasourceParamDTO) dataSourceParam; - address.append(JDBC_HIVE_2); - for (String zkHost : sparkDatasourceParam.getHost().split(",")) { - address.append(String.format("%s:%s,", zkHost, sparkDatasourceParam.getPort())); - } - address.deleteCharAt(address.length() - 1); - - String jdbcUrl = address + "/" + sparkDatasourceParam.getDatabase(); - if (CommonUtils.getKerberosStartupState()) { - jdbcUrl += ";principal=" 
+ sparkDatasourceParam.getPrincipal(); - } - - SparkConnectionParam sparkConnectionParam = new SparkConnectionParam(); - sparkConnectionParam.setPassword(encodePassword(sparkDatasourceParam.getPassword())); - sparkConnectionParam.setUser(sparkDatasourceParam.getUserName()); - sparkConnectionParam.setOther(transformOther(sparkDatasourceParam.getOther())); - sparkConnectionParam.setDatabase(sparkDatasourceParam.getDatabase()); - sparkConnectionParam.setAddress(address.toString()); - sparkConnectionParam.setJdbcUrl(jdbcUrl); - if (CommonUtils.getKerberosStartupState()) { - sparkConnectionParam.setPrincipal(sparkDatasourceParam.getPrincipal()); - sparkConnectionParam.setJavaSecurityKrb5Conf(sparkDatasourceParam.getJavaSecurityKrb5Conf()); - sparkConnectionParam.setLoginUserKeytabPath(sparkDatasourceParam.getLoginUserKeytabPath()); - sparkConnectionParam.setLoginUserKeytabUsername(sparkDatasourceParam.getLoginUserKeytabUsername()); - } - - return sparkConnectionParam; - } - - @Override - public ConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, SparkConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - } - - @Override - public String getJdbcUrl(ConnectionParam connectionParam) { - SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam; - if (StringUtils.isNotEmpty(sparkConnectionParam.getOther())) { - return String.format("%s;%s", sparkConnectionParam.getJdbcUrl(), sparkConnectionParam.getOther()); - } - return sparkConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws IOException, ClassNotFoundException, SQLException { - SparkConnectionParam sparkConnectionParam = (SparkConnectionParam) connectionParam; - CommonUtils.loadKerberosConf(sparkConnectionParam.getJavaSecurityKrb5Conf(), - sparkConnectionParam.getLoginUserKeytabUsername(), 
sparkConnectionParam.getLoginUserKeytabPath()); - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(sparkConnectionParam), - sparkConnectionParam.getUser(), decodePassword(sparkConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.SPARK; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - List stringBuilder = otherMap.entrySet().stream() - .map(entry -> String.format("%s=%s", entry.getKey(), entry.getValue())).collect(Collectors.toList()); - return String.join(";", stringBuilder); - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - String[] configs = other.split(";"); - for (String config : configs) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerConnectionParam.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerConnectionParam.java deleted file mode 100644 index 5ef72fdeaa0ad8c76046801c1646fcbf4e51a4fa..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerConnectionParam.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.sqlserver; - -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; - -public class SqlServerConnectionParam extends BaseConnectionParam { - @Override - public String toString() { - return "SqlServerConnectionParam{" - + "user='" + user + '\'' - + ", password='" + password + '\'' - + ", address='" + address + '\'' - + ", database='" + database + '\'' - + ", jdbcUrl='" + jdbcUrl + '\'' - + ", other='" + other + '\'' - + '}'; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceProcessor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceProcessor.java deleted file mode 100644 index 5eef2cd49dff8ba1239e9c6ac331015a4b857b30..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/datasource/sqlserver/SqlServerDatasourceProcessor.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datasource.sqlserver; - -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.encodePassword; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COMMA; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.COM_SQLSERVER_JDBC_DRIVER; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.JDBC_SQLSERVER; - -import org.apache.dolphinscheduler.plugin.task.datasource.AbstractDatasourceProcessor; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseDataSourceParamDTO; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.spi.enums.DbType; -import org.apache.dolphinscheduler.spi.utils.JSONUtils; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.collections.MapUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.LinkedHashMap; -import java.util.Map; - -public class SqlServerDatasourceProcessor extends AbstractDatasourceProcessor { - - @Override - public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { - SqlServerConnectionParam 
connectionParams = (SqlServerConnectionParam) createConnectionParams(connectionJson); - String[] hostSeperator = connectionParams.getAddress().split(DOUBLE_SLASH); - String[] hostPortArray = hostSeperator[hostSeperator.length - 1].split(COMMA); - - SqlServerDatasourceParamDTO sqlServerDatasourceParamDTO = new SqlServerDatasourceParamDTO(); - sqlServerDatasourceParamDTO.setDatabase(connectionParams.getDatabase()); - sqlServerDatasourceParamDTO.setUserName(connectionParams.getUser()); - sqlServerDatasourceParamDTO.setOther(parseOther(connectionParams.getOther())); - sqlServerDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(COLON)[1])); - sqlServerDatasourceParamDTO.setHost(hostPortArray[0].split(COLON)[0]); - return sqlServerDatasourceParamDTO; - } - - @Override - public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { - SqlServerDatasourceParamDTO sqlServerParam = (SqlServerDatasourceParamDTO) datasourceParam; - String address = String.format("%s%s:%s", JDBC_SQLSERVER, sqlServerParam.getHost(), sqlServerParam.getPort()); - String jdbcUrl = address + ";databaseName=" + sqlServerParam.getDatabase(); - - SqlServerConnectionParam sqlServerConnectionParam = new SqlServerConnectionParam(); - sqlServerConnectionParam.setAddress(address); - sqlServerConnectionParam.setDatabase(sqlServerParam.getDatabase()); - sqlServerConnectionParam.setJdbcUrl(jdbcUrl); - sqlServerConnectionParam.setOther(transformOther(sqlServerParam.getOther())); - sqlServerConnectionParam.setUser(sqlServerParam.getUserName()); - sqlServerConnectionParam.setPassword(encodePassword(sqlServerParam.getPassword())); - return sqlServerConnectionParam; - } - - @Override - public BaseConnectionParam createConnectionParams(String connectionJson) { - return JSONUtils.parseObject(connectionJson, SqlServerConnectionParam.class); - } - - @Override - public String getDatasourceDriver() { - return COM_SQLSERVER_JDBC_DRIVER; - } - - @Override - public String 
getJdbcUrl(ConnectionParam connectionParam) { - SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam; - - if (StringUtils.isNotEmpty(sqlServerConnectionParam.getOther())) { - return String.format("%s;%s", sqlServerConnectionParam.getJdbcUrl(), sqlServerConnectionParam.getOther()); - } - return sqlServerConnectionParam.getJdbcUrl(); - } - - @Override - public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { - SqlServerConnectionParam sqlServerConnectionParam = (SqlServerConnectionParam) connectionParam; - Class.forName(getDatasourceDriver()); - return DriverManager.getConnection(getJdbcUrl(connectionParam), sqlServerConnectionParam.getUser(), - decodePassword(sqlServerConnectionParam.getPassword())); - } - - @Override - public DbType getDbType() { - return DbType.SQLSERVER; - } - - private String transformOther(Map otherMap) { - if (MapUtils.isEmpty(otherMap)) { - return null; - } - StringBuilder stringBuilder = new StringBuilder(); - otherMap.forEach((key, value) -> stringBuilder.append(String.format("%s=%s;", key, value))); - return stringBuilder.toString(); - } - - private Map parseOther(String other) { - if (StringUtils.isEmpty(other)) { - return null; - } - Map otherMap = new LinkedHashMap<>(); - for (String config : other.split(";")) { - otherMap.put(config.split("=")[0], config.split("=")[1]); - } - return otherMap; - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/OSUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/OSUtils.java index e8c66a7cf1e3068fca4f48b958c6a5eb5bc50e49..f34268d1790cb5c09020e4c8327cddc2334b7628 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/OSUtils.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/util/OSUtils.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.plugin.task.util; import org.apache.dolphinscheduler.plugin.task.api.ShellExecutor; +import org.apache.dolphinscheduler.spi.utils.PropertyUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; import java.io.IOException; @@ -40,6 +41,15 @@ public class OSUtils { return StringUtils.isEmpty(tenantCode) ? command : "sudo -u " + tenantCode + " " + command; } + /** + * use sudo or not + * + * @return true is use sudo + */ + public static boolean isSudoEnable() { + return PropertyUtils.getBoolean("sudo.enable", Boolean.TRUE); + } + /** * whether is macOS * diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml index b75f26884ccadedeb958f633c9bd8ffdae2d3a2d..42c9299d9218e080d0ff8f6c62654d9861ad9b9d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/pom.xml @@ -15,20 +15,23 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-datax - dolphinscheduler-plugin + jar + + org.apache.dolphinscheduler + dolphinscheduler-datasource-all + + org.apache.dolphinscheduler dolphinscheduler-spi @@ -39,6 +42,11 @@ dolphinscheduler-task-api ${project.version} + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + com.alibaba @@ -46,9 +54,4 @@ - - - dolphinscheduler-task-datax-${project.version} - - diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java index 9087070df30ba8739ef9982c71f807c5e98247f2..216ad743c1620e0757854fede9f4c21f56cc1507 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java @@ -17,17 +17,18 @@ package org.apache.dolphinscheduler.plugin.task.datax; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; +import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword; import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.spi.task.TaskConstants.RWXR_XR_X; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.TaskResponse; -import 
org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.util.MapUtils; import org.apache.dolphinscheduler.plugin.task.util.OSUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.enums.Flag; import org.apache.dolphinscheduler.spi.task.AbstractParameters; @@ -36,10 +37,10 @@ import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils; import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; import java.io.File; @@ -144,9 +145,6 @@ public class DataxTask extends AbstractTaskExecutor { @Override public void handle() throws Exception { try { - // set the name of the current thread - String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); // replace placeholder,and combine local and global parameters Map paramsMap = ParamUtils.convert(taskExecutionContext, getParameters()); @@ -547,7 +545,7 @@ public class DataxTask extends AbstractTaskExecutor { sql = sql.replace(";", ""); try ( - Connection connection = DatasourceUtil.getConnection(sourceType, baseDataSource); + Connection connection = DataSourceClientProvider.getInstance().getConnection(sourceType, baseDataSource); PreparedStatement stmt = connection.prepareStatement(sql); ResultSet resultSet = stmt.executeQuery()) { diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java index bb35b716c0e7faddaba4814d5abb4001ba86eec5..d9c16ef0b26ac50e0ac0389c06704bb8195d7fee 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class DataxTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskPlugin.java deleted file mode 100644 index 3e2f7ad205757f0907622f979234ec8b732cb5d2..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.datax; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class DataxTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new DataxTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java index c4edc4eab0877b3e92e3a25f3842a191bdeeaadb..42ba97527284cd1f87340402ac65031827a8e7f5 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxUtils.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.plugin.task.datax; import org.apache.dolphinscheduler.spi.enums.DbType; +import com.alibaba.druid.sql.dialect.clickhouse.parser.ClickhouseStatementParser; import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser; import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser; import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser; @@ -91,6 +92,8 @@ public class DataxUtils { return new 
OracleStatementParser(sql); case SQLSERVER: return new SQLServerStatementParser(sql); + case CLICKHOUSE: + return new ClickhouseStatementParser(sql); default: return null; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml index 9d8162fc38594dd029566d3e54642eab7e76841b..ba396da4456befa16c20a5b65f74c885c8743757 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/pom.xml @@ -15,18 +15,16 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-flink - dolphinscheduler-plugin + jar @@ -40,8 +38,4 @@ ${project.version} - - - dolphinscheduler-task-flink-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannelFactory.java index 6d843c637a11d697a628e24c6ab63d5056aaffcf..7501f16fe86753ca383eb8045fa79796664e0289 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class FlinkTaskChannelFactory implements TaskChannelFactory { @Override public TaskChannel create() { diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskPlugin.java deleted file mode 100644 index 1b687fff919f13db9f6c2ba10826baa10cddd0fd..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTaskPlugin.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.flink; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class FlinkTaskPlugin implements DolphinSchedulerPlugin { - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new FlinkTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml index 6cfa3330b2175ad4b5813dac85863bd53bcbd64a..afe4f9442f180292091b277016b76e8d326bdd50 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/pom.xml @@ -15,18 +15,16 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-http - dolphinscheduler-plugin + jar @@ -40,6 +38,10 @@ ${project.version} + + org.apache.commons + commons-collections4 + org.apache.httpcomponents httpclient @@ -50,9 +52,4 @@ - - - dolphinscheduler-task-http-${project.version} - - diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java index 4f29a3117de394d9928456fa99311dead0a721ba..04978f0fed9f0e45be3ab562645686e88e6d6017 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java @@ -18,7 +18,6 @@ package 
org.apache.dolphinscheduler.plugin.task.http; import static org.apache.dolphinscheduler.plugin.task.http.HttpTaskConstants.APPLICATION_JSON; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.TASK_LOG_INFO_FORMAT; import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.util.MapUtils; @@ -27,11 +26,11 @@ import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils; import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.DateUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.Charsets; import org.apache.http.HttpEntity; import org.apache.http.ParseException; @@ -92,9 +91,6 @@ public class HttpTask extends AbstractTaskExecutor { @Override public void handle() throws Exception { - String threadLoggerInfoName = String.format(TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - long startTime = System.currentTimeMillis(); String formatTimeStamp = DateUtils.formatTimeStamp(startTime); String statusCode = null; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java index 4791b26a1673c18c0d824a963bc77e15321458ca..e69524c9ba88330a0489fa3626939bb1790b799b 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class HttpTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskPlugin.java deleted file mode 100644 index fa2a4705ca53a4a99b68ae49fbec75df3eba3731..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.http; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class HttpTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new HttpTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml index 93c266efbf236d4e767fe804fef46db9f6a50509..3995f6d3c46566d721331db40a1005b2b998eb9d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/pom.xml @@ -15,18 +15,16 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-mr - dolphinscheduler-plugin + jar @@ -39,11 +37,5 @@ dolphinscheduler-task-api ${project.version} - - - - dolphinscheduler-task-mr-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java index 165ee1a83e5e891f2fe62edfac91c3e6b321cf6a..6d1beadcbe7db46b25fde297917aea3e64cf1df3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskChannelFactory.java @@ -23,6 +23,9 @@ import 
org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class MapReduceTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskPlugin.java deleted file mode 100644 index 4e5657e1cb8b7d1189796460289dba01bfb01e43..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.mr; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class MapReduceTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new MapReduceTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml index 4a77c5a91ecc8b2a97887e578cc2a5f52756851d..7e78e0cf46a9ad0b573780a35890a9352034621b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/pom.xml @@ -15,19 +15,17 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-pigeon - dolphinscheduler-plugin + jar @@ -40,6 +38,10 @@ dolphinscheduler-spi provided + + org.apache.commons + commons-collections4 + org.slf4j slf4j-api @@ -65,18 +67,6 @@ - - - - - - - org.jacoco - org.jacoco.agent - runtime - test - - org.java-websocket Java-WebSocket @@ -98,16 +88,5 @@ org.powermock powermock-api-mockito2 - - - junit - junit - test - - - - dolphinscheduler-task-pigeon-${project.version} - - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java index 2ed7deef687aa90f3b3e03e3cd949d3c14ac2477..a17b9951e9ddf56be107d9ae4b2cf7374744c101 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java @@ -21,10 +21,10 @@ import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.spi.task.AbstractParameters; import org.apache.dolphinscheduler.spi.task.TaskConstants; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.commons.collections4.CollectionUtils; import org.apache.http.HttpEntity; import org.apache.http.StatusLine; import org.apache.http.client.methods.CloseableHttpResponse; @@ -178,7 +178,7 @@ public class PigeonTask extends AbstractTaskExecutor { if (!cancelResult.isSuccess()) { List errormsg = triggerResult.getErrormsg(); StringBuffer errs = new StringBuffer(); - if (org.apache.dolphinscheduler.spi.utils.CollectionUtils.isNotEmpty(errormsg)) { + if (CollectionUtils.isNotEmpty(errormsg)) { errs.append(",errs:").append(errormsg.stream().collect(Collectors.joining(","))); } throw new Exception("cancel PIGEON job faild taskId:" + triggerResult.getTaskId() + errs.toString()); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannelFactory.java index bc0c6f2b9ba0f0d68d1dd1372ebc1708c2343174..290acd6f77fcb1a4d916c560eef0d2f1a5dd9b60 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannelFactory.java 
+++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannelFactory.java @@ -26,6 +26,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.Arrays; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class PigeonTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/pom.xml index 4726e7deaad6510ba8120d27ec75a4dd1f8a94e1..c1893d1d83e3710cce58781ece2e9db47340a441 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/pom.xml @@ -15,20 +15,23 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-procedure - dolphinscheduler-plugin + jar + + org.apache.dolphinscheduler + dolphinscheduler-datasource-all + + org.apache.dolphinscheduler dolphinscheduler-spi @@ -39,10 +42,10 @@ dolphinscheduler-task-api ${project.version} + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + - - - dolphinscheduler-task-procedure-${project.version} - - diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureParameters.java index bfb56c2f5fbf4f8e06f22347a44dff28079be10c..d0e35b964263757a5289d21e10490dce6f47b1f3 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureParameters.java @@ -17,12 +17,16 @@ package org.apache.dolphinscheduler.plugin.task.procedure; +import org.apache.commons.collections.CollectionUtils; import org.apache.dolphinscheduler.spi.task.AbstractParameters; +import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.ResourceInfo; import org.apache.dolphinscheduler.spi.utils.StringUtils; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; /** * procedure parameter @@ -39,6 +43,8 @@ public class ProcedureParameters extends AbstractParameters { */ private int datasource; + private Map outProperty; + /** * procedure name */ @@ -86,4 +92,33 @@ public class ProcedureParameters extends AbstractParameters { + ", method='" + method + '\'' + '}'; } + + public void dealOutParam4Procedure(Object result, String pop) { + Map properties = getOutProperty(); + if (this.outProperty == null) { + return; + } + properties.get(pop).setValue(String.valueOf(result)); + varPool.add(properties.get(pop)); + } + + public Map getOutProperty() { + if (this.outProperty != null) { + return this.outProperty; + } + if (CollectionUtils.isEmpty(localParams)) { + return null; + } + List outPropertyList = getOutProperty(localParams); + Map outProperty = new HashMap<>(); + for (Property info : outPropertyList) { + outProperty.put(info.getProp(), info); + } + this.outProperty = outProperty; + return this.outProperty; + } + + public void setOutProperty(Map outProperty) { + this.outProperty = outProperty; + } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java index 397a1500a3ed04c12bbb401d6bedac6aee53c0df..35a95f138104946019a8239a5435ab5a82e318aa 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java @@ -19,11 +19,11 @@ package org.apache.dolphinscheduler.plugin.task.procedure; import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_FAILURE; import static org.apache.dolphinscheduler.spi.task.TaskConstants.EXIT_CODE_SUCCESS; -import static org.apache.dolphinscheduler.spi.task.TaskConstants.TASK_LOG_INFO_FORMAT; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; -import org.apache.dolphinscheduler.plugin.task.datasource.ConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.DatasourceUtil; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; import org.apache.dolphinscheduler.spi.enums.DataType; import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.enums.TaskTimeoutStrategy; @@ -33,7 +33,6 @@ import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils; import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; @@ -42,10 +41,10 @@ import 
java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Types; -import java.util.Collection; import java.util.HashMap; import java.util.Map; + /** * procedure task */ @@ -83,9 +82,6 @@ public class ProcedureTask extends AbstractTaskExecutor { @Override public void handle() throws Exception { - // set the name of the current thread - String threadLoggerInfoName = String.format(TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); logger.info("procedure type : {}, datasource : {}, method : {} , localParams : {}", procedureParameters.getType(), @@ -103,19 +99,18 @@ public class ProcedureTask extends AbstractTaskExecutor { taskExecutionContext.getProcedureTaskExecutionContext().getConnectionParams()); // get jdbc connection - connection = DatasourceUtil.getConnection(dbType, connectionParam); - - // combining local and global parameters - Map paramsMap = ParamUtils.convert(taskExecutionContext,getParameters()); - + connection = DataSourceClientProvider.getInstance().getConnection(dbType, connectionParam); + Map sqlParamsMap = new HashMap<>(); + Map paramsMap = ParamUtils.convert(taskExecutionContext, getParameters()); + String proceduerSql = formatSql(sqlParamsMap, paramsMap); // call method - stmt = connection.prepareCall(procedureParameters.getMethod()); + stmt = connection.prepareCall(proceduerSql); // set timeout setTimeout(stmt); // outParameterMap - Map outParameterMap = getOutParameterMap(stmt, paramsMap); + Map outParameterMap = getOutParameterMap(stmt, sqlParamsMap, paramsMap); stmt.executeUpdate(); @@ -132,6 +127,12 @@ public class ProcedureTask extends AbstractTaskExecutor { } } + private String formatSql(Map sqlParamsMap, Map paramsMap) { + // combining local and global parameters + setSqlParamsMap(procedureParameters.getMethod(), rgex, sqlParamsMap, paramsMap, taskExecutionContext.getTaskInstanceId()); + return 
procedureParameters.getMethod().replaceAll(rgex, "?"); + } + /** * print outParameter * @@ -147,7 +148,7 @@ public class ProcedureTask extends AbstractTaskExecutor { String prop = property.getProp(); DataType dataType = property.getType(); // get output parameter - getOutputParameter(stmt, index, prop, dataType); + procedureParameters.dealOutParam4Procedure(getOutputParameter(stmt, index, prop, dataType), prop); } } @@ -159,34 +160,25 @@ public class ProcedureTask extends AbstractTaskExecutor { * @return outParameterMap * @throws Exception Exception */ - private Map getOutParameterMap(CallableStatement stmt, Map paramsMap) throws Exception { + private Map getOutParameterMap(CallableStatement stmt, Map paramsMap + , Map totalParamsMap) throws Exception { Map outParameterMap = new HashMap<>(); if (procedureParameters.getLocalParametersMap() == null) { return outParameterMap; } - Collection userDefParamsList = procedureParameters.getLocalParametersMap().values(); - - if (CollectionUtils.isEmpty(userDefParamsList)) { - return outParameterMap; - } - int index = 1; - for (Property property : userDefParamsList) { - logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" - , property.getProp(), - property.getDirect(), - property.getType(), - property.getValue()); - // set parameters - if (property.getDirect().equals(Direct.IN)) { - ParameterUtils.setInParameter(index, stmt, property.getType(), paramsMap.get(property.getProp()).getValue()); - } else if (property.getDirect().equals(Direct.OUT)) { - setOutParameter(index, stmt, property.getType(), paramsMap.get(property.getProp()).getValue()); - property.setValue(paramsMap.get(property.getProp()).getValue()); - outParameterMap.put(index, property); + if (paramsMap != null) { + for (Map.Entry entry : paramsMap.entrySet()) { + Property property = entry.getValue(); + if (property.getDirect().equals(Direct.IN)) { + ParameterUtils.setInParameter(index, stmt, property.getType(), 
totalParamsMap.get(property.getProp()).getValue()); + } else if (property.getDirect().equals(Direct.OUT)) { + setOutParameter(index, stmt, property.getType(), totalParamsMap.get(property.getProp()).getValue()); + outParameterMap.put(index, property); + } + index++; } - index++; } return outParameterMap; @@ -237,38 +229,49 @@ public class ProcedureTask extends AbstractTaskExecutor { * @param dataType dataType * @throws SQLException SQLException */ - private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { + private Object getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { + Object value = null; switch (dataType) { case VARCHAR: logger.info("out prameter varchar key : {} , value : {}", prop, stmt.getString(index)); + value = stmt.getString(index); break; case INTEGER: logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index)); + value = stmt.getInt(index); break; case LONG: logger.info("out prameter long key : {} , value : {}", prop, stmt.getLong(index)); + value = stmt.getLong(index); break; case FLOAT: logger.info("out prameter float key : {} , value : {}", prop, stmt.getFloat(index)); + value = stmt.getFloat(index); break; case DOUBLE: logger.info("out prameter double key : {} , value : {}", prop, stmt.getDouble(index)); + value = stmt.getDouble(index); break; case DATE: logger.info("out prameter date key : {} , value : {}", prop, stmt.getDate(index)); + value = stmt.getDate(index); break; case TIME: logger.info("out prameter time key : {} , value : {}", prop, stmt.getTime(index)); + value = stmt.getTime(index); break; case TIMESTAMP: logger.info("out prameter timestamp key : {} , value : {}", prop, stmt.getTimestamp(index)); + value = stmt.getTimestamp(index); break; case BOOLEAN: logger.info("out prameter boolean key : {} , value : {}", prop, stmt.getBoolean(index)); + value = stmt.getBoolean(index); break; default: 
break; } + return value; } @Override @@ -323,4 +326,4 @@ public class ProcedureTask extends AbstractTaskExecutor { stmt.registerOutParameter(index, sqlType, value); } } -} \ No newline at end of file +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskChannelFactory.java index 2530feb37d71bf913adba5693f853815d4a65d58..d313ddfc3f95f4c9c5e1ee52d16beb9fdb3bae8b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class ProcedureTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskPlugin.java deleted file mode 100644 index 160bccd2361b4f3c8cfca054e2327ed5110c41d5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.procedure; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class ProcedureTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new ProcedureTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/pom.xml index 529359522707b9f21681272b46dc8fea2e67ec92..a1af6be7ce01e11efdfd6b0f6e5e8553da024200 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/pom.xml @@ -15,33 +15,25 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-python - dolphinscheduler-plugin + jar - - org.apache.dolphinscheduler - dolphinscheduler-spi - provided - - - org.apache.dolphinscheduler - dolphinscheduler-task-api - ${project.version} - + + org.apache.dolphinscheduler + dolphinscheduler-spi + + + org.apache.dolphinscheduler + dolphinscheduler-task-api + - - - dolphinscheduler-task-python-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonCommandExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonCommandExecutor.java deleted file mode 100644 index 8e118f94bec65132b4084397ad124434aadbc848..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonCommandExecutor.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.dolphinscheduler.plugin.task.python; - -import org.apache.dolphinscheduler.plugin.task.api.AbstractCommandExecutor; -import org.apache.dolphinscheduler.spi.task.request.TaskRequest; -import org.apache.dolphinscheduler.spi.utils.StringUtils; - -import org.apache.commons.io.FileUtils; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.function.Consumer; -import java.util.regex.Pattern; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * python command executor - */ -public class PythonCommandExecutor extends AbstractCommandExecutor { - - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(PythonCommandExecutor.class); - - /** - * python - */ - public static final String PYTHON = "python"; - - private static final Pattern PYTHON_PATH_PATTERN = Pattern.compile("/bin/python[\\d.]*$"); - - /** - * constructor - * - * @param logHandler log handler - * @param taskRequest TaskRequest - * @param logger logger - */ - public PythonCommandExecutor(Consumer> logHandler, - TaskRequest taskRequest, - Logger logger) { - super(logHandler, taskRequest, logger); - } - - - /** - * build command file path - * - * @return command file path - */ - @Override - protected String buildCommandFilePath() { - return String.format("%s/py_%s.command", taskRequest.getExecutePath(), taskRequest.getTaskAppId()); - } - - /** - * create command file if not exists - * - * @param execCommand exec command - * @param commandFile command file - * @throws IOException io exception - */ - @Override - protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { - logger.info("tenantCode :{}, task dir:{}", 
taskRequest.getTenantCode(), taskRequest.getExecutePath()); - - if (!Files.exists(Paths.get(commandFile))) { - logger.info("generate command file:{}", commandFile); - - StringBuilder sb = new StringBuilder(); - sb.append("#-*- encoding=utf8 -*-\n"); - - sb.append("\n\n"); - sb.append(execCommand); - logger.info(sb.toString()); - - // write data to file - FileUtils.writeStringToFile(new File(commandFile), - sb.toString(), - StandardCharsets.UTF_8); - } - } - - /** - * get the absolute path of the Python command - * note : - * common.properties - * PYTHON_HOME configured under common.properties is Python absolute path, not PYTHON_HOME itself - *

- * for example : - * your PYTHON_HOM is /opt/python3.7/ - * you must set PYTHON_HOME is /opt/python3.7/python under nder common.properties - * dolphinscheduler.env.path file. - * - * @param envPath env path - * @return python home - */ - private static String getPythonHome(String envPath) { - // BufferedReader br = null; - StringBuilder sb = new StringBuilder(); - try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(envPath)));) { - String line; - while ((line = br.readLine()) != null) { - if (line.contains(PythonConstants.PYTHON_HOME)) { - sb.append(line); - break; - } - } - String result = sb.toString(); - if (StringUtils.isEmpty(result)) { - return null; - } - String[] arrs = result.split(PythonConstants.EQUAL_SIGN); - if (arrs.length == 2) { - return arrs[1]; - } - } catch (IOException e) { - logger.error("read file failure", e); - } - return null; - } - - /** - * Gets the command path to which Python can execute - * @return python command path - */ - @Override - protected String commandInterpreter() { - String pythonHome = getPythonHome(taskRequest.getEnvFile()); - return getPythonCommand(pythonHome); - } - - /** - * get python command - * - * @param pythonHome python home - * @return python command - */ - public static String getPythonCommand(String pythonHome) { - if (StringUtils.isEmpty(pythonHome)) { - return PYTHON; - } - File file = new File(pythonHome); - if (file.exists() && file.isFile()) { - return pythonHome; - } - if (PYTHON_PATH_PATTERN.matcher(pythonHome).find()) { - return pythonHome; - } - return Paths.get(pythonHome, "/bin/python").toString(); - } - -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java index 
01457da6421a0d463304fcef25691991044058fc..382f1778b929afc9c7221d0833ca81945583fb50 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.plugin.task.python; import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; +import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor; import org.apache.dolphinscheduler.plugin.task.api.TaskException; import org.apache.dolphinscheduler.plugin.task.api.TaskResponse; import org.apache.dolphinscheduler.plugin.task.util.MapUtils; @@ -29,6 +30,13 @@ import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.commons.io.FileUtils; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; @@ -43,14 +51,9 @@ public class PythonTask extends AbstractTaskExecutor { private PythonParameters pythonParameters; /** - * task dir - */ - private String taskDir; - - /** - * python command executor + * shell command executor */ - private PythonCommandExecutor pythonCommandExecutor; + private ShellCommandExecutor shellCommandExecutor; private TaskRequest taskRequest; @@ -63,7 +66,7 @@ public class PythonTask extends AbstractTaskExecutor { super(taskRequest); this.taskRequest = taskRequest; - this.pythonCommandExecutor = new PythonCommandExecutor(this::logHandle, + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskRequest, logger); } @@ -93,13 +96,20 @@ public class PythonTask extends AbstractTaskExecutor { @Override public 
void handle() throws Exception { try { - // construct process - String command = buildCommand(); - TaskResponse taskResponse = pythonCommandExecutor.run(command); + // generate the content of this python script + String pythonScriptContent = buildPythonScriptContent(); + // generate the file path of this python script + String pythonScriptFile = buildPythonCommandFilePath(); + + // create this file + createPythonCommandFileIfNotExists(pythonScriptContent,pythonScriptFile); + String command = "python " + pythonScriptFile; + + TaskResponse taskResponse = shellCommandExecutor.run(command); setExitStatusCode(taskResponse.getExitStatusCode()); setAppIds(taskResponse.getAppIds()); setProcessId(taskResponse.getProcessId()); - setVarPool(pythonCommandExecutor.getVarPool()); + setVarPool(shellCommandExecutor.getVarPool()); } catch (Exception e) { logger.error("python task failure", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); @@ -110,7 +120,7 @@ public class PythonTask extends AbstractTaskExecutor { @Override public void cancelApplication(boolean cancelApplication) throws Exception { // cancel process - pythonCommandExecutor.cancelApplication(); + shellCommandExecutor.cancelApplication(); } @Override @@ -151,12 +161,48 @@ public class PythonTask extends AbstractTaskExecutor { } /** - * build command + * create python command file if not exists + * + * @param pythonScript exec python script + * @param pythonScriptFile python script file + * @throws IOException io exception + */ + protected void createPythonCommandFileIfNotExists(String pythonScript, String pythonScriptFile) throws IOException { + logger.info("tenantCode :{}, task dir:{}", taskRequest.getTenantCode(), taskRequest.getExecutePath()); + + if (!Files.exists(Paths.get(pythonScriptFile))) { + logger.info("generate python script file:{}", pythonScriptFile); + + StringBuilder sb = new StringBuilder(); + sb.append("#-*- encoding=utf8 -*-\n"); + + sb.append("\n\n"); + sb.append(pythonScript); + 
logger.info(sb.toString()); + + // write data to file + FileUtils.writeStringToFile(new File(pythonScriptFile), + sb.toString(), + StandardCharsets.UTF_8); + } + } + + /** + * build python command file path + * + * @return python command file path + */ + protected String buildPythonCommandFilePath() { + return String.format("%s/py_%s.py", taskRequest.getExecutePath(), taskRequest.getTaskAppId()); + } + + /** + * build python script content * * @return raw python script * @throws Exception exception */ - private String buildCommand() throws Exception { + private String buildPythonScriptContent() throws Exception { String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n"); // replace placeholder @@ -170,7 +216,6 @@ public class PythonTask extends AbstractTaskExecutor { rawPythonScript = ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap)); logger.info("raw python script : {}", pythonParameters.getRawScript()); - logger.info("task dir : {}", taskDir); return rawPythonScript; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskChannelFactory.java index c41938b38c058cfe43bb0effdb72a34d635fa57a..55514e48f64bc5194cbc96c593893bba1bd27a98 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTaskChannelFactory.java @@ -21,6 +21,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class 
PythonTaskChannelFactory implements TaskChannelFactory { @Override public TaskChannel create() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml index adf300835703fdec72c354d221a87e313da65ed6..61c2f5ab544b777c92009d99ba440b03432e67d2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/pom.xml @@ -15,18 +15,16 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-shell - dolphinscheduler-plugin + jar @@ -45,8 +43,4 @@ commons-collections4 - - - dolphinscheduler-task-shell-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java index 5d9fb80ce145ae739346052361aa2c30ae48c65e..43859e8da10338f1bcf2dea143c7f9b5a86e164d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java @@ -158,7 +158,7 @@ public class ShellTask extends AbstractTaskExecutor { private String parseScript(String script) { // combining local and global parameters - Map paramsMap = ParamUtils.convert(taskExecutionContext,getParameters()); + Map paramsMap = ParamUtils.convert(taskExecutionContext, getParameters()); if (MapUtils.isEmpty(paramsMap)) { paramsMap = new HashMap<>(); } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskChannelFactory.java index da294c511f845d31f15f4703efa0d6a56787ab2e..521e59bcce98cc7da33eb3b3c896999c50a47404 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskChannelFactory.java @@ -28,6 +28,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.ArrayList; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class ShellTaskChannelFactory implements TaskChannelFactory { @Override public TaskChannel create() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskPlugin.java deleted file mode 100644 index 512c5cffe1e67c573309205d57c83c4386cae13f..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.shell; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class ShellTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new ShellTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/pom.xml index 87d8c3fc958b02a55f2bb4cf1ea6709ef03a07e6..534f5d1640260981d964a00b708dcbb5a92e4adb 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/pom.xml @@ -15,17 +15,15 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-spark - dolphinscheduler-plugin + jar @@ -39,8 +37,4 @@ ${project.version} - - - dolphinscheduler-task-spark-${project.version} - diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskChannelFactory.java index 29be5773ea1ad6c2c23991651ab9cf9a5158b017..a468a9274f2e48a89ddd7ad6ee0ef368dfc91cb4 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class SparkTaskChannelFactory implements TaskChannelFactory { @Override public String getName() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskPlugin.java deleted file mode 100644 index ed35d3d5b5b9cabe325145d13362587306fe0c36..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.spark; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class SparkTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new SparkTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/pom.xml index c2a44f5bc8e9db6a0d83fec2fe31c14ea4834c5c..d0fb376259e14b728b1ec38f7e54873dd647236b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/pom.xml @@ -15,20 +15,23 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-sql - dolphinscheduler-plugin + jar + + org.apache.dolphinscheduler + dolphinscheduler-datasource-all + + org.apache.dolphinscheduler dolphinscheduler-spi @@ -39,6 +42,11 @@ dolphinscheduler-task-api ${project.version} + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + com.fasterxml.jackson.core @@ -47,8 +55,4 @@ - - - dolphinscheduler-task-sql-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlParameters.java index 487c5bd481d555514dcbc62e381126d617abb8a8..40d6d9cbd3cbbb3e3ef30b1501d7ca0a7e74941e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlParameters.java @@ -21,10 +21,11 @@ import org.apache.dolphinscheduler.spi.enums.DataType; import org.apache.dolphinscheduler.spi.task.AbstractParameters; import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.ResourceInfo; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.commons.collections.CollectionUtils; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java index cc18d5e4688116b6582d3332f736f54a328fed56..2ae7769af918ae094565036ea7bfab5574709639 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java @@ -17,11 +17,13 @@ package org.apache.dolphinscheduler.plugin.task.sql; +import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClientProvider; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.api.AbstractTaskExecutor; import org.apache.dolphinscheduler.plugin.task.api.TaskException; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.util.MapUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.spi.task.AbstractParameters; @@ -34,10 +36,11 @@ import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; import org.apache.dolphinscheduler.spi.task.request.SQLTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; import org.apache.dolphinscheduler.spi.task.request.UdfFuncRequest; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; import org.apache.dolphinscheduler.spi.utils.JSONUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; +import org.apache.commons.collections.CollectionUtils; + import java.sql.Connection; import 
java.sql.PreparedStatement; import java.sql.ResultSet; @@ -49,7 +52,9 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -84,7 +89,7 @@ public class SqlTask extends AbstractTaskExecutor { /** * default query sql limit */ - private static final int LIMIT = 10000; + private static final int QUERY_LIMIT = 10000; /** * Abstract Yarn Task @@ -109,9 +114,6 @@ public class SqlTask extends AbstractTaskExecutor { @Override public void handle() throws Exception { - // set the name of the current thread - String threadLoggerInfoName = String.format(TaskConstants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); logger.info("Full sql parameters: {}", sqlParameters); logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {},varPool : {} ,query max result limit {}", @@ -146,7 +148,7 @@ public class SqlTask extends AbstractTaskExecutor { .collect(Collectors.toList()); List createFuncs = createFuncs(sqlTaskExecutionContext.getUdfFuncTenantCodeMap(), - logger); + sqlTaskExecutionContext.getDefaultFS(), logger); // execute sql task executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); @@ -178,7 +180,7 @@ public class SqlTask extends AbstractTaskExecutor { try { // create connection - connection = DatasourceUtil.getConnection(DbType.valueOf(sqlParameters.getType()), baseConnectionParam); + connection = DataSourceClientProvider.getInstance().getConnection(DbType.valueOf(sqlParameters.getType()), baseConnectionParam); // create temp function if (CollectionUtils.isNotEmpty(createFuncs)) { createTempFunction(connection, createFuncs); @@ -239,7 +241,7 @@ public class SqlTask extends AbstractTaskExecutor { 
int num = md.getColumnCount(); int rowCount = 0; - int limit = sqlParameters.getLimit() == 0 ? LIMIT : sqlParameters.getLimit(); + int limit = sqlParameters.getLimit() == 0 ? QUERY_LIMIT : sqlParameters.getLimit(); while (rowCount < limit && resultSet.next()) { ObjectNode mapOfColValues = JSONUtils.createObjectNode(); @@ -249,7 +251,6 @@ public class SqlTask extends AbstractTaskExecutor { resultJSONArray.add(mapOfColValues); rowCount++; } - int displayRows = sqlParameters.getDisplayRows() > 0 ? sqlParameters.getDisplayRows() : TaskConstants.DEFAULT_DISPLAY_ROWS; displayRows = Math.min(displayRows, resultJSONArray.size()); logger.info("display sql result {} rows as follows:", displayRows); @@ -257,6 +258,11 @@ public class SqlTask extends AbstractTaskExecutor { String row = JSONUtils.toJsonString(resultJSONArray.get(i)); logger.info("row {} : {}", i + 1, row); } + if (resultSet.next()) { + logger.info("sql result limit : {} exceeding results are filtered", limit); + String log = String.format("sql result limit : %d exceeding results are filtered", limit); + resultJSONArray.add(JSONUtils.toJsonNode(log)); + } } String result = JSONUtils.toJsonString(resultJSONArray); if (sqlParameters.getSendEmail() == null || sqlParameters.getSendEmail()) { @@ -280,6 +286,7 @@ public class SqlTask extends AbstractTaskExecutor { taskAlertInfo.setAlertGroupId(groupId); taskAlertInfo.setContent(content); taskAlertInfo.setTitle(title); + setTaskAlertInfo(taskAlertInfo); } /** @@ -398,35 +405,6 @@ public class SqlTask extends AbstractTaskExecutor { } - /** - * regular expressions match the contents between two specified strings - * - * @param content content - * @param rgex rgex - * @param sqlParamsMap sql params map - * @param paramsPropsMap params props map - */ - public void setSqlParamsMap(String content, String rgex, Map sqlParamsMap, Map paramsPropsMap) { - Pattern pattern = Pattern.compile(rgex); - Matcher m = pattern.matcher(content); - int index = 1; - while (m.find()) { - - 
String paramName = m.group(1); - Property prop = paramsPropsMap.get(paramName); - - if (prop == null) { - logger.error("setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance" - + " with id: {}. So couldn't put Property in sqlParamsMap.", paramName, taskExecutionContext.getTaskInstanceId()); - } else { - sqlParamsMap.put(index, prop); - index++; - logger.info("setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content); - } - - } - } - /** * print replace sql * @@ -458,6 +436,8 @@ public class SqlTask extends AbstractTaskExecutor { Map sqlParamsMap = new HashMap<>(); StringBuilder sqlBuilder = new StringBuilder(); + //replace variable TIME with $[YYYYmmddd...] in sql when history run job and batch complement job + sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime()); // combining local and global parameters Map paramsMap = ParamUtils.convert(taskExecutionContext, getParameters()); @@ -474,12 +454,8 @@ public class SqlTask extends AbstractTaskExecutor { sqlParameters.setTitle(title); } - //new - //replace variable TIME with $[YYYYmmddd...] 
in sql when history run job and batch complement job - sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime()); // special characters need to be escaped, ${} needs to be escaped - String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; - setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); + setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap,taskExecutionContext.getTaskInstanceId()); //Replace the original value in sql !{...} ,Does not participate in precompilation String rgexo = "['\"]*\\!\\{(.*?)\\}['\"]*"; sql = replaceOriginalValue(sql, rgexo, paramsMap); @@ -513,13 +489,17 @@ public class SqlTask extends AbstractTaskExecutor { * @param logger logger * @return create function list */ - public static List createFuncs(Map udfFuncTenantCodeMap, Logger logger) { + public static List createFuncs(Map udfFuncTenantCodeMap, String defaultFS, Logger logger) { if (MapUtils.isEmpty(udfFuncTenantCodeMap)) { logger.info("can't find udf function resource"); return null; } List funcList = new ArrayList<>(); + + // build jar sql + buildJarSql(funcList, udfFuncTenantCodeMap, defaultFS); + // build temp function sql buildTempFuncSql(funcList, new ArrayList<>(udfFuncTenantCodeMap.keySet())); @@ -541,4 +521,21 @@ public class SqlTask extends AbstractTaskExecutor { } } + /** + * build jar sql + * @param sqls sql list + * @param udfFuncTenantCodeMap key is udf function,value is tenant code + */ + private static void buildJarSql(List sqls, Map udfFuncTenantCodeMap, String defaultFS) { + String resourceFullName; + Set> entries = udfFuncTenantCodeMap.entrySet(); + for (Map.Entry entry : entries) { + String prefixPath = defaultFS.startsWith("file://") ? "file://" : defaultFS; + String uploadPath = CommonUtils.getHdfsUdfDir(entry.getValue()); + resourceFullName = entry.getKey().getResourceName(); + resourceFullName = resourceFullName.startsWith("/") ? 
resourceFullName : String.format("/%s", resourceFullName); + sqls.add(String.format("add jar %s%s%s", prefixPath, uploadPath, resourceFullName)); + } + } + } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskChannelFactory.java index 47091ebb70a4b05af9b83965b42e827c419b1474..72596719542713392cf14bf9f1f455b0ada56f14 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class SqlTaskChannelFactory implements TaskChannelFactory { @Override public String getName() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskPlugin.java deleted file mode 100644 index 65a3977bba809682a6aa89c573f99326dd8df6e5..0000000000000000000000000000000000000000 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.sql; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class SqlTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new SqlTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/pom.xml index 1229c845f6b3fee5314a807e570c1e96518e1586..7031b60148f9d00c13f3300d62099cd3f28c7412 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/pom.xml @@ -15,18 +15,16 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + dolphinscheduler-task-plugin org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 dolphinscheduler-task-sqoop - dolphinscheduler-plugin + jar @@ -34,15 +32,20 @@ dolphinscheduler-spi provided + + + org.apache.commons + commons-collections4 + org.apache.dolphinscheduler dolphinscheduler-task-api ${project.version} - + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + - - - dolphinscheduler-task-sqoop-${project.version} - - \ No newline at end of file + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannelFactory.java index c89ed00020c74f0d695334f28880133af766ec71..462c0397f0b84a42c3c3820aa793ba8503ade39e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannelFactory.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskChannelFactory.java @@ -23,6 +23,9 @@ import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; import java.util.List; +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) public class SqoopTaskChannelFactory implements TaskChannelFactory { @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskPlugin.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskPlugin.java deleted file mode 100644 index e300ee50ee7abec48a4647407e2e92b4d565428f..0000000000000000000000000000000000000000 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTaskPlugin.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.plugin.task.sqoop; - -import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin; -import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; - -import com.google.common.collect.ImmutableList; - -public class SqoopTaskPlugin implements DolphinSchedulerPlugin { - - @Override - public Iterable getTaskChannelFactorys() { - return ImmutableList.of(new SqoopTaskChannelFactory()); - } -} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java index 36a99a56b8ec2f2b2e8348643a2513388a5f4925..0635296433a8b8964ca8368f46d716c6b463a86a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java @@ -24,7 +24,8 @@ import static org.apache.dolphinscheduler.spi.task.TaskConstants.SPACE; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.spi.task.Property; -import org.apache.dolphinscheduler.spi.utils.CollectionUtils; + +import org.apache.commons.collections4.CollectionUtils; import java.util.List; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MysqlSourceGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MysqlSourceGenerator.java index b1ee6a9c4c6bac17c6b53e9779eedb4b605cf921..998ebcae50e6d09f9a9ea3e9a1f46ca193f64db9 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MysqlSourceGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MysqlSourceGenerator.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator.sources; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; +import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD; @@ -34,12 +34,12 @@ import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_QUOTES; 
import static org.apache.dolphinscheduler.spi.task.TaskConstants.EQUAL_SIGN; import static org.apache.dolphinscheduler.spi.task.TaskConstants.SPACE; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.sqoop.SqoopQueryType; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ISourceGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceMysqlParameter; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.task.Property; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java index 05ba68fe9e076500ba9504fc230a10bcbd055990..5e768f3850fa13f498d2d02d49486d7e25561511 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java @@ -27,6 +27,7 @@ import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE_ import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE_PARTITION_KEY; import static 
org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE_PARTITION_VALUE; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.HIVE_TABLE; +import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.TARGET_DIR; import static org.apache.dolphinscheduler.spi.task.TaskConstants.SPACE; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator; @@ -91,6 +92,11 @@ public class HiveTargetGenerator implements ITargetGenerator { .append(SPACE).append(targetHiveParameter.getHivePartitionValue()); } + if (StringUtils.isNotEmpty(targetHiveParameter.getHiveTargetDir())) { + hiveTargetSb.append(SPACE).append(TARGET_DIR) + .append(SPACE).append(targetHiveParameter.getHiveTargetDir()); + } + } } catch (Exception e) { logger.error(String.format("Sqoop hive target params build failed: [%s]", e.getMessage())); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MysqlTargetGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MysqlTargetGenerator.java index 8c3450262a73c563c980a650d15c541bba240c6e..642fe1105634065800dd7839c3517f8e6c145dd6 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MysqlTargetGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MysqlTargetGenerator.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.plugin.task.sqoop.generator.targets; -import static org.apache.dolphinscheduler.plugin.task.datasource.PasswordUtils.decodePassword; +import static org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils.decodePassword; import static 
org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.COLUMNS; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_CONNECT; import static org.apache.dolphinscheduler.plugin.task.sqoop.SqoopConstants.DB_PWD; @@ -31,11 +31,11 @@ import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_QUOTES; import static org.apache.dolphinscheduler.spi.task.TaskConstants.SINGLE_QUOTES; import static org.apache.dolphinscheduler.spi.task.TaskConstants.SPACE; -import org.apache.dolphinscheduler.plugin.task.datasource.BaseConnectionParam; -import org.apache.dolphinscheduler.plugin.task.datasource.DatasourceUtil; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DatasourceUtil; import org.apache.dolphinscheduler.plugin.task.sqoop.generator.ITargetGenerator; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.SqoopParameters; import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetMysqlParameter; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.task.request.TaskRequest; import org.apache.dolphinscheduler.spi.utils.JSONUtils; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java index 7358de7546ba86a6d10ebe03b9ff9c72a9d9876d..9f2579fb26fb7dae337a3b65763e2368d182f288 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/parameter/targets/TargetHiveParameter.java @@ -54,6 
+54,10 @@ public class TargetHiveParameter { * hive partition value */ private String hivePartitionValue; + /** + * hive target dir + */ + private String hiveTargetDir; public String getHiveDatabase() { return hiveDatabase; @@ -118,4 +122,12 @@ public class TargetHiveParameter { public void setHivePartitionValue(String hivePartitionValue) { this.hivePartitionValue = hivePartitionValue; } + + public String getHiveTargetDir() { + return hiveTargetDir; + } + + public void setHiveTargetDir(String hiveTargetDir) { + this.hiveTargetDir = hiveTargetDir; + } } diff --git a/dolphinscheduler-task-plugin/pom.xml b/dolphinscheduler-task-plugin/pom.xml index bd223db8e12846e5681ae732804f2decbcc435f7..78f78058b046721207f6de7d336934235bad4b2d 100644 --- a/dolphinscheduler-task-plugin/pom.xml +++ b/dolphinscheduler-task-plugin/pom.xml @@ -15,13 +15,11 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - + dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -42,6 +40,4 @@ dolphinscheduler-task-procedure dolphinscheduler-task-pigeon - - - \ No newline at end of file + diff --git a/dolphinscheduler-ui/pom.xml b/dolphinscheduler-ui/pom.xml index a50b82e674021c3c99f77c2d704585226f426fe4..25361ceb210aac62cb7dfde1be216611956923cf 100644 --- a/dolphinscheduler-ui/pom.xml +++ b/dolphinscheduler-ui/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 2.0.0-SNAPSHOT + 2.0.10-SNAPSHOT 4.0.0 @@ -89,61 +89,6 @@ - - rpmbuild - - - - com.github.eirslett - frontend-maven-plugin - ${frontend-maven-plugin.version} - - - install node and npm - - install-node-and-npm - - - ${node.version} - ${npm.version} - - - - npm install node-sass --unsafe-perm - - npm - - generate-resources - - install node-sass --unsafe-perm - - - - npm install - - npm - - generate-resources - - install - - - - npm run build:release - - npm - - - run build:release - - - - - - - - - diff --git 
a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss index bcf6a36b4d577176b9bf14fd9574d546388588f4..ea15323d400349d1d22d0b70b4faff697b6f53d9 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.scss @@ -34,14 +34,35 @@ .minimap { position: absolute; - width: 300px; - height: 200px; - right: 10px; - bottom: 10px; + right: 0px; + bottom: 0px; border: dashed 1px #e4e4e4; z-index: 9; } + .scale-slider{ + position: absolute; + height: 140px; + width: 70px; + right: 0px; + bottom: 140px; + z-index: 9; + display: flex; + justify-content: center; + + ::v-deep .el-slider__runway{ + background-color: #fff; + } + + .scale-title{ + position: absolute; + top: -30px; + left: 22px; + font-size: 12px; + color: #666; + } + } + .context-menu{ position: absolute; left: 100px; diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue index 2e4bc44bf457965d7866a262168a1d7d5868432a..38fb84936100d56d00e7724f1336137f5ed31cff 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/canvas.vue @@ -27,8 +27,21 @@ >

+
+ {{$t('dagScale')}} + +
+ @@ -37,41 +50,41 @@ import { Graph, DataUri } from '@antv/x6' import dagTaskbar from './taskbar.vue' import contextMenu from './contextMenu.vue' + import layoutConfigModal, { LAYOUT_TYPE, DEFAULT_LAYOUT_CONFIG } from './layoutConfigModal.vue' import { - NODE_PROPS, - EDGE_PROPS, - PORT_PROPS, + NODE, + EDGE, X6_NODE_NAME, - X6_PORT_OUT_NAME, - X6_PORT_IN_NAME, X6_EDGE_NAME, - NODE_HIGHLIGHT_PROPS, - PORT_HIGHLIGHT_PROPS, - EDGE_HIGHLIGHT_PROPS, NODE_STATUS_MARKUP } from './x6-helper' - import { DagreLayout } from '@antv/layout' + import { DagreLayout, GridLayout } from '@antv/layout' import { tasksType, tasksState } from '../config' import { mapActions, mapMutations, mapState } from 'vuex' import nodeStatus from './nodeStatus' + import x6StyleMixin from './x6-style-mixin' + + const SCALE_MARKS = { + 0.2: '0.2', + 1: '1', + 2: '2' + } export default { name: 'dag-canvas', data () { return { graph: null, - // Used to calculate the context menu location - originalScrollPosition: { - left: 0, - top: 0 - }, editable: true, dragging: { // Distance from the mouse to the top-left corner of the dragging element x: 0, y: 0, type: '' - } + }, + // The canvas scale + scale: 1, + SCALE_MARKS } }, provide () { @@ -79,10 +92,12 @@ dagCanvas: this } }, + mixins: [x6StyleMixin], inject: ['dagChart'], components: { dagTaskbar, - contextMenu + contextMenu, + layoutConfigModal }, computed: { ...mapState('dag', ['tasks']) @@ -118,6 +133,14 @@ movable: true, showNodeSelectionBox: false }, + scaling: { + min: 0.2, + max: 2 + }, + mousewheel: { + enabled: true, + modifiers: ['ctrl', 'meta'] + }, scroller: true, grid: { size: 10, @@ -126,7 +149,10 @@ snapline: true, minimap: { enabled: true, - container: minimap + container: minimap, + scalable: false, + width: 200, + height: 120 }, interacting: { edgeLabelMovable: false, @@ -134,9 +160,6 @@ magnetConnectable: !!editable }, connecting: { - snap: { - radius: 30 - }, // Whether multiple edges can be created between the same start node and 
end allowMulti: false, // Whether a point is allowed to connect to a blank position on the canvas @@ -148,32 +171,14 @@ // Whether edges are allowed to link to nodes allowNode: true, // Whether to allow edge links to ports - allowPort: true, + allowPort: false, // Whether all available ports or nodes are highlighted when you drag the edge highlight: true, createEdge () { return graph.createEdge({ shape: X6_EDGE_NAME }) }, - validateMagnet ({ magnet }) { - return magnet.getAttribute('port-group') !== X6_PORT_IN_NAME - }, validateConnection (data) { - const { sourceCell, targetCell, sourceMagnet, targetMagnet } = data - // Connections can only be created from the output link post - if ( - !sourceMagnet || - sourceMagnet.getAttribute('port-group') !== X6_PORT_OUT_NAME - ) { - return false - } - - // Can only be connected to the input link post - if ( - !targetMagnet || - targetMagnet.getAttribute('port-group') !== X6_PORT_IN_NAME - ) { - return false - } + const { sourceCell, targetCell } = data if ( sourceCell && @@ -214,9 +219,9 @@ } } })) + this.registerX6Shape() this.bindGraphEvent() - this.originalScrollPosition = graph.getScrollbarPosition() }, /** * Register custom shapes @@ -224,43 +229,22 @@ registerX6Shape () { Graph.unregisterNode(X6_NODE_NAME) Graph.unregisterEdge(X6_EDGE_NAME) - Graph.registerNode(X6_NODE_NAME, { ...NODE_PROPS }) - Graph.registerEdge(X6_EDGE_NAME, { ...EDGE_PROPS }) + Graph.registerNode(X6_NODE_NAME, { ...NODE }) + Graph.registerEdge(X6_EDGE_NAME, { ...EDGE }) }, /** * Bind grap event */ bindGraphEvent () { - // nodes and edges hover - this.graph.on('cell:mouseenter', (data) => { - const { cell, e } = data - const isStatusIcon = (tagName) => - tagName && - (tagName.toLocaleLowerCase() === 'em' || - tagName.toLocaleLowerCase() === 'body') - if (!isStatusIcon(e.target.tagName)) { - this.setHighlight(cell) - } - }) - this.graph.on('cell:mouseleave', ({ cell }) => { - if (!this.graph.isSelected(cell)) { - this.resetHighlight(cell) - } - }) - 
// select - this.graph.on('cell:selected', ({ cell }) => { - this.setHighlight(cell) - }) - this.graph.on('cell:unselected', ({ cell }) => { - if (!this.graph.isSelected(cell)) { - this.resetHighlight(cell) - } + this.bindStyleEvent(this.graph) + // update scale bar + this.graph.on('scale', ({ sx }) => { + this.scale = sx }) // right click - this.graph.on('node:contextmenu', ({ x, y, cell }) => { - const { left, top } = this.graph.getScrollbarPosition() - const o = this.originalScrollPosition - this.$refs.contextMenu.show(x + (o.left - left), y + (o.top - top)) + this.graph.on('node:contextmenu', ({ x, y, cell, e }) => { + const { x: pageX, y: pageY } = this.graph.localToPage(x, y) + this.$refs.contextMenu.show(pageX, pageY) this.$refs.contextMenu.setCurrentTask({ name: cell.data.taskName, type: cell.data.taskType, @@ -279,6 +263,42 @@ label: labelName }) }) + // Make sure the edge starts with node, not port + this.graph.on('edge:connected', ({ isNew, edge }) => { + if (isNew) { + const sourceNode = edge.getSourceNode() + edge.setSource(sourceNode) + } + }) + + // Add a node tool when the mouse entering + this.graph.on('node:mouseenter', ({ e, x, y, node, view }) => { + const nodeName = node.getData().taskName + node.addTools({ + name: 'button', + args: { + markup: [ + { + tagName: 'text', + textContent: nodeName, + attrs: { + fill: '#868686', + 'font-size': 16, + 'text-anchor': 'center' + } + } + ], + x: 0, + y: 0, + offset: { x: 0, y: -10 } + } + }) + }) + + // Remove all tools when the mouse leaving + this.graph.on('node:mouseleave', ({ node }) => { + node.removeTool('button') + }) }, /** * @param {Edge|string} edge @@ -297,9 +317,6 @@ setEdgeLabel (id, label) { const edge = this.graph.getCellById(id) edge.setLabels(label) - if (this.graph.isSelected(edge)) { - this.setEdgeHighlight(edge) - } }, /** * @param {number} limit @@ -348,92 +365,15 @@ node.setData({ taskName: name }) } }, - /** - * Set node highlight - * @param {Node} node - */ - setNodeHighlight 
(node) { - const url = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}_hover.png`) - node.setAttrs(NODE_HIGHLIGHT_PROPS.attrs) - node.setAttrByPath('image/xlink:href', url) - node.setPortProp( - X6_PORT_OUT_NAME, - 'attrs', - PORT_HIGHLIGHT_PROPS[X6_PORT_OUT_NAME].attrs - ) - }, - /** - * Reset node style - * @param {Node} node - */ - resetNodeStyle (node) { - const url = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}.png`) - node.setAttrs(NODE_PROPS.attrs) - node.setAttrByPath('image/xlink:href', url) - node.setPortProp( - X6_PORT_OUT_NAME, - 'attrs', - PORT_PROPS.groups[X6_PORT_OUT_NAME].attrs - ) - }, - /** - * Set edge highlight - * @param {Edge} edge - */ - setEdgeHighlight (edge) { - const labelName = this.getEdgeLabelName(edge) - edge.setAttrs(EDGE_HIGHLIGHT_PROPS.attrs) - edge.setLabels([ - _.merge( - { - attrs: _.cloneDeep(EDGE_HIGHLIGHT_PROPS.defaultLabel.attrs) - }, - { - attrs: { label: { text: labelName } } - } - ) - ]) - }, - /** - * Reset edge style - * @param {Edge} edge - */ - resetEdgeStyle (edge) { - const labelName = this.getEdgeLabelName(edge) - edge.setAttrs(EDGE_PROPS.attrs) - edge.setLabels([ - { - ..._.merge( - { - attrs: _.cloneDeep(EDGE_PROPS.defaultLabel.attrs) - }, - { - attrs: { label: { text: labelName } } - } - ) + setNodeForbiddenStatus (id, flag) { + id += '' + const node = this.graph.getCellById(id) + if (node) { + if (flag) { + node.attr('rect/fill', '#c4c4c4') + } else { + node.attr('rect/fill', '#ffffff') } - ]) - }, - /** - * Set cell highlight - * @param {Cell} cell - */ - setHighlight (cell) { - if (cell.isEdge()) { - this.setEdgeHighlight(cell) - } else if (cell.isNode()) { - this.setNodeHighlight(cell) - } - }, - /** - * Reset cell highlight - * @param {Cell} cell - */ - resetHighlight (cell) { - if (cell.isEdge()) { - this.resetEdgeStyle(cell) - } else if (cell.isNode()) { - this.resetNodeStyle(cell) } }, /** @@ -512,38 +452,70 @@ } ) }, + showLayoutModal () { + const 
layoutModal = this.$refs.layoutModal + if (layoutModal) { + layoutModal.show() + } + }, /** * format * @desc Auto layout use @antv/layout */ - format () { - const dagreLayout = new DagreLayout({ - type: 'dagre', - rankdir: 'LR', - align: 'UL', - // Calculate the node spacing based on the edge label length - ranksepFunc: (d) => { - const edges = this.graph.getOutgoingEdges(d.id) - let max = 0 - if (edges && edges.length > 0) { - edges.forEach((edge) => { - const edgeView = this.graph.findViewByCell(edge) - const labelWidth = +edgeView.findAttr( - 'width', - _.get(edgeView, ['labelSelectors', '0', 'body'], null) - ) - max = Math.max(max, labelWidth) - }) - } - return 50 + max - }, - nodesep: 50, - controlPoints: true - }) + format (layoutConfig) { + if (!layoutConfig) { + layoutConfig = DEFAULT_LAYOUT_CONFIG + } + this.graph.cleanSelection() + + let layoutFunc = null + if (layoutConfig.type === LAYOUT_TYPE.DAGRE) { + layoutFunc = new DagreLayout({ + type: LAYOUT_TYPE.DAGRE, + rankdir: 'LR', + align: 'UL', + // Calculate the node spacing based on the edge label length + ranksepFunc: (d) => { + const edges = this.graph.getOutgoingEdges(d.id) + let max = 0 + if (edges && edges.length > 0) { + edges.forEach((edge) => { + const edgeView = this.graph.findViewByCell(edge) + const labelWidth = +edgeView.findAttr( + 'width', + _.get(edgeView, ['labelSelectors', '0', 'body'], null) + ) + max = Math.max(max, labelWidth) + }) + } + return layoutConfig.ranksep + max + }, + nodesep: layoutConfig.nodesep, + controlPoints: true + }) + } else if (layoutConfig.type === LAYOUT_TYPE.GRID) { + layoutFunc = new GridLayout({ + type: LAYOUT_TYPE.GRID, + preventOverlap: true, + preventOverlapPadding: layoutConfig.padding, + sortBy: '_index', + rows: layoutConfig.rows || undefined, + cols: layoutConfig.cols || undefined, + nodeSize: 220 + }) + } const json = this.toJSON() - const nodes = json.cells.filter((cell) => cell.shape === X6_NODE_NAME) + const nodes = json.cells + .filter((cell) => 
cell.shape === X6_NODE_NAME) + .map((item) => { + return { + ...item, + // sort by code aesc + _index: -item.id + } + }) const edges = json.cells.filter((cell) => cell.shape === X6_EDGE_NAME) - const newModel = dagreLayout.layout({ + const newModel = layoutFunc.layout({ nodes: nodes, edges: edges }) @@ -561,20 +533,22 @@ console.warn(`taskType:${taskType} is invalid!`) return } - const node = this.genNodeJSON(id, taskType, '', coordinate) + const node = this.genNodeJSON(id, taskType, '', false, coordinate) this.graph.addNode(node) }, /** * generate node json * @param {number|string} id * @param {string} taskType + * @param {boolean} forbidden flag * @param {{x:number;y:number}} coordinate Default is { x: 100, y: 100 } */ - genNodeJSON (id, taskType, taskName, coordinate = { x: 100, y: 100 }) { + genNodeJSON (id, taskType, taskName, flag, coordinate = { x: 100, y: 100 }) { id += '' const url = require(`../images/task-icos/${taskType.toLocaleLowerCase()}.png`) const truncation = taskName ? this.truncateText(taskName, 18) : id - return { + + const nodeJson = { id: id, shape: X6_NODE_NAME, x: coordinate.x, @@ -593,6 +567,12 @@ } } } + + if (flag) { + nodeJson.attrs.rect = { fill: '#c4c4c4' } + } + + return nodeJson }, /** * generate edge json @@ -606,12 +586,10 @@ return { shape: X6_EDGE_NAME, source: { - cell: sourceId, - port: X6_PORT_OUT_NAME + cell: sourceId }, target: { - cell: targetId, - port: X6_PORT_IN_NAME + cell: targetId }, labels: label ? 
[label] : undefined } @@ -688,7 +666,7 @@ if (node) { // Destroy the previous dom node.removeMarkup() - node.setMarkup(NODE_PROPS.markup.concat(NODE_STATUS_MARKUP)) + node.setMarkup(NODE.markup.concat(NODE_STATUS_MARKUP)) const nodeView = this.graph.findViewByCell(node) const el = nodeView.find('div')[0] nodeStatus({ @@ -712,41 +690,20 @@ } }, onDrop (e) { - const { type } = this.dragging - const { x, y } = this.calcGraphCoordinate(e.clientX, e.clientY) + const { type, x: eX, y: eY } = this.dragging + const { x, y } = this.graph.clientToLocal(e.clientX, e.clientY) this.genTaskCodeList({ genNum: 1 }) .then((res) => { const [code] = res - this.addNode(code, type, { x, y }) + this.addNode(code, type, { x: x - eX, y: y - eY }) this.dagChart.openFormModel(code, type) }) .catch((err) => { console.error(err) }) }, - calcGraphCoordinate (mClientX, mClientY) { - // Distance from the mouse to the top-left corner of the container; - const { left: cX, top: cY } = - this.$refs.container.getBoundingClientRect() - const mouseX = mClientX - cX - const mouseY = mClientY - cY - - // The distance that paper has been scrolled - const { left: sLeft, top: sTop } = this.graph.getScrollbarPosition() - const { left: oLeft, top: oTop } = this.originalScrollPosition - const scrollX = sLeft - oLeft - const scrollY = sTop - oTop - - // Distance from the mouse to the top-left corner of the dragging element; - const { x: eX, y: eY } = this.dragging - - return { - x: mouseX + scrollX - eX, - y: mouseY + scrollY - eY - } - }, /** * Get prev nodes by code * @param {number} code @@ -828,6 +785,28 @@ const edge = this.genEdgeJSON(code, postCode) this.graph.addEdge(edge) }) + }, + /** + * Navigate to cell + * @param {string} taskName + */ + navigateTo (taskName) { + const nodes = this.getNodes() + nodes.forEach((node) => { + if (node.data.taskName === taskName) { + const id = node.id + const cell = this.graph.getCellById(id) + this.graph.scrollToCell(cell, { animation: { duration: 600 } }) + 
this.graph.cleanSelection() + this.graph.select(cell) + } + }) + }, + /** + * Canvas scale + */ + scaleChange (val) { + this.graph.zoomTo(val) } } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.scss index 6ea9a3cdefdbee90bacfface7580399f5743af1e..8872b623ccbc46a7a7fbe28ca85f554acb051932 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.scss @@ -15,7 +15,7 @@ * limitations under the License. */ .dag-context-menu{ - position: absolute; + position: fixed; left: 0; top: 0; width: 100px; diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.vue index 56021337b5dd6c033176e1ffff02e41b097ecd67..fc85c63840198aae040dadd3cca61d9e6af53e1f 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/contextMenu.vue @@ -45,6 +45,7 @@ import { mapState, mapActions, mapMutations } from 'vuex' import { findComponentDownward, uuid } from '@/module/util/' import MenuItem from './menuItem.vue' + import { cloneDeep } from 'lodash' export default { name: 'dag-context-menu', @@ -104,7 +105,7 @@ this.currentTask = { ...this.currentTask, ...task } }, onStart () { - this.dagChart.startRunning(this.currentTask.name) + this.dagChart.startRunning(this.currentTask.code) }, onEdit () { this.dagChart.openFormModel(this.currentTask.code, this.currentTask.type) @@ -114,9 +115,9 @@ const targetNode = nodes.find( (node) => node.id === this.currentTask.code ) - const targetTask = this.tasks.find( + const targetTask = cloneDeep(this.tasks.find( (task) => task.code === this.currentTask.code - ) + )) if (!targetNode || !targetTask) return diff --git 
a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/layoutConfigModal.vue similarity index 31% rename from dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue rename to dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/layoutConfigModal.vue index 215b9051f2239e3d11eceafaee04f1188fe7617b..d13bd6877c581b1b0bdd986bd1f86f33b97999cb 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/zookeeper.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/layoutConfigModal.vue @@ -15,53 +15,101 @@ * limitations under the License. */ - diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss index 9f1c6ee3291aca9c195f1d71f9a91bd039e88ed7..aec283f77fe30ed8c1e5a937ddeb59e0ee72b2cd 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss @@ -174,6 +174,10 @@ } } } + + &.disabled{ + cursor: default + } } } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue index 0ffc40c50d8df30255d9ea061b584ff4e445a083..7c5b66544d832cd530072d686505be1a292c04c8 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.vue @@ -24,6 +24,9 @@
@@ -38,6 +41,7 @@ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss index 03578f32d9641968a95c303511e7946449e944bf..155083c0fc58c252d0781e8876b963cf6277f73c 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.scss @@ -110,4 +110,18 @@ } } } + + .process-online-tag{ + margin-left: 10px; + } + + .search-box{ + width: 0; + overflow: hidden; + transition: all 0.5s; + + &.visible{ + width: 200px; + } + } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue index 4071fd955b9dfa0a11bb9d6e5dca5ae266765dc7..d8f57ad9598209fe92d7ee224edfef18d8e3374d 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/toolbar.vue @@ -23,76 +23,111 @@ :content="$t('Copy name')" placement="bottom" > - +
+ {{ $t("processOnline") }} - + > - + >
+ + + +
+ +
- + - + - + - + - + {{$t('Version Info')}}{{ $t("Version Info") }} {{ $t("Close") }} @@ -143,15 +177,28 @@ inject: ['dagChart'], data () { return { - canvasRef: null + canvasRef: null, + searchText: '', + searchInputVisible: false } }, computed: { - ...mapState('dag', [ - 'isDetails' - ]) + ...mapState('dag', ['isDetails', 'releaseState']) }, methods: { + onSearch () { + const canvas = this.getDagCanvasRef() + canvas.navigateTo(this.searchText) + }, + showSearchInput () { + this.searchInputVisible = true + this.$refs.searchInput.focus() + }, + searchInputBlur () { + if (!this.searchText) { + this.searchInputVisible = false + } + }, getDagCanvasRef () { if (this.canvasRef) { return this.canvasRef @@ -200,7 +247,7 @@ }, chartFormat () { const canvas = this.getDagCanvasRef() - canvas.format() + canvas.showLayoutModal() }, refreshTaskStatus () { this.dagChart.refreshTaskStatus() diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js index 96a48ebd7f1111e1b86dc0af29f3de05db249284..1be067f86b949eb78cf9c7f826b000a7a816dccf 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-helper.js @@ -17,16 +17,17 @@ export const X6_NODE_NAME = 'dag-task' export const X6_EDGE_NAME = 'dag-edge' export const X6_PORT_OUT_NAME = 'dag-port-out' -export const X6_PORT_IN_NAME = 'dag-port-in' -const EDGE = '#999999' -const BG_BLUE = 'rgba(40, 143, 255, 0.1)' +const EDGE_COLOR = '#999999' +const BG_BLUE = '#DFE9F7' const BG_WHITE = '#FFFFFF' -const NODE_BORDER = '#e4e4e4' -const TITLE = '#333' +const NODE_BORDER = '#CCCCCC' +const TITLE = '#333333' const STROKE_BLUE = '#288FFF' +const NODE_SHADOW = 'drop-shadow(3px 3px 4px rgba(0, 0, 0, 0.2))' +const EDGE_SHADOW = 'drop-shadow(3px 3px 2px rgba(0, 0, 0, 0.2))' -export const PORT_PROPS = { +export const PORT = { groups: { 
[X6_PORT_OUT_NAME]: { position: { @@ -62,14 +63,14 @@ export const PORT_PROPS = { }, 'plus-text': { fontSize: 12, - fill: EDGE, + fill: NODE_BORDER, text: '+', textAnchor: 'middle', x: 0, y: 3 }, 'circle-outer': { - stroke: EDGE, + stroke: NODE_BORDER, strokeWidth: 1, r: 6, fill: BG_WHITE @@ -79,57 +80,42 @@ export const PORT_PROPS = { fill: 'transparent' } } - }, - [X6_PORT_IN_NAME]: { - position: { - name: 'absolute', - args: { - x: 0, - y: 24 - } - }, - markup: [ - { - tagName: 'g', - selector: 'body', - className: 'in-port-body', - children: [{ - tagName: 'circle', - selector: 'circle', - className: 'circle' - }] - } - ], + } + } +} + +export const PORT_HOVER = { + groups: { + [X6_PORT_OUT_NAME]: { attrs: { - body: { - magnet: true + 'circle-outer': { + stroke: STROKE_BLUE, + fill: BG_BLUE, + r: 8 }, - circle: { - r: 4, - strokeWidth: 0, - fill: 'transparent' + 'circle-inner': { + fill: STROKE_BLUE, + r: 6 } } } } } -export const PORT_HIGHLIGHT_PROPS = { - [X6_PORT_OUT_NAME]: { - attrs: { - 'circle-outer': { - stroke: STROKE_BLUE, - fill: BG_BLUE - }, - 'plus-text': { - fill: STROKE_BLUE - }, - 'circle-inner': { - fill: STROKE_BLUE +export const PORT_SELECTED = { + groups: { + [X6_PORT_OUT_NAME]: { + attrs: { + 'plus-text': { + fill: STROKE_BLUE + }, + 'circle-outer': { + stroke: STROKE_BLUE, + fill: BG_WHITE + } } } - }, - [X6_PORT_IN_NAME]: {} + } } export const NODE_STATUS_MARKUP = [{ @@ -148,13 +134,14 @@ export const NODE_STATUS_MARKUP = [{ ] }] -export const NODE_PROPS = { +export const NODE = { width: 220, height: 48, markup: [ { tagName: 'rect', - selector: 'body' + selector: 'body', + className: 'dag-task-body' }, { tagName: 'image', @@ -174,7 +161,9 @@ export const NODE_PROPS = { pointerEvents: 'visiblePainted', fill: BG_WHITE, stroke: NODE_BORDER, - strokeWidth: 1 + strokeWidth: 1, + strokeDasharray: 'none', + filter: 'none' }, image: { width: 30, @@ -199,21 +188,17 @@ export const NODE_PROPS = { } }, ports: { - ...PORT_PROPS, + ...PORT, items: [ { 
id: X6_PORT_OUT_NAME, group: X6_PORT_OUT_NAME - }, - { - id: X6_PORT_IN_NAME, - group: X6_PORT_IN_NAME } ] } } -export const NODE_HIGHLIGHT_PROPS = { +export const NODE_HOVER = { attrs: { body: { fill: BG_BLUE, @@ -226,28 +211,42 @@ export const NODE_HIGHLIGHT_PROPS = { } } -export const EDGE_PROPS = { +export const NODE_SELECTED = { + attrs: { + body: { + filter: NODE_SHADOW, + fill: BG_WHITE, + stroke: STROKE_BLUE, + strokeDasharray: '5,2', + strokeWidth: '1.5' + }, + title: { + fill: STROKE_BLUE + } + } +} + +export const EDGE = { attrs: { line: { - stroke: EDGE, - strokeWidth: 0.8, + stroke: EDGE_COLOR, + strokeWidth: 1, targetMarker: { tagName: 'path', - fill: EDGE, + fill: EDGE_COLOR, strokeWidth: 0, d: 'M 6 -3 0 0 6 3 Z' - } + }, + filter: 'none' } }, connector: { name: 'rounded' }, router: { - name: 'er', + name: 'manhattan', args: { - offset: 20, - min: 20, - direction: 'L' + endDirections: ['top', 'bottom', 'left'] } }, defaultLabel: { @@ -263,7 +262,7 @@ export const EDGE_PROPS = { ], attrs: { label: { - fill: EDGE, + fill: EDGE_COLOR, fontSize: 14, textAnchor: 'middle', textVerticalAnchor: 'middle', @@ -272,7 +271,7 @@ export const EDGE_PROPS = { body: { ref: 'label', fill: BG_WHITE, - stroke: EDGE, + stroke: EDGE_COLOR, strokeWidth: 1, rx: 4, ry: 4, @@ -292,7 +291,7 @@ export const EDGE_PROPS = { } } -export const EDGE_HIGHLIGHT_PROPS = { +export const EDGE_HOVER = { attrs: { line: { stroke: STROKE_BLUE, @@ -313,3 +312,27 @@ export const EDGE_HIGHLIGHT_PROPS = { } } } + +export const EDGE_SELECTED = { + attrs: { + line: { + stroke: STROKE_BLUE, + targetMarker: { + fill: STROKE_BLUE + }, + strokeWidth: 2, + filter: EDGE_SHADOW + } + }, + defaultLabel: { + attrs: { + label: { + fill: STROKE_BLUE + }, + body: { + fill: BG_WHITE, + stroke: STROKE_BLUE + } + } + } +} diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style-mixin.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style-mixin.js new file mode 
100644 index 0000000000000000000000000000000000000000..d5f9f7b3a590a0d3ab36c15508c1b80cf991aa7c --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style-mixin.js @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { + NODE, + EDGE, + PORT, + NODE_HOVER, + PORT_HOVER, + EDGE_HOVER, + PORT_SELECTED, + NODE_SELECTED, + EDGE_SELECTED, + X6_PORT_OUT_NAME +} from './x6-helper' +import _ from 'lodash' + +export default { + data () { + return { + hoverCell: null + } + }, + methods: { + bindStyleEvent (graph) { + // nodes and edges hover + graph.on('cell:mouseenter', (data) => { + const { cell, e } = data + const isStatusIcon = (tagName) => + tagName && + (tagName.toLocaleLowerCase() === 'em' || + tagName.toLocaleLowerCase() === 'body') + if (!isStatusIcon(e.target.tagName)) { + this.hoverCell = cell + this.updateCellStyle(cell, graph) + } + }) + graph.on('cell:mouseleave', ({ cell }) => { + this.hoverCell = null + this.updateCellStyle(cell, graph) + }) + // select + graph.on('cell:selected', ({ cell }) => { + this.updateCellStyle(cell, graph) + }) + graph.on('cell:unselected', ({ cell }) => { + this.updateCellStyle(cell, graph) + }) + }, + updateCellStyle (cell, graph) { + if (cell.isEdge()) { + this.setEdgeStyle(cell, graph) + } else if (cell.isNode()) { + this.setNodeStyle(cell, graph) + } + }, + /** + * Set node style + * @param {Node} node + * @param {Graph} graph + */ + setNodeStyle (node, graph) { + const isHover = node === this.hoverCell + const isSelected = graph.isSelected(node) + const portHover = _.cloneDeep(PORT_HOVER.groups[X6_PORT_OUT_NAME].attrs) + const portSelected = _.cloneDeep(PORT_SELECTED.groups[X6_PORT_OUT_NAME].attrs) + const portDefault = _.cloneDeep(PORT.groups[X6_PORT_OUT_NAME].attrs) + const nodeHover = _.merge(_.cloneDeep(NODE.attrs), NODE_HOVER.attrs) + const nodeSelected = _.merge(_.cloneDeep(NODE.attrs), NODE_SELECTED.attrs) + + let img = null + let nodeAttrs = null + let portAttrs = null + + if (isHover || isSelected) { + img = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}_hover.png`) + if (isHover) { + nodeAttrs = nodeHover + portAttrs = _.merge(portDefault, portHover) + } else { + nodeAttrs = nodeSelected + 
portAttrs = _.merge(portDefault, portSelected) + } + } else { + img = require(`../images/task-icos/${node.data.taskType.toLocaleLowerCase()}.png`) + nodeAttrs = NODE.attrs + portAttrs = portDefault + } + node.setAttrByPath('image/xlink:href', img) + node.setAttrs(nodeAttrs) + node.setPortProp( + X6_PORT_OUT_NAME, + 'attrs', + portAttrs + ) + }, + /** + * Set edge style + * @param {Edge} edge + * @param {Graph} graph + */ + setEdgeStyle (edge, graph) { + const isHover = edge === this.hoverCell + const isSelected = graph.isSelected(edge) + const labelName = this.getEdgeLabelName ? this.getEdgeLabelName(edge) : '' + let edgeProps = null + + if (isHover) { + edgeProps = _.merge(_.cloneDeep(EDGE), EDGE_HOVER) + } else if (isSelected) { + edgeProps = _.merge(_.cloneDeep(EDGE), EDGE_SELECTED) + } else { + edgeProps = _.cloneDeep(EDGE) + } + + edge.setAttrs(edgeProps.attrs) + edge.setLabels([ + { + ..._.merge( + { + attrs: _.cloneDeep(edgeProps.defaultLabel.attrs) + }, + { + attrs: { label: { text: labelName } } + } + ) + } + ]) + } + } +} diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss index b86b51afc63df0470bab74ce11373da0d3c54dc7..bc56d51b35e41306b7f7644f7035fd9d73b4ad56 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/x6-style.scss @@ -14,16 +14,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -$STROKE_BLUE: #288FFF; -$BG_WHITE: #FFFFFF; +$STROKE_BLUE: #288fff; +$BG_WHITE: #ffffff; -.x6-node[data-shape="dag-task"]{ - .in-port-body{ - &.adsorbed,&.available{ - .circle { - stroke: $STROKE_BLUE; - stroke-width: 4; - fill: $BG_WHITE; +.x6-node[data-shape="dag-task"] { + &.available { + .dag-task-body { + stroke: $STROKE_BLUE; + stroke-width: 1; + stroke-dasharray: 5, 2; + } + &.adsorbed { + .dag-task-body { + stroke-width: 3; } } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss index 12dd8d99b589e781145232f02c0c7b516fdd44e7..49eed5361f0af3ed756f6f4857317ac6a8f776b3 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss @@ -27,7 +27,7 @@ height: 100%; top: 0; left: 0; - z-index: 1000; + z-index: 10000; } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue index 37b5e10669000e8d0eb1c6e78307be538cb6143c..7c330aacb4c7d79921d5907ac84ea3de70acc595 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue @@ -60,6 +60,10 @@ + +
+ +
{ - this.refreshTaskStatus() - }, 90000) + this.refreshTaskStatus(() => { + // set the timer after the first refresh + // status polling + this.statusTimer = setInterval(() => { + this.refreshTaskStatus() + }, 90000) + }) } }, beforeDestroy () { this.resetParams() - clearInterval(this.statusTimer) window.removeEventListener('resize', this.resizeDebounceFunc) }, @@ -199,7 +206,8 @@ 'name', 'isDetails', 'projectCode', - 'version' + 'version', + 'code' ]) }, methods: { @@ -221,7 +229,8 @@ 'setIsEditDag', 'setName', 'setLocations', - 'resetLocalParam' + 'resetLocalParam', + 'setDependResult' ]), /** * Toggle full screen @@ -263,6 +272,7 @@ addTaskInfo ({ item }) { this.addTask(item) this.$refs.canvas.setNodeName(item.code, item.name) + this.$refs.canvas.setNodeForbiddenStatus(item.code, item.flag === 'NO') this.taskDrawer = false }, closeTaskDrawer ({ flag }) { @@ -311,12 +321,12 @@ .then((res) => { if (this.verifyConditions(res.tasks)) { this.loading(true) - const definitionCode = this.definitionCode - if (definitionCode) { + const isEdit = !!this.definitionCode + if (isEdit) { + const methodName = this.type === 'instance' ? 'updateInstance' : 'updateDefinition' + const methodParam = this.type === 'instance' ? this.instanceId : this.definitionCode // Edit - return this[ - this.type === 'instance' ? 
'updateInstance' : 'updateDefinition' - ](definitionCode) + return this[methodName](methodParam) .then((res) => { this.$message({ message: res.msg, @@ -399,12 +409,14 @@ buildGraphJSON (tasks, locations, connects) { const nodes = [] const edges = [] + if (!locations) { locations = [] } tasks.forEach((task) => { const location = locations.find((l) => l.taskCode === task.code) || {} const node = this.$refs.canvas.genNodeJSON( task.code, task.taskType, task.name, + task.flag === 'NO', { x: location.x, y: location.y @@ -412,6 +424,7 @@ ) nodes.push(node) }) + connects .filter((r) => !!r.preTaskCode) .forEach((c) => { @@ -483,6 +496,10 @@ const connects = this.connects const json = this.buildGraphJSON(tasks, locations, connects) this.$refs.canvas.fromJSON(json) + // Auto format + if (!locations) { + this.$refs.canvas.format() + } }, /** * Return to the previous process @@ -520,7 +537,7 @@ this.$router.push({ name: 'task-instance', query: { - processInstanceId: this.$route.params.code, + processInstanceId: this.instanceId, taskName: taskName } }) @@ -545,26 +562,46 @@ /** * Task status */ - refreshTaskStatus () { + refreshTaskStatus (callback = undefined) { const instanceId = this.$route.params.id this.loading(true) this.getTaskState(instanceId) .then((res) => { this.$message(this.$t('Refresh status succeeded')) const { taskList } = res.data + const list = res.list if (taskList) { this.taskInstances = taskList taskList.forEach((taskInstance) => { - this.$refs.canvas.setNodeStatus({ - code: taskInstance.taskCode, - state: taskInstance.state, - taskInstance - }) + const lastStatus = this.lastStatues.get(taskInstance.taskCode) + if (!lastStatus || + (lastStatus.host !== taskInstance.host) || + (lastStatus.retryTimes !== taskInstance.retryTimes) || + (lastStatus.submitTime !== taskInstance.submitTime) || + (lastStatus.startTime !== taskInstance.startTime) || + (lastStatus.endTime !== taskInstance.endTime)) { + this.lastStatues.set(taskInstance.taskCode, taskInstance) + 
this.$refs.canvas.setNodeStatus({ + code: taskInstance.taskCode, + state: taskInstance.state, + taskInstance + }) + } + }) + } + if (list) { + list.forEach((dependent) => { + if (dependent.dependentResult) { + this.setDependResult(JSON.parse(dependent.dependentResult)) + } }) } }) .finally(() => { this.loading(false) + if (callback) { + callback() + } }) }, /** diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/dependentTimeout.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/dependentTimeout.vue index 8d8f13cfee91b13fb2e093ba0ff3876ee8908880..a87b666331153950f90a65d3c5b208f49369f87e 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/dependentTimeout.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/dependentTimeout.vue @@ -28,7 +28,8 @@
-
+ +
{{$t('Waiting Dependent start')}}
@@ -40,25 +41,44 @@
+
+
+ {{$t('Timeout period')}} +
+ +
+
+ {{$t('Check interval')}} +
+
+ +
+
+ +
+
+ {{$t('Timeout strategy')}} +
+
+
+
{{$t('Waiting Dependent complete')}} @@ -78,19 +99,29 @@
+ +
+
+ {{$t('Timeout period')}} +
+
+ +
+
+
+
+ {{$t('Timeout strategy')}} +