diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 7a9e8f785a08..d1bc3385dc53 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -26,7 +26,6 @@
/dolphinscheduler-common/ @SbloodyS
/dolphinscheduler-dao/ @SbloodyS @ruanwenjun
/dolphinscheduler-dao-plugin/ @SbloodyS @ruanwenjun
-/dolphinscheduler-data-quality/ @SbloodyS
/dolphinscheduler-datasource-plugin/ @SbloodyS
/dolphinscheduler-dist/ @SbloodyS
/dolphinscheduler-e2e/ @SbloodyS
diff --git a/.github/actions/labeler/labeler.yml b/.github/actions/labeler/labeler.yml
index 6bd9b6daf1e5..de0f8319bf46 100644
--- a/.github/actions/labeler/labeler.yml
+++ b/.github/actions/labeler/labeler.yml
@@ -23,7 +23,6 @@ backend:
- 'dolphinscheduler-common/**/*'
- 'dolphinscheduler-dao/**/*'
- 'dolphinscheduler-dao-plugin/**/*'
- - 'dolphinscheduler-data-quality/**/*'
- 'dolphinscheduler-datasource-plugin/**/*'
- 'dolphinscheduler-dist/**/*'
- 'dolphinscheduler-extract/**/*'
diff --git a/config/plugins_config b/config/plugins_config
index 6fac612b01c0..eff859100fe5 100644
--- a/config/plugins_config
+++ b/config/plugins_config
@@ -77,7 +77,6 @@ dolphinscheduler-storage-s3
dolphinscheduler-task-aliyunserverlessspark
dolphinscheduler-task-chunjun
dolphinscheduler-task-datafactory
-dolphinscheduler-task-dataquality
dolphinscheduler-task-datasync
dolphinscheduler-task-datax
dolphinscheduler-task-dinky
diff --git a/deploy/kubernetes/dolphinscheduler/README.md b/deploy/kubernetes/dolphinscheduler/README.md
index 4a38a5e4d3f3..d64053ffdd27 100644
--- a/deploy/kubernetes/dolphinscheduler/README.md
+++ b/deploy/kubernetes/dolphinscheduler/README.md
@@ -131,7 +131,6 @@ Please refer to the [Quick Start in Kubernetes](../../../docs/docs/en/guide/inst
| conf.common."aws.s3.endpoint" | string | `"http://minio:9000"` | You need to set this parameter when private cloud s3. If S3 uses public cloud, you only need to set resource.aws.region or set to the endpoint of a public cloud such as S3.cn-north-1.amazonaws.com.cn |
| conf.common."aws.s3.region" | string | `"ca-central-1"` | The AWS Region to use. if resource.storage.type=S3, This configuration is required |
| conf.common."conda.path" | string | `"/opt/anaconda3/etc/profile.d/conda.sh"` | set path of conda.sh |
-| conf.common."data-quality.jar.dir" | string | `nil` | data quality option |
| conf.common."data.basedir.path" | string | `"/tmp/dolphinscheduler"` | user data local directory path, please make sure the directory exists and have read write permissions |
| conf.common."datasource.encryption.enable" | bool | `false` | datasource encryption enable |
| conf.common."datasource.encryption.salt" | string | `"!@#$%^&*"` | datasource encryption salt |
diff --git a/deploy/kubernetes/dolphinscheduler/values.yaml b/deploy/kubernetes/dolphinscheduler/values.yaml
index 41a7dfb9c286..5658a29c1189 100644
--- a/deploy/kubernetes/dolphinscheduler/values.yaml
+++ b/deploy/kubernetes/dolphinscheduler/values.yaml
@@ -336,9 +336,6 @@ conf:
# -- datasource encryption salt
datasource.encryption.salt: '!@#$%^&*'
- # -- data quality option
- data-quality.jar.dir:
-
# -- Whether hive SQL is executed in the same session
support.hive.oneSession: false
@@ -987,7 +984,6 @@ api:
# cloud: []
# logic: []
# dataIntegration: []
- # dataQuality: []
# machineLearning: []
# other: []
diff --git a/docs/configs/docsdev.js b/docs/configs/docsdev.js
index 8cf9d4d2f92a..0fa7adf2612d 100644
--- a/docs/configs/docsdev.js
+++ b/docs/configs/docsdev.js
@@ -457,10 +457,6 @@ export default {
}
],
},
- {
- title: 'Data Quality',
- link: '/en-us/docs/dev/user_doc/guide/data-quality.html',
- },
{
title: 'Remote Logging',
link: '/en-us/docs/dev/user_doc/guide/remote-logging.html',
@@ -1160,10 +1156,6 @@ export default {
}
],
},
- {
- title: '数据质量',
- link: '/zh-cn/docs/dev/user_doc/guide/data-quality.html',
- },
{
title: '远程日志存储',
link: '/zh-cn/docs/dev/user_doc/guide/remote-logging.html',
diff --git a/docs/docs/en/architecture/configuration.md b/docs/docs/en/architecture/configuration.md
index 567163faed19..86d4357e1bd4 100644
--- a/docs/docs/en/architecture/configuration.md
+++ b/docs/docs/en/architecture/configuration.md
@@ -224,7 +224,6 @@ The default configuration is as follows:
| yarn.job.history.status.address | http://ds1:19888/ws/v1/history/mapreduce/jobs/%s | job history status url of yarn |
| datasource.encryption.enable | false | whether to enable datasource encryption |
| datasource.encryption.salt | !@#$%^&* | the salt of the datasource encryption |
-| data-quality.jar.dir | | the jar of data quality |
| support.hive.oneSession | false | specify whether hive SQL is executed in the same session |
| sudo.enable | true | whether to enable sudo |
| alert.rpc.port | 50052 | the RPC port of Alert Server |
diff --git a/docs/docs/en/guide/data-quality.md b/docs/docs/en/guide/data-quality.md
deleted file mode 100644
index dca777d76fb8..000000000000
--- a/docs/docs/en/guide/data-quality.md
+++ /dev/null
@@ -1,313 +0,0 @@
-# Data Quality
-
-## Introduction
-
-The data quality task is used to check the data accuracy during the integration and processing of data. Data quality tasks in this release include single-table checking, single-table custom SQL checking, multi-table accuracy, and two-table value comparisons. The data quality task runs on Spark 2.4.0; other versions have not been verified, and users can verify them on their own.
-
-The execution logic of the data quality task is as follows:
-
-- The user defines the task in the interface, and the user input value is stored in `TaskParam`.
-- When running a task, `Master` will parse `TaskParam`, encapsulate the parameters required by `DataQualityTask` and send it to `Worker`.
-- `Worker` runs the data quality task. After the data quality task finishes running, it writes the statistical results to the specified storage engine.
-- The current data quality task result is stored in the `t_ds_dq_execute_result` table of `dolphinscheduler`.
-- `Worker` sends the task result to `Master`. After `Master` receives `TaskResponse`, it checks whether the task type is `DataQualityTask`; if so, it reads the corresponding result from `t_ds_dq_execute_result` according to `taskInstanceId`, and then judges the result against the check mode, operator and threshold configured by the user.
-- If the result is a failure, the corresponding operation, alert or blocking, will be performed according to the failure strategy configured by the user.
-- If you package `data-quality` separately, remember to keep the jar name consistent with the `data-quality.jar.dir` property configured in `common.properties`.
-- If you upgraded from an old version, you need to execute the `sql` update script to initialize the database before running.
-- `dolphinscheduler-data-quality-dev-SNAPSHOT.jar` was built with no dependencies. If a `JDBC` driver is required, you can set the `--jars` parameter in the `node settings` `Option Parameters`, e.g. `--jars /lib/jars/mysql-connector-java-8.0.16.jar`.
-- Currently only `MySQL`, `PostgreSQL` and `HIVE` data sources have been tested; other data sources have not been tested yet.
-- `Spark` needs to be configured to read `Hive` metadata; `Spark` does not use `jdbc` to read `Hive`.
-
-## Detailed Inspection Logic
-
-| **Parameter** | **Description** |
-|---------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| CheckMethod   | [CheckFormula][Operator][Threshold]; if the result is true, it indicates that the data does not meet expectations, and the failure strategy is executed. |
-| CheckFormula  | Expected-Actual, Actual-Expected, (Actual/Expected)x100%, (Expected-Actual)/Expected x100% |
-| Operator      | =, >, >=, <, <=, != |
-| ExpectedValue | FixValue, DailyAvg, WeeklyAvg, MonthlyAvg, Last7DayAvg, Last30DayAvg, SrcTableTotalRows, TargetTableTotalRows |
-| Example       | CheckFormula: Actual-Expected, Operator: >, Threshold: 0, ExpectedValue: FixValue=9 |
-
-In the example, assuming the actual value is 10, the operator is >, and the expected value is 9, the result of 10 - 9 > 0 is true, which means the number of rows with a null column value has exceeded the threshold, and the task is judged as failed.
-
-# Task Operation Guide
-
-## Null Value Check for Single Table Check
-
-### Inspection Introduction
-
-The goal of the null value check is to check the number of empty rows in the specified column. The number of empty rows can be compared with the total number of rows or with a specified threshold; if it exceeds the threshold, the check is judged as failed.
-
-- The SQL statement that calculates the null of the specified column is as follows:
-
- ```sql
- SELECT COUNT(*) AS miss FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '') AND (${src_filter})
- ```
-- The SQL to calculate the total number of rows in the table is as follows:
-
- ```sql
- SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})
- ```
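-
-For instance, with a hypothetical `users` table, check column `email`, and filter `create_time >= '2024-01-01'` (illustrative names, not defaults), the two rendered statements would look like:
-
-```sql
--- null/empty count for the checked column
-SELECT COUNT(*) AS miss FROM users
-WHERE (email IS NULL OR email = '') AND (create_time >= '2024-01-01');
-
--- total row count under the same filter
-SELECT COUNT(*) AS total FROM users
-WHERE (create_time >= '2024-01-01');
-```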
-
-### Interface Operation Guide
-
-![dataquality_null_check](../../../img/tasks/demo/null_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Timeliness Check of Single Table Check
-
-### Inspection Introduction
-
-The timeliness check is used to check whether the data is processed within the expected time. The start time and end time can be specified to define the time range. If the amount of data within the time range does not reach the set threshold, the check task will be judged as failed.
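-
-As a minimal sketch (assuming hypothetical `${begin_time}` and `${deadline_time}` placeholders for the configured time range; the real task renders its own statement), the counting SQL could look like:
-
-```sql
--- rows whose check column falls inside the configured window
-SELECT COUNT(*) AS total FROM ${src_table}
-WHERE ${src_field} >= '${begin_time}' AND ${src_field} < '${deadline_time}'
-  AND (${src_filter});
-```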
-
-### Interface Operation Guide
-
-![dataquality_timeliness_check](../../../img/tasks/demo/timeliness_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Start time             | The start time of the time range. |
-| End time               | The end time of the time range. |
-| Time Format            | Set the corresponding time format. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Field Length Check for Single Table Check
-
-### Inspection Introduction
-
-The goal of field length verification is to check whether the length of the selected field meets the expectations. If there is data that does not meet the requirements, and the number of rows exceeds the threshold, the task will be judged to fail.
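-
-A minimal sketch of the counting SQL such a check could render, assuming a hypothetical `${length}` placeholder for the configured limit and `>` as the configured logical operator:
-
-```sql
--- rows whose column length violates the configured limit
-SELECT COUNT(*) AS miss FROM ${src_table}
-WHERE CHAR_LENGTH(${src_field}) > ${length} AND (${src_filter});
-```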
-
-### Interface Operation Guide
-
-![dataquality_length_check](../../../img/tasks/demo/field_length_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Logical operators      | =, >, >=, <, <=, != |
-| Field length limit     | As the name suggests. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Uniqueness Check for Single Table Check
-
-### Inspection Introduction
-
-The goal of the uniqueness check is to check whether field values are duplicated. It is generally used to check whether the primary key is duplicated. If there are duplicates and the threshold is reached, the check task is judged as failed.
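-
-A minimal sketch of how the duplicate count could be computed (illustrative only, not the task's exact statement):
-
-```sql
--- count the distinct values that occur more than once
-SELECT COUNT(*) AS duplicates FROM (
-  SELECT ${src_field} FROM ${src_table}
-  WHERE (${src_filter})
-  GROUP BY ${src_field}
-  HAVING COUNT(*) > 1
-) t;
-```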
-
-### Interface Operation Guide
-
-![dataquality_uniqueness_check](../../../img/tasks/demo/uniqueness_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Regular Expression Check for Single Table Check
-
-### Inspection Introduction
-
-The goal of regular expression verification is to check whether the format of the value of a field meets the requirements, such as time format, email format, ID card format, etc. If there is data that does not meet the format and exceeds the threshold, the task will be judged as failed.
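-
-A minimal sketch in Spark SQL flavor (the `${regexp}` placeholder name is illustrative, and regexp syntax varies by engine):
-
-```sql
--- rows whose column does not match the configured pattern
-SELECT COUNT(*) AS miss FROM ${src_table}
-WHERE NOT (${src_field} RLIKE '${regexp}') AND (${src_filter});
-```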
-
-### Interface Operation Guide
-
-![dataquality_regex_check](../../../img/tasks/demo/regexp_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Regular expression     | As the name suggests. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Enumeration Value Validation for Single Table Check
-
-### Inspection Introduction
-
-The goal of enumeration value verification is to check whether the value of a field is within the range of the enumeration value. If there is data that is not in the range of the enumeration value and exceeds the threshold, the task will be judged to fail.
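-
-A minimal sketch of the counting SQL, assuming an illustrative enumeration list `('a', 'b', 'c')`:
-
-```sql
--- rows whose column value falls outside the enumeration
--- note: rows where the column is NULL are not matched by NOT IN
-SELECT COUNT(*) AS miss FROM ${src_table}
-WHERE ${src_field} NOT IN ('a', 'b', 'c') AND (${src_filter});
-```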
-
-### Interface Operation Guide
-
-![dataquality_enum_check](../../../img/tasks/demo/enumeration_check.png)
-
-| **Parameter** | **Description** |
-|-----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src table filter conditions | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column      | Drop-down to select the check column name. |
-| List of enumeration values  | Separated by commas. |
-| Check method                | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators             | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Table Row Number Verification for Single Table Check
-
-### Inspection Introduction
-
-The goal of table row number verification is to check whether the number of rows in the table reaches the expected value. If the number of rows does not meet the standard, the task will be judged as failed.
-
-### Interface Operation Guide
-
-![dataquality_count_check](../../../img/tasks/demo/table_count_check.png)
-
-| **Parameter** | **Description** |
-|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the validation data is located. |
-| Src filter conditions  | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Src table check column | Drop-down to select the check column name. |
-| Check method           | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators        | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
-
-## Custom SQL Check for Single Table Check
-
-### Interface Operation Guide
-
-![dataquality_custom_sql_check](../../../img/tasks/demo/custom_sql_check.png)
-
-| **Parameter** | **Description** |
-|------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the data to be verified is located. |
-| Actual value name | Alias in SQL for statistical value calculation, such as max_num. |
-| Actual value calculation SQL | SQL for outputting the actual value. Note: the SQL must be statistical SQL, such as counting the number of rows or calculating a maximum or minimum value, and the table name must be written as `${src_table}`, e.g. `select max(a) as max_num from ${src_table}`. |
-| Src filter conditions        | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Check method                 | [Expected-Actual] [Actual-Expected] [Actual/Expected]x100% [(Expected-Actual)/Expected]x100% |
-| Check operators              | =, >, >=, <, <=, != |
-| Threshold | The value used in the formula for comparison. |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type from the drop-down menu. |
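-
-As an illustration of a valid actual value calculation SQL, statistical, aliased, and using the mandatory `${src_table}` placeholder:
-
-```sql
--- the alias max_num must match the configured actual value name
-SELECT MAX(a) AS max_num FROM ${src_table};
-```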
-
-## Accuracy Check of Multi-table
-
-### Inspection Introduction
-
-Accuracy checks compare the data records of selected fields between two tables and measure the differences. An example is as follows:
-- table test1
-
-| c1 | c2 |
-|:--:|:--:|
-| a | 1 |
-| b | 2 |
-
-- table test2
-
-| c21 | c22 |
-|:---:|:---:|
-| a | 1 |
-| b | 3 |
-
-If you compare the data in c1 and c21, the tables test1 and test2 are exactly the same. If you compare c2 and c22, the data in table test1 and table test2 are inconsistent.
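-
-One way such a record comparison could be expressed in SQL for the example above (illustrative only; the task generates its own Spark SQL):
-
-```sql
--- rows in test1 with no matching (c21, c22) record in test2; here this returns 1, the (b, 2) row
-SELECT COUNT(*) AS miss_count
-FROM test1 t1
-LEFT JOIN test2 t2 ON t1.c1 = t2.c21 AND t1.c2 = t2.c22
-WHERE t2.c21 IS NULL;
-```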
-
-### Interface Operation Guide
-
-![dataquality_multi_table_accuracy_check](../../../img/tasks/demo/multi_table_accuracy_check.png)
-
-| **Parameter** | **Description** |
-|--------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | Drop-down to select the table where the data to be verified is located. |
-| Src filter conditions    | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Target data type         | Choose MySQL, PostgreSQL, etc. |
-| Target data source       | The corresponding data source under the target data type. |
-| Target data table        | Drop-down to select the table where the data to be verified is located. |
-| Target filter conditions | As the name suggests; the filter is also used when counting the total number of rows in the table. Optional. |
-| Check column             | Fill in the source data column, operator and target data column respectively. |
-| Verification method      | Select the desired verification method. |
-| Operators                | =, >, >=, <, <=, != |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-| Expected value type | Select the desired type in the drop-down menu, only `SrcTableTotalRow`, `TargetTableTotalRow` and fixed value are suitable for selection here. |
-
-## Comparison of the values checked by the two tables
-
-### Inspection Introduction
-
-Two-table value comparison allows users to define different statistical SQL for two tables and compare the resulting values. For example, calculate the total amount sum1 of a column in source table A and the total amount sum2 of a column in the target table, then compare sum1 and sum2 to determine the check result.
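-
-For instance (with an illustrative `amount` column), the pair of statements could be:
-
-```sql
--- actual value from the source table, aliased to the configured actual value name
-SELECT SUM(amount) AS sum1 FROM ${src_table};
--- expected value from the target table, aliased to the configured expected value name
-SELECT SUM(amount) AS sum2 FROM ${target_table};
-```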
-
-### Interface Operation Guide
-
-![dataquality_multi_table_comparison_check](../../../img/tasks/demo/multi_table_comparison_check.png)
-
-| **Parameter** | **Description** |
-|--------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Source data type | Select MySQL, PostgreSQL, etc. |
-| Source data source | The corresponding data source under the source data type. |
-| Source data table | The table where the data is to be verified. |
-| Actual value name              | Alias in SQL for the actual value calculation, such as max_age1. |
-| Actual value calculation SQL   | SQL for outputting the actual value. Note: the SQL must be statistical SQL, such as counting the number of rows or calculating a maximum or minimum value, and the table name must be written as `${src_table}`, e.g. `select max(age) as max_age1 from ${src_table}`. |
-| Target data type               | Choose MySQL, PostgreSQL, etc. |
-| Target data source             | The corresponding data source under the target data type. |
-| Target data table              | The table where the data is to be verified. |
-| Expected value name            | Alias in SQL for the expected value calculation, such as max_age2. |
-| Expected value calculation SQL | SQL for outputting the expected value. Note: the SQL must be statistical SQL, such as counting the number of rows or calculating a maximum or minimum value, and the table name must be written as `${target_table}`, e.g. `select max(age) as max_age2 from ${target_table}`. |
-| Verification method            | Select the desired verification method. |
-| Operators                      | =, >, >=, <, <=, != |
-| Failure strategy | Alert: The data quality task failed, the DolphinScheduler task result is successful, and an alert is sent. Blocking: The data quality task fails, the DolphinScheduler task result is failed, and an alarm is sent. |
-
-## Task result view
-
-![dataquality_result](../../../img/tasks/demo/result.png)
-
-## Rule View
-
-### List of rules
-
-![dataquality_rule_list](../../../img/tasks/demo/rule_list.png)
-
-### Rule Details
-
-![dataquality_rule_detail](../../../img/tasks/demo/rule_detail.png)
diff --git a/docs/docs/en/guide/upgrade/incompatible.md b/docs/docs/en/guide/upgrade/incompatible.md
index c0549badd208..c263d97ac4ca 100644
--- a/docs/docs/en/guide/upgrade/incompatible.md
+++ b/docs/docs/en/guide/upgrade/incompatible.md
@@ -31,5 +31,6 @@ This document records the incompatible updates between each version. You need to
* Remove the `udf-manage` function from the `resource center` ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
* Remove the `Pigeon` from the `Task Plugin` ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
* Uniformly name `process` in code as `workflow` ([#16515](https://github.com/apache/dolphinscheduler/pull/16515))
-* Deprecated upgrade code of 1.x and 2.x in 3.3.0-release ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
+* Deprecated upgrade code of 1.x and 2.x ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
+* Remove the `Data Quality` module ([#16794](https://github.com/apache/dolphinscheduler/pull/16794))
diff --git a/docs/docs/zh/architecture/configuration.md b/docs/docs/zh/architecture/configuration.md
index 8bec9c042e25..8eed519f78fe 100644
--- a/docs/docs/zh/architecture/configuration.md
+++ b/docs/docs/zh/architecture/configuration.md
@@ -224,7 +224,6 @@ common.properties配置文件目前主要是配置hadoop/s3/yarn/applicationId
| yarn.job.history.status.address | http://ds1:19888/ws/v1/history/mapreduce/jobs/%s | yarn的作业历史状态URL |
| datasource.encryption.enable | false | 是否启用datasource 加密 |
| datasource.encryption.salt | !@#$%^&* | datasource加密使用的salt |
-| data-quality.jar.dir | | 配置数据质量使用的jar包 |
| support.hive.oneSession | false | 设置hive SQL是否在同一个session中执行 |
| sudo.enable | true | 是否开启sudo |
| alert.rpc.port | 50052 | Alert Server的RPC端口 |
diff --git a/docs/docs/zh/guide/data-quality.md b/docs/docs/zh/guide/data-quality.md
deleted file mode 100644
index 17b2a55cb2bf..000000000000
--- a/docs/docs/zh/guide/data-quality.md
+++ /dev/null
@@ -1,357 +0,0 @@
-# 概述
-
-## 任务类型介绍
-
-数据质量任务是用于检查数据在集成、处理过程中的数据准确性。本版本的数据质量任务包括单表检查、单表自定义SQL检查、多表准确性以及两表值比对。数据质量任务的运行环境为Spark2.4.0,其他版本尚未进行过验证,用户可自行验证。
-
-- 数据质量任务的执行逻辑如下:
-
-> 用户在界面定义任务,用户输入值保存在`TaskParam`中
-> 运行任务时,`Master`会解析`TaskParam`,封装`DataQualityTask`所需要的参数下发至`Worker`。
-> `Worker`运行数据质量任务,数据质量任务在运行结束之后将统计结果写入到指定的存储引擎中,当前数据质量任务结果存储在`dolphinscheduler`的`t_ds_dq_execute_result`表中
-> `Worker`发送任务结果给`Master`,`Master`收到`TaskResponse`之后会判断任务类型是否为`DataQualityTask`,如果是的话会根据`taskInstanceId`从`t_ds_dq_execute_result`中读取相应的结果,然后根据用户配置好的检查方式,操作符和阈值进行结果判断,如果结果为失败的话,会根据用户配置好的失败策略进行相应的操作,告警或者中断
->
- ## 注意事项
-
-- 如果单独打包`data-quality`的话,记得修改包路径和`data-quality.jar.dir`一致,配置内容在 `common.properties` 中的 `data-quality.jar.dir`
-- 如果是老版本升级使用,运行之前需要先执行`SQL`更新脚本进行数据库初始化。
-- 当前 `dolphinscheduler-data-quality-dev-SNAPSHOT.jar` 是瘦包,不包含任何 `JDBC` 驱动。
- 如果有 `JDBC` 驱动需要,可以在`节点设置` `选项参数`处设置 `--jars` 参数,
- 如:`--jars /lib/jars/mysql-connector-java-8.0.16.jar`。
-- 当前只测试了`MySQL`、`PostgreSQL`和`HIVE`数据源,其他数据源暂时未测试过。
-- `Spark`需要配置好读取`Hive`元数据,`Spark`不是采用`JDBC`的方式读取`Hive`。
-
-## 检查逻辑详解
-
-- 校验公式:[校验方式][操作符][阈值],如果结果为真,则表明数据不符合期望,执行失败策略
-- 校验方式:
- - [Expected-Actual][期望值-实际值]
- - [Actual-Expected][实际值-期望值]
- - [Actual/Expected][实际值/期望值]x100%
- - [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 操作符:=、>、>=、<、<=、!=
-- 期望值类型
- - 固定值
- - 日均值
- - 周均值
- - 月均值
- - 最近7天均值
- - 最近30天均值
- - 源表总行数
- - 目标表总行数
-- 例子
- - 校验方式为:[Actual-Expected][实际值-期望值]
- - [操作符]:>
- - [阈值]:0
- - 期望值类型:固定值=9。
-
-  假设实际值为10,操作符为 >, 期望值为9,那么结果 10 -9 > 0 为真,那就意味着列为空的行数已经超过阈值,任务被判定为失败
-
-# 任务操作指南
-
-## 单表检查之空值检查
-
-### 检查介绍
-
-空值检查的目标是检查出指定列为空的行数,可将为空的行数与总行数或者指定阈值进行比较,如果大于某个阈值则判定为失败
-- 计算指定列为空的SQL语句如下:
-
-```sql
-SELECT COUNT(*) AS miss FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '') AND (${src_filter})
-```
-
-- 计算表总行数的SQL如下:
-
- ```sql
- SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})
- ```
-
-### 界面操作指南
-
-![dataquality_null_check](../../../img/tasks/demo/null_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之及时性检查
-
-### 检查介绍
-
-及时性检查用于检查数据是否在预期时间内处理完成,可指定开始时间、结束时间来界定时间范围,如果在该时间范围内的数据量没有达到设定的阈值,那么会判断该检查任务为失败
-
-### 界面操作指南
-
-![dataquality_timeliness_check](../../../img/tasks/demo/timeliness_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 起始时间:某个时间范围的开始时间
-- 结束时间:某个时间范围的结束时间
-- 时间格式:设置对应的时间格式
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之字段长度校验
-
-### 检查介绍
-
-字段长度校验的目标是检查所选字段的长度是否满足预期,如果有存在不满足要求的数据,并且行数超过阈值则会判断任务为失败
-
-### 界面操作指南
-
-![dataquality_length_check](../../../img/tasks/demo/field_length_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 逻辑操作符:=,>、>=、<、<=、!=
-- 字段长度限制:如标题
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之唯一性校验
-
-### 检查介绍
-
-唯一性校验的目标是检查字段是否存在重复的情况,一般用于检验primary key是否有重复,如果存在重复且达到阈值,则会判断检查任务为失败
-
-### 界面操作指南
-
-![dataquality_uniqueness_check](../../../img/tasks/demo/uniqueness_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之正则表达式校验
-
-### 检查介绍
-
-正则表达式校验的目标是检查某字段的值的格式是否符合要求,例如时间格式、邮箱格式、身份证格式等等,如果存在不符合格式的数据并超过阈值,则会判断任务为失败
-
-### 界面操作指南
-
-![dataquality_regex_check](../../../img/tasks/demo/regexp_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 正则表达式:如标题
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之枚举值校验
-
-### 检查介绍
-
-枚举值校验的目标是检查某字段的值是否在枚举值的范围内,如果存在不在枚举值范围里的数据并超过阈值,则会判断任务为失败
-
-### 界面操作指南
-
-![dataquality_enum_check](../../../img/tasks/demo/enumeration_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源表过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 枚举值列表:用英文逗号`,`隔开
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之表行数校验
-
-### 检查介绍
-
-表行数校验的目标是检查表的行数是否达到预期的值,如果行数未达标,则会判断任务为失败
-
-### 界面操作指南
-
-![dataquality_count_check](../../../img/tasks/demo/table_count_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 源表检查列:下拉选择检查列名
-- 校验方式:
-- [Expected-Actual][期望值-实际值]
-- [Actual-Expected][实际值-期望值]
-- [Actual/Expected][实际值/期望值]x100%
-- [(Expected-Actual)/Expected][(期望值-实际值)/期望值]x100%
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 单表检查之自定义SQL检查
-
-### 检查介绍
-
-### 界面操作指南
-
-![dataquality_custom_sql_check](../../../img/tasks/demo/custom_sql_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择要验证数据所在表
-- 实际值名:为统计值计算SQL中的别名,如max_num
-- 实际值计算SQL: 用于输出实际值的SQL。
-- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
-- select max(a) as max_num from ${src_table},表名必须这么填
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 校验方式:
-- 校验操作符:=,>、>=、<、<=、!=
-- 阈值:公式中用于比较的值
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型
-
-## 多表检查之准确性检查
-
-### 检查介绍
-
-准确性检查是通过比较两个表之间所选字段的数据记录的准确性差异,例子如下
-- 表test1
-
-| c1 | c2 |
-|:--:|:--:|
-| a | 1 |
-| b | 2 |
-
-- 表test2
-
-| c21 | c22 |
-|:---:|:---:|
-| a | 1 |
-| b | 3 |
-
-如果对比c1和c21中的数据,则表test1和test2完全一致。如果对比c2和c22,则表test1和表test2中的数据存在不一致。
-
-### 界面操作指南
-
-![dataquality_multi_table_accuracy_check](../../../img/tasks/demo/multi_table_accuracy_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:下拉选择要验证数据所在表
-- 源过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 目标数据类型:选择MySQL、PostgreSQL等
-- 目标数据源:源数据类型下对应的数据源
-- 目标数据表:下拉选择要验证数据所在表
-- 目标过滤条件:如标题,统计表总行数的时候也会用到,选填
-- 检查列:
-- 分别填写 源数据列,操作符,目标数据列
-- 校验方式:选择想要的校验方式
-- 操作符:=,>、>=、<、<=、!=
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-- 期望值类型:在下拉菜单中选择所要的类型,这里只适合选择SrcTableTotalRow、TargetTableTotalRow和固定值
-
-## 两表检查之值比对
-
-### 检查介绍
-
-两表值比对允许用户对两张表自定义不同的SQL统计出相应的值进行比对,例如针对源表A统计出某一列的金额总值sum1,针对目标表统计出某一列的金额总值sum2,将sum1和sum2进行比较来判定检查结果
-
-### 界面操作指南
-
-![dataquality_multi_table_comparison_check](../../../img/tasks/demo/multi_table_comparison_check.png)
-- 源数据类型:选择MySQL、PostgreSQL等
-- 源数据源:源数据类型下对应的数据源
-- 源数据表:要验证数据所在表
-- 实际值名:为实际值计算SQL中的别名,如max_age1
-- 实际值计算SQL: 用于输出实际值的SQL。
-- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
-- select max(age) as max_age1 from ${src_table} 表名必须这么填
-- 目标数据类型:选择MySQL、PostgreSQL等
-- 目标数据源:源数据类型下对应的数据源
-- 目标数据表:要验证数据所在表
-- 期望值名:为期望值计算SQL中的别名,如max_age2
-- 期望值计算SQL: 用于输出期望值的SQL。
-- 注意点:该SQL必须为统计SQL,例如统计行数,计算最大值、最小值等
-- select max(age) as max_age2 from ${target_table} 表名必须这么填
-- 校验方式:选择想要的校验方式
-- 操作符:=,>、>=、<、<=、!=
-- 失败策略
-- 告警:数据质量任务失败了,DolphinScheduler任务结果为成功,发送告警
-- 阻断:数据质量任务失败了,DolphinScheduler任务结果为失败,发送告警
-
-## 任务结果查看
-
-![dataquality_result](../../../img/tasks/demo/result.png)
-
-## 规则查看
-
-### 规则列表
-
-![dataquality_rule_list](../../../img/tasks/demo/rule_list.png)
-
-### 规则详情
-
-![dataquality_rule_detail](../../../img/tasks/demo/rule_detail.png)
diff --git a/docs/docs/zh/guide/upgrade/incompatible.md b/docs/docs/zh/guide/upgrade/incompatible.md
index 098992294dbf..f1fb24d9c955 100644
--- a/docs/docs/zh/guide/upgrade/incompatible.md
+++ b/docs/docs/zh/guide/upgrade/incompatible.md
@@ -26,8 +26,9 @@
## 3.3.0
-* 从 `资源中心` 中移除了 `udf-manage` 功能 ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
-* 从 `任务插件` 中移除了 `Pigeon` 类型 ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
+* 从 `资源中心` 中移除 `udf-manage` 功能 ([#16209](https://github.com/apache/dolphinscheduler/pull/16209))
+* 从 `任务插件` 中移除 `Pigeon` 类型 ([#16218](https://github.com/apache/dolphinscheduler/pull/16218))
* 统一代码中的 `process` 为 `workflow` ([#16515](https://github.com/apache/dolphinscheduler/pull/16515))
-* 在 3.3.0-release 中废弃了从 1.x 至 2.x 的升级代码 ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
+* 废弃从 1.x 至 2.x 的升级代码 ([#16543](https://github.com/apache/dolphinscheduler/pull/16543))
+* 移除 `数据质量` 模块 ([#16794](https://github.com/apache/dolphinscheduler/pull/16794))
diff --git a/docs/img/tasks/demo/custom_sql_check.png b/docs/img/tasks/demo/custom_sql_check.png
deleted file mode 100644
index b1e1b9b6f8bb..000000000000
Binary files a/docs/img/tasks/demo/custom_sql_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/enumeration_check.png b/docs/img/tasks/demo/enumeration_check.png
deleted file mode 100644
index e2e16554cfa7..000000000000
Binary files a/docs/img/tasks/demo/enumeration_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/field_length_check.png b/docs/img/tasks/demo/field_length_check.png
deleted file mode 100644
index 17c1822ba141..000000000000
Binary files a/docs/img/tasks/demo/field_length_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/multi_table_accuracy_check.png b/docs/img/tasks/demo/multi_table_accuracy_check.png
deleted file mode 100644
index c78fbc0e0cef..000000000000
Binary files a/docs/img/tasks/demo/multi_table_accuracy_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/multi_table_comparison_check.png b/docs/img/tasks/demo/multi_table_comparison_check.png
deleted file mode 100644
index f02715833728..000000000000
Binary files a/docs/img/tasks/demo/multi_table_comparison_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/null_check.png b/docs/img/tasks/demo/null_check.png
deleted file mode 100644
index 46beaa5b42c7..000000000000
Binary files a/docs/img/tasks/demo/null_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/regexp_check.png b/docs/img/tasks/demo/regexp_check.png
deleted file mode 100644
index de5681451fbf..000000000000
Binary files a/docs/img/tasks/demo/regexp_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/result.png b/docs/img/tasks/demo/result.png
deleted file mode 100644
index 07f93b4eb920..000000000000
Binary files a/docs/img/tasks/demo/result.png and /dev/null differ
diff --git a/docs/img/tasks/demo/rule_detail.png b/docs/img/tasks/demo/rule_detail.png
deleted file mode 100644
index bf5a2d066d01..000000000000
Binary files a/docs/img/tasks/demo/rule_detail.png and /dev/null differ
diff --git a/docs/img/tasks/demo/rule_list.png b/docs/img/tasks/demo/rule_list.png
deleted file mode 100644
index f61224f5a171..000000000000
Binary files a/docs/img/tasks/demo/rule_list.png and /dev/null differ
diff --git a/docs/img/tasks/demo/table_count_check.png b/docs/img/tasks/demo/table_count_check.png
deleted file mode 100644
index 4bcded603392..000000000000
Binary files a/docs/img/tasks/demo/table_count_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/timeliness_check.png b/docs/img/tasks/demo/timeliness_check.png
deleted file mode 100644
index 86505f6b53d0..000000000000
Binary files a/docs/img/tasks/demo/timeliness_check.png and /dev/null differ
diff --git a/docs/img/tasks/demo/uniqueness_check.png b/docs/img/tasks/demo/uniqueness_check.png
deleted file mode 100644
index 708d374458f5..000000000000
Binary files a/docs/img/tasks/demo/uniqueness_check.png and /dev/null differ
diff --git a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/docker/file-manage/common.properties b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/docker/file-manage/common.properties
index 96879cc2721c..d2e08600afaa 100644
--- a/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/docker/file-manage/common.properties
+++ b/dolphinscheduler-api-test/dolphinscheduler-api-test-case/src/test/resources/docker/file-manage/common.properties
@@ -84,13 +84,6 @@ datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
-# data quality jar directory path; the data quality jar will be auto-discovered from this given dir. You should keep it empty if you have not changed anything in
-# data-quality, and it will be auto-discovered by dolphinscheduler itself. Change it only if you want to use your own data-quality jar and it is not in the worker-server
-# libs directory (but make sure your jar name starts with `dolphinscheduler-data-quality`).
-data-quality.jar.dir=
-
-#data-quality.error.output.path=/tmp/data-quality-error-data
-
# Network IP gets priority, default inner outer
# Whether hive SQL is executed in the same session
@@ -126,4 +119,4 @@ ml.mlflow.preset_repository=https://github.com/apache/dolphinscheduler-mlflow
ml.mlflow.preset_repository_version="main"
# way to collect applicationId: log(original regex match), aop
-appId.collect: log
\ No newline at end of file
+appId.collect: log
diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml
index 7afcf6b89494..764c6daa9994 100644
--- a/dolphinscheduler-api/pom.xml
+++ b/dolphinscheduler-api/pom.xml
@@ -61,11 +61,6 @@
<artifactId>dolphinscheduler-meter</artifactId>
</dependency>
-<dependency>
- <groupId>org.apache.dolphinscheduler</groupId>
- <artifactId>dolphinscheduler-data-quality</artifactId>
-</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-datasource-all</artifactId>
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java
index 5798d7eee826..c151c3230c79 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java
@@ -45,13 +45,12 @@ public class DynamicTaskTypeConfiguration {
private static final List<String> defaultTaskCategories =
Arrays.asList(Constants.TYPE_UNIVERSAL, Constants.TYPE_DATA_INTEGRATION, Constants.TYPE_CLOUD,
- Constants.TYPE_LOGIC, Constants.TYPE_DATA_QUALITY, Constants.TYPE_OTHER,
+ Constants.TYPE_LOGIC, Constants.TYPE_OTHER,
Constants.TYPE_MACHINE_LEARNING);
private List<String> universal;
private List<String> cloud;
private List<String> logic;
private List<String> dataIntegration;
- private List<String> dataQuality;
private List<String> other;
private List<String> machineLearning;
@@ -69,8 +68,6 @@ public List<String> getTaskTypesByCategory(String category) {
return logic;
case Constants.TYPE_LOGIC:
return dataIntegration;
- case Constants.TYPE_DATA_QUALITY:
- return dataQuality;
case Constants.TYPE_OTHER:
return other;
case Constants.TYPE_MACHINE_LEARNING:
@@ -86,7 +83,6 @@ public void printDefaultTypes() {
log.info("support default cloud dynamic task types: {}", cloud);
log.info("support default logic dynamic task types: {}", logic);
log.info("support default dataIntegration dynamic task types: {}", dataIntegration);
- log.info("support default dataQuality dynamic task types: {}", dataQuality);
log.info("support default machineLearning dynamic task types: {}", machineLearning);
log.info("support default other dynamic task types: {}", other);
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java
index 1393a70ced2a..09b5576f4262 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java
@@ -48,7 +48,6 @@ public class TaskTypeConfiguration {
private List<String> cloud;
private List<String> logic;
private List<String> dataIntegration;
- private List<String> dataQuality;
private List<String> other;
private List<String> machineLearning;
@@ -64,7 +63,6 @@ public List<FavTaskDto> getDefaultTaskTypes() {
logic.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_LOGIC)));
dataIntegration
.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_DATA_INTEGRATION)));
- dataQuality.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_DATA_QUALITY)));
machineLearning
.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_MACHINE_LEARNING)));
other.forEach(task -> defaultTaskTypes.add(new FavTaskDto(task, false, Constants.TYPE_OTHER)));
@@ -76,7 +74,6 @@ public void printDefaultTypes() {
log.info("support default cloud task types: {}", cloud);
log.info("support default logic task types: {}", logic);
log.info("support default dataIntegration task types: {}", dataIntegration);
- log.info("support default dataQuality task types: {}", dataQuality);
log.info("support default machineLearning task types: {}", machineLearning);
log.info("support default other task types: {}", other);
}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java
deleted file mode 100644
index dc4dc32a3296..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.controller;
-
-import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_OPTIONS_ERROR;
-import static org.apache.dolphinscheduler.api.enums.Status.GET_RULE_FORM_CREATE_JSON_ERROR;
-import static org.apache.dolphinscheduler.api.enums.Status.QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR;
-import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_ERROR;
-import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_PAGING_ERROR;
-
-import org.apache.dolphinscheduler.api.exceptions.ApiException;
-import org.apache.dolphinscheduler.api.service.DqExecuteResultService;
-import org.apache.dolphinscheduler.api.service.DqRuleService;
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.constants.Constants;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
-import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
-
-import java.util.List;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.RequestAttribute;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.ResponseStatus;
-import org.springframework.web.bind.annotation.RestController;
-
-import io.swagger.v3.oas.annotations.Operation;
-import io.swagger.v3.oas.annotations.Parameter;
-import io.swagger.v3.oas.annotations.Parameters;
-import io.swagger.v3.oas.annotations.media.Schema;
-import io.swagger.v3.oas.annotations.tags.Tag;
-
-/**
- * data quality controller
- */
-@Tag(name = "DATA_QUALITY_TAG")
-@RestController
-@RequestMapping("/data-quality")
-public class DataQualityController extends BaseController {
-
- @Autowired
- private DqRuleService dqRuleService;
-
- @Autowired
- private DqExecuteResultService dqExecuteResultService;
-
- /**
- * get rule form-create json
- * @param ruleId ruleId
- * @return form-create json
- */
- @Operation(summary = "getRuleFormCreateJson", description = "GET_RULE_FORM_CREATE_JSON_NOTES")
- @Parameters({
- @Parameter(name = "ruleId", description = "RULE_ID", schema = @Schema(implementation = int.class, example = "1"))
- })
- @GetMapping(value = "/getRuleFormCreateJson")
- @ResponseStatus(HttpStatus.OK)
- @ApiException(GET_RULE_FORM_CREATE_JSON_ERROR)
- public Result<String> getRuleFormCreateJsonById(@RequestParam(value = "ruleId") int ruleId) {
- String ruleFormCreateJsonById = dqRuleService.getRuleFormCreateJsonById(ruleId);
- return Result.success(ruleFormCreateJsonById);
- }
-
- /**
- * query rule list paging
- *
- * @param loginUser login user
- * @param searchVal search value
- * @param pageNo page number
- * @param pageSize page size
- * @return rule page
- */
- @Operation(summary = "queryRuleListPaging", description = "QUERY_RULE_LIST_PAGING_NOTES")
- @Parameters({
- @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
- @Parameter(name = "ruleType", description = "RULE_TYPE", schema = @Schema(implementation = int.class, example = "1")),
- @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
- @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
- @Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
- @Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "10"))
- })
- @GetMapping(value = "/rule/page")
- @ResponseStatus(HttpStatus.OK)
- @ApiException(QUERY_RULE_LIST_PAGING_ERROR)
- public Result<PageInfo<DqRule>> queryRuleListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam(value = "searchVal", required = false) String searchVal,
- @RequestParam(value = "ruleType", required = false) Integer ruleType,
- @RequestParam(value = "startDate", required = false) String startTime,
- @RequestParam(value = "endDate", required = false) String endTime,
- @RequestParam("pageNo") Integer pageNo,
- @RequestParam("pageSize") Integer pageSize) {
- checkPageParams(pageNo, pageSize);
- searchVal = ParameterUtils.handleEscapes(searchVal);
-
- PageInfo<DqRule> dqRulePageInfo =
- dqRuleService.queryRuleListPaging(loginUser, searchVal, ruleType, startTime, endTime, pageNo, pageSize);
- return Result.success(dqRulePageInfo);
- }
-
- /**
- * query all rule list
- *
- * @return rule list
- */
- @Operation(summary = "queryRuleList", description = "QUERY_RULE_LIST_NOTES")
- @GetMapping(value = "/ruleList")
- @ResponseStatus(HttpStatus.OK)
- @ApiException(QUERY_RULE_LIST_ERROR)
- public Result<List<DqRule>> queryRuleList() {
- List<DqRule> dqRules = dqRuleService.queryAllRuleList();
- return Result.success(dqRules);
- }
-
- /**
- * query task execute result list paging
- *
- * @param loginUser loginUser
- * @param searchVal searchVal
- * @param ruleType ruleType
- * @param state state
- * @param startTime startTime
- * @param endTime endTime
- * @param pageNo pageNo
- * @param pageSize pageSize
- * @return
- */
- @Operation(summary = "queryExecuteResultListPaging", description = "QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES")
- @Parameters({
- @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
- @Parameter(name = "ruleType", description = "RULE_TYPE", schema = @Schema(implementation = int.class, example = "1")),
- @Parameter(name = "state", description = "STATE", schema = @Schema(implementation = int.class, example = "1")),
- @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
- @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
- @Parameter(name = "pageNo", description = "PAGE_NO", schema = @Schema(implementation = int.class, example = "1")),
- @Parameter(name = "pageSize", description = "PAGE_SIZE", schema = @Schema(implementation = int.class, example = "10"))
- })
- @GetMapping(value = "/result/page")
- @ResponseStatus(HttpStatus.OK)
- @ApiException(QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR)
- public Result<PageInfo<DqExecuteResult>> queryExecuteResultListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
- @RequestParam(value = "searchVal", required = false) String searchVal,
- @RequestParam(value = "ruleType", required = false) Integer ruleType,
- @RequestParam(value = "state", required = false) Integer state,
- @RequestParam(value = "startDate", required = false) String startTime,
- @RequestParam(value = "endDate", required = false) String endTime,
- @RequestParam("pageNo") Integer pageNo,
- @RequestParam("pageSize") Integer pageSize) {
-
- checkPageParams(pageNo, pageSize);
- searchVal = ParameterUtils.handleEscapes(searchVal);
-
- PageInfo<DqExecuteResult> dqExecuteResultPageInfo = dqExecuteResultService.queryResultListPaging(loginUser,
- searchVal, state, ruleType, startTime, endTime, pageNo, pageSize);
- return Result.success(dqExecuteResultPageInfo);
- }
-
- /**
- * get datasource options by id
- * @param datasourceId datasourceId
- * @return result
- */
- @Operation(summary = "getDatasourceOptionsById", description = "GET_DATASOURCE_OPTIONS_NOTES")
- @Parameters({
- @Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", schema = @Schema(implementation = int.class, example = "1"))
- })
- @GetMapping(value = "/getDatasourceOptionsById")
- @ResponseStatus(HttpStatus.OK)
- @ApiException(GET_DATASOURCE_OPTIONS_ERROR)
- public Result<List<ParamsOptions>> getDatasourceOptionsById(@RequestParam(value = "datasourceId") int datasourceId) {
- List<ParamsOptions> paramsOptions = dqRuleService.getDatasourceOptionsById(datasourceId);
- return Result.success(paramsOptions);
- }
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java
deleted file mode 100644
index 2ef0f11cbd10..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.dto;
-
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-
-import java.util.List;
-
-/**
- * RuleDefinition
- */
-public class RuleDefinition {
-
- /**
- * rule input entry list
- */
- private List<DqRuleInputEntry> ruleInputEntryList;
-
- /**
- * rule execute sql list
- */
- private List<DqRuleExecuteSql> executeSqlList;
-
- public RuleDefinition() {
- }
-
- public RuleDefinition(List<DqRuleInputEntry> ruleInputEntryList, List<DqRuleExecuteSql> executeSqlList) {
- this.ruleInputEntryList = ruleInputEntryList;
- this.executeSqlList = executeSqlList;
- }
-
- public List<DqRuleInputEntry> getRuleInputEntryList() {
- return ruleInputEntryList;
- }
-
- public void setRuleInputEntryList(List<DqRuleInputEntry> ruleInputEntryList) {
- this.ruleInputEntryList = ruleInputEntryList;
- }
-
- public List<DqRuleExecuteSql> getExecuteSqlList() {
- return executeSqlList;
- }
-
- public void setExecuteSqlList(List<DqRuleExecuteSql> executeSqlList) {
- this.executeSqlList = executeSqlList;
- }
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
index c6764a841f57..1aacebb35535 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java
@@ -485,12 +485,6 @@ public enum Status {
QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment code [{0}] ", "查询环境编码[{0}]不存在"),
QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"),
VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"),
- GET_RULE_FORM_CREATE_JSON_ERROR(1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"),
- QUERY_RULE_LIST_PAGING_ERROR(1200013, "query rule list paging error", "获取规则分页列表错误"),
- QUERY_RULE_LIST_ERROR(1200014, "query rule list error", "获取规则列表错误"),
- QUERY_RULE_INPUT_ENTRY_LIST_ERROR(1200015, "query rule list error", "获取规则列表错误"),
- QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR(1200016, "query execute result list paging error", "获取数据质量任务结果分页错误"),
- GET_DATASOURCE_OPTIONS_ERROR(1200017, "get datasource options error", "获取数据源Options错误"),
GET_DATASOURCE_TABLES_ERROR(1200018, "get datasource tables error", "获取数据源表列表错误"),
GET_DATASOURCE_TABLE_COLUMNS_ERROR(1200019, "get datasource table columns error", "获取数据源表列名错误"),
GET_DATASOURCE_DATABASES_ERROR(1200035, "get datasource databases error", "获取数据库列表错误"),
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java
deleted file mode 100644
index 02842ae74f32..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service;
-
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.User;
-
-/**
- * DqExecuteResultService
- */
-public interface DqExecuteResultService {
-
- PageInfo<DqExecuteResult> queryResultListPaging(User loginUser,
- String searchVal,
- Integer state,
- Integer ruleType,
- String startTime,
- String endTime,
- Integer pageNo, Integer pageSize);
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java
deleted file mode 100644
index 8b331f4395cc..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service;
-
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
-
-import java.util.List;
-
-/**
- * DqsRuleService
- */
-public interface DqRuleService {
-
- String getRuleFormCreateJsonById(int id);
-
- List<DqRule> queryAllRuleList();
-
- PageInfo<DqRule> queryRuleListPaging(User loginUser,
- String searchVal,
- Integer ruleType,
- String startTime,
- String endTime,
- Integer pageNo, Integer pageSize);
-
- List<ParamsOptions> getDatasourceOptionsById(int datasourceId);
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java
deleted file mode 100644
index ec1c7d47d1e4..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service.impl;
-
-import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.exceptions.ServiceException;
-import org.apache.dolphinscheduler.api.service.DqExecuteResultService;
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.common.utils.DateUtils;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
-
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Date;
-
-import lombok.extern.slf4j.Slf4j;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-
-/**
- * DqExecuteResultServiceImpl
- */
-@Service
-@Slf4j
-public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExecuteResultService {
-
- @Autowired
- private DqExecuteResultMapper dqExecuteResultMapper;
-
- @Override
- public PageInfo<DqExecuteResult> queryResultListPaging(User loginUser,
- String searchVal,
- Integer state,
- Integer ruleType,
- String startTime,
- String endTime,
- Integer pageNo,
- Integer pageSize) {
-
- int[] statusArray = null;
- // filter by state
- if (state != null) {
- statusArray = new int[]{state};
- }
-
- Date start = null;
- Date end = null;
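- // reject unparsable date strings as an invalid-parameter error instead of silently ignoring them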
- try {
- if (StringUtils.isNotEmpty(startTime)) {
- start = DateUtils.stringToDate(startTime);
- }
- if (StringUtils.isNotEmpty(endTime)) {
- end = DateUtils.stringToDate(endTime);
- }
- } catch (Exception e) {
- throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime");
- }
-
- Page<DqExecuteResult> page = new Page<>(pageNo, pageSize);
- PageInfo<DqExecuteResult> pageInfo = new PageInfo<>(pageNo, pageSize);
-
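- // null ruleType is normalized to -1, which the mapper treats as "no rule type filter" (assumed mapper convention)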
- if (ruleType == null) {
- ruleType = -1;
- }
-
- IPage<DqExecuteResult> dqsResultPage =
- dqExecuteResultMapper.queryResultListPaging(
- page,
- searchVal,
- loginUser,
- statusArray,
- ruleType,
- start,
- end);
-
- pageInfo.setTotal((int) dqsResultPage.getTotal());
- pageInfo.setTotalList(dqsResultPage.getRecords());
- return pageInfo;
- }
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java
deleted file mode 100644
index 1be85998870a..000000000000
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java
+++ /dev/null
@@ -1,314 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service.impl;
-
-import static org.apache.dolphinscheduler.common.constants.Constants.CHANGE;
-import static org.apache.dolphinscheduler.common.constants.Constants.SMALL;
-
-import org.apache.dolphinscheduler.api.dto.RuleDefinition;
-import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.exceptions.ServiceException;
-import org.apache.dolphinscheduler.api.service.DqRuleService;
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.common.utils.DateUtils;
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.dao.entity.DataSource;
-import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
-import org.apache.dolphinscheduler.dao.utils.DqRuleUtils;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
-import org.apache.dolphinscheduler.spi.enums.DbType;
-import org.apache.dolphinscheduler.spi.params.base.FormType;
-import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
-import org.apache.dolphinscheduler.spi.params.base.PluginParams;
-import org.apache.dolphinscheduler.spi.params.base.PropsType;
-import org.apache.dolphinscheduler.spi.params.base.Validate;
-import org.apache.dolphinscheduler.spi.params.group.GroupParam;
-import org.apache.dolphinscheduler.spi.params.group.GroupParamsProps;
-import org.apache.dolphinscheduler.spi.params.input.InputParam;
-import org.apache.dolphinscheduler.spi.params.input.InputParamProps;
-import org.apache.dolphinscheduler.spi.params.select.SelectParam;
-
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
-import java.util.Objects;
-
-import lombok.extern.slf4j.Slf4j;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-/**
- * DqRuleServiceImpl
- */
-@Service
-@Slf4j
-public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService {
-
- @Autowired
- private DqRuleMapper dqRuleMapper;
-
- @Autowired
- private DqRuleInputEntryMapper dqRuleInputEntryMapper;
-
- @Autowired
- private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;
-
- @Autowired
- private DataSourceMapper dataSourceMapper;
-
- @Autowired
- private DqComparisonTypeMapper dqComparisonTypeMapper;
-
- @Override
- public String getRuleFormCreateJsonById(int id) {
-
- List<DqRuleInputEntry> ruleInputEntryList = dqRuleInputEntryMapper.getRuleInputEntryList(id);
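- // a rule without input entries cannot be rendered as a form, so fail fast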
- if (ruleInputEntryList == null || ruleInputEntryList.isEmpty()) {
- throw new ServiceException(Status.QUERY_RULE_INPUT_ENTRY_LIST_ERROR);
- }
- return getRuleFormCreateJson(DqRuleUtils.transformInputEntry(ruleInputEntryList));
- }
-
- @Override
- public List<DqRule> queryAllRuleList() {
- return dqRuleMapper.selectList(new QueryWrapper<>());
- }
-
- @Override
- public List<ParamsOptions> getDatasourceOptionsById(int datasourceId) {
-
- List<DataSource> dataSourceList = dataSourceMapper.listAllDataSourceByType(datasourceId);
- if (CollectionUtils.isEmpty(dataSourceList)) {
- return Collections.emptyList();
- }
-
- List<ParamsOptions> options = new ArrayList<>();
- for (DataSource dataSource : dataSourceList) {
- ParamsOptions childrenOption = new ParamsOptions(dataSource.getName(), dataSource.getId(), false);
- options.add(childrenOption);
- }
- return options;
- }
-
- @Override
- public PageInfo<DqRule> queryRuleListPaging(User loginUser,
- String searchVal,
- Integer ruleType,
- String startTime,
- String endTime,
- Integer pageNo,
- Integer pageSize) {
-
- Date start = null;
- Date end = null;
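- // reject unparsable date strings as an invalid-parameter error instead of silently ignoring them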
- try {
- if (StringUtils.isNotEmpty(startTime)) {
- start = DateUtils.stringToDate(startTime);
- }
- if (StringUtils.isNotEmpty(endTime)) {
- end = DateUtils.stringToDate(endTime);
- }
- } catch (Exception e) {
- throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime");
- }
-
- Page<DqRule> page = new Page<>(pageNo, pageSize);
- PageInfo<DqRule> pageInfo = new PageInfo<>(pageNo, pageSize);
-
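- // null ruleType is normalized to -1, which the mapper treats as "no rule type filter" (assumed mapper convention)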
- if (ruleType == null) {
- ruleType = -1;
- }
-
- IPage<DqRule> dqRulePage =
- dqRuleMapper.queryRuleListPaging(
- page,
- searchVal,
- ruleType,
- start,
- end);
- if (dqRulePage != null) {
- List<DqRule> dataList = dqRulePage.getRecords();
- dataList.forEach(dqRule -> {
- List<DqRuleInputEntry> ruleInputEntryList =
- DqRuleUtils.transformInputEntry(dqRuleInputEntryMapper.getRuleInputEntryList(dqRule.getId()));
- List<DqRuleExecuteSql> ruleExecuteSqlList = dqRuleExecuteSqlMapper.getExecuteSqlList(dqRule.getId());
-
- RuleDefinition ruleDefinition = new RuleDefinition(ruleInputEntryList, ruleExecuteSqlList);
- dqRule.setRuleJson(JSONUtils.toJsonString(ruleDefinition));
- });
-
- pageInfo.setTotal((int) dqRulePage.getTotal());
- pageInfo.setTotalList(dataList);
- }
-
- return pageInfo;
- }
-
- private String getRuleFormCreateJson(List<DqRuleInputEntry> ruleInputEntryList) {
- List<PluginParams> params = new ArrayList<>();
-
- for (DqRuleInputEntry inputEntry : ruleInputEntryList) {
- if (Boolean.TRUE.equals(inputEntry.getIsShow())) {
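- // only visible entries are rendered; each form type maps to its own param builder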
- switch (Objects.requireNonNull(FormType.of(inputEntry.getType()))) {
- case INPUT:
- params.add(getInputParam(inputEntry));
- break;
- case SELECT:
- params.add(getSelectParam(inputEntry));
- break;
- case TEXTAREA:
- params.add(getTextareaParam(inputEntry));
- break;
- case GROUP:
- params.add(getGroupParam(inputEntry));
- break;
- default:
- break;
- }
- }
- }
-
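- // serialize the collected params, omitting null fields to keep the form-create JSON compact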
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
- String result = null;
-
- try {
- result = mapper.writeValueAsString(params);
- } catch (JsonProcessingException e) {
- log.error("Json parse error.", e);
- }
-
- return result;
- }
-
- private InputParam getTextareaParam(DqRuleInputEntry inputEntry) {
-
- InputParamProps paramProps =
- new InputParamProps();
- paramProps.setDisabled(!inputEntry.getCanEdit());
- paramProps.setSize(SMALL);
- paramProps.setType(PropsType.TEXTAREA.getPropsType());
- paramProps.setRows(1);
-
- return InputParam
- .newBuilder(inputEntry.getField(), inputEntry.getTitle())
- .addValidate(Validate.newBuilder()
- .setRequired(inputEntry.getIsValidate())
- .build())
- .setProps(paramProps)
- .setValue(inputEntry.getData())
- .setPlaceholder(inputEntry.getPlaceholder())
- .setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
- .build();
- }
-
- private SelectParam getSelectParam(DqRuleInputEntry inputEntry) {
- List<ParamsOptions> options = null;
-
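- // options come from static JSON, the DbType enum, or the comparison type table, depending on the option source type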
- switch (OptionSourceType.of(inputEntry.getOptionSourceType())) {
- case DEFAULT:
- String optionStr = inputEntry.getOptions();
- if (StringUtils.isNotEmpty(optionStr)) {
- options = JSONUtils.toList(optionStr, ParamsOptions.class);
- }
- break;
- case DATASOURCE_TYPE:
- options = new ArrayList<>();
- ParamsOptions paramsOptions = null;
- for (DbType dbtype : DbType.values()) {
- paramsOptions = new ParamsOptions(dbtype.name(), dbtype.getCode(), false);
- options.add(paramsOptions);
- }
- break;
- case COMPARISON_TYPE:
- options = new ArrayList<>();
- ParamsOptions comparisonOptions = null;
- List<DqComparisonType> list =
- dqComparisonTypeMapper.selectList(new QueryWrapper<DqComparisonType>().orderByAsc("id"));
-
- for (DqComparisonType type : list) {
- comparisonOptions = new ParamsOptions(type.getType(), type.getId(), false);
- options.add(comparisonOptions);
- }
- break;
- default:
- break;
- }
-
- return SelectParam
- .newBuilder(inputEntry.getField(), inputEntry.getTitle())
- .setOptions(options)
- .setValue(inputEntry.getData())
- .setSize(SMALL)
- .setPlaceHolder(inputEntry.getPlaceholder())
- .setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
- .build();
- }
-
- private InputParam getInputParam(DqRuleInputEntry inputEntry) {
- InputParamProps paramProps =
- new InputParamProps();
- paramProps.setDisabled(!inputEntry.getCanEdit());
- paramProps.setSize(SMALL);
- paramProps.setRows(2);
-
- return InputParam
- .newBuilder(inputEntry.getField(), inputEntry.getTitle())
- .addValidate(Validate.newBuilder()
- .setRequired(inputEntry.getIsValidate())
- .build())
- .setProps(paramProps)
- .setValue(inputEntry.getData())
- .setPlaceholder(inputEntry.getPlaceholder())
- .setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
- .build();
- }
-
- private GroupParam getGroupParam(DqRuleInputEntry inputEntry) {
- return GroupParam
- .newBuilder(inputEntry.getField(), inputEntry.getTitle())
- .addValidate(Validate.newBuilder()
- .setRequired(inputEntry.getIsValidate())
- .build())
- .setProps(new GroupParamsProps().setRules(JSONUtils.toList(inputEntry.getOptions(), PluginParams.class))
- .setFontSize(20))
- .setEmit(Boolean.TRUE.equals(inputEntry.getIsEmit()) ? Collections.singletonList(CHANGE) : null)
- .build();
- }
-}
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java
index a3137d3f7ca2..957aca912fd7 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java
@@ -41,7 +41,6 @@
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
-import org.apache.dolphinscheduler.dao.repository.DqExecuteResultDao;
import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao;
import org.apache.dolphinscheduler.dao.utils.TaskCacheUtils;
@@ -100,9 +99,6 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
@Autowired
TaskDefinitionMapper taskDefinitionMapper;
- @Autowired
- private DqExecuteResultDao dqExecuteResultDao;
-
@Autowired
private TaskGroupQueueService taskGroupQueueService;
@@ -375,7 +371,6 @@ public void deleteByWorkflowInstanceId(Integer workflowInstanceId) {
}
}
- dqExecuteResultDao.deleteByWorkflowInstanceId(workflowInstanceId);
taskGroupQueueService.deleteByWorkflowInstanceId(workflowInstanceId);
taskInstanceDao.deleteByWorkflowInstanceId(workflowInstanceId);
}
diff --git a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml
index 3fd74c3812d7..8f2c541f8e26 100644
--- a/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml
+++ b/dolphinscheduler-api/src/main/resources/dynamic-task-type-config.yaml
@@ -24,7 +24,5 @@ dynamic-task:
- {name: SUB_WORKFLOW,icon: shell-icon.png,hover: shell-hover.png}
dataIntegration:
- {name: SEATUNNEL,icon: shell-icon.png,hover: shell-hover.png}
- dataQuality:
- - {name: DATA_QUALITY,icon: shell-icon.png,hover: shell-hover.png}
machineLearning:
- {name: JUPYTER,icon: shell-icon.png,hover: shell-hover.png}
diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties
index 62ad55750e3d..5b946c6343b1 100644
--- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties
+++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties
@@ -233,14 +233,7 @@ DELETE_WORKFLOW_DEFINITION_VERSION_NOTES=delete workflow definition version
QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=query workflow definition versions
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=switch workflow definition version
VERSION=version
-GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json
-QUERY_RULE_LIST_PAGING_NOTES=query rule list paging
-QUERY_RULE_LIST_NOTES=query rule list
-QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging
-RULE_ID=rule id
-RULE_TYPE=rule type
STATE=state
-GET_DATASOURCE_OPTIONS_NOTES=get datasource options
GET_DATASOURCE_TABLES_NOTES=get datasource table
GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns
TABLE_NAME=table name
@@ -278,7 +271,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=delete cluster by code
QUERY_ALL_CLUSTER_LIST_NOTES=query all cluster list
VERIFY_CLUSTER_NOTES=verify cluster
-DATA_QUALITY_TAG=data quality related operation
EXPECTED_PARALLELISM_NUMBER=custom parallelism to set the complement task threads
DRY_RUN=dry run
TEST_FLAG=test flag
diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
index aed7f4bf9425..5385301f7be1 100644
--- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
+++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties
@@ -269,13 +269,6 @@ QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=query process definition versions
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=switch process definition version
VERSION=version
TASK_GROUP_QUEUE_PRIORITY=task group queue priority
-GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json
-QUERY_RULE_LIST_PAGING_NOTES=query rule list paging
-QUERY_RULE_LIST_NOTES=query rule list
-QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging
-RULE_ID=rule id
-RULE_TYPE=rule type
-GET_DATASOURCE_OPTIONS_NOTES=get datasource options
GET_DATASOURCE_TABLES_NOTES=get datasource table
GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns
TABLE_NAME=table name
@@ -313,7 +306,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=delete cluster by code
QUERY_ALL_CLUSTER_LIST_NOTES=query all cluster list
VERIFY_CLUSTER_NOTES=verify cluster
-DATA_QUALITY_TAG=data quality service
TEST_FLAG=test flag
RUN_TASK_INSTANCE_NOTES=run task instance
diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
index 6529121fc630..8d1cef0667b1 100644
--- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
+++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties
@@ -267,13 +267,6 @@ QUERY_WORKFLOW_DEFINITION_VERSIONS_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5386\u53F2\u7
SWITCH_WORKFLOW_DEFINITION_VERSION_NOTES=\u5207\u6362\u6D41\u7A0B\u7248\u672C
VERSION=\u7248\u672C\u53F7
TASK_GROUP_QUEUE_PRIORITY=\u4EFB\u52A1\u961F\u5217\u4F18\u5148\u7EA7
-GET_RULE_FORM_CREATE_JSON_NOTES=\u83B7\u53D6\u89C4\u5219form-create json
-QUERY_RULE_LIST_PAGING_NOTES=\u67E5\u8BE2\u89C4\u5219\u5206\u9875\u5217\u8868
-QUERY_RULE_LIST_NOTES=\u67E5\u8BE2\u89C4\u5219\u5217\u8868
-QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=\u67E5\u8BE2\u6570\u636E\u8D28\u91CF\u4EFB\u52A1\u7ED3\u679C\u5206\u9875\u5217\u8868
-RULE_ID=\u89C4\u5219ID
-RULE_TYPE=\u89C4\u5219\u7C7B\u578B
-GET_DATASOURCE_OPTIONS_NOTES=\u83B7\u53D6\u6570\u636E\u6E90OPTIONS
GET_DATASOURCE_TABLES_NOTES=\u83B7\u53D6\u6570\u636E\u6E90\u8868\u5217\u8868
GET_DATASOURCE_TABLE_COLUMNS_NOTES=\u83B7\u53D6\u6570\u636E\u6E90\u8868\u5217\u540D
TABLE_NAME=\u8868\u540D
@@ -311,7 +304,6 @@ DELETE_CLUSTER_BY_CODE_NOTES=\u901A\u8FC7\u96C6\u7FA4\u4EE3\u7801\u5220\u9664\u9
QUERY_ALL_CLUSTER_LIST_NOTES=\u67E5\u8BE2\u6240\u6709\u96C6\u7FA4\u5217\u8868
VERIFY_CLUSTER_NOTES=\u6821\u9A8C\u96C6\u7FA4
-DATA_QUALITY_TAG=\u6570\u636E\u8D28\u91CF\u76F8\u5173\u64CD\u4F5C
TEST_FLAG=\u6D4B\u8BD5\u6807\u8BC6
RUN_TASK_INSTANCE_NOTES=\u8FD0\u884C\u4EFB\u52A1\u5B9E\u4F8B
diff --git a/dolphinscheduler-api/src/main/resources/task-type-config.yaml b/dolphinscheduler-api/src/main/resources/task-type-config.yaml
index 761faf1f0c28..56053b74bdf8 100644
--- a/dolphinscheduler-api/src/main/resources/task-type-config.yaml
+++ b/dolphinscheduler-api/src/main/resources/task-type-config.yaml
@@ -46,8 +46,6 @@ task:
- 'SEATUNNEL'
- 'DATAX'
- 'SQOOP'
- dataQuality:
- - 'DATA_QUALITY'
machineLearning:
- 'JUPYTER'
- 'MLFLOW'
diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java
deleted file mode 100644
index 04464550e9ad..000000000000
--- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.controller;
-
-import static org.mockito.Mockito.when;
-
-import org.apache.dolphinscheduler.api.AssertionsHelper;
-import org.apache.dolphinscheduler.api.enums.Status;
-import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl;
-import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl;
-import org.apache.dolphinscheduler.api.utils.PageInfo;
-import org.apache.dolphinscheduler.api.utils.Result;
-import org.apache.dolphinscheduler.common.constants.Constants;
-import org.apache.dolphinscheduler.common.enums.UserType;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
-
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.junit.jupiter.MockitoExtension;
-
-/**
- * data quality controller test
- */
-@ExtendWith(MockitoExtension.class)
-public class DataQualityControllerTest {
-
- @InjectMocks
- private DataQualityController dataQualityController;
-
- @Mock
- private DqRuleServiceImpl dqRuleService;
-
- @Mock
- private DqExecuteResultServiceImpl dqExecuteResultService;
-
- protected User user;
-
- @BeforeEach
- public void before() {
- User loginUser = new User();
- loginUser.setId(1);
- loginUser.setUserType(UserType.GENERAL_USER);
- loginUser.setUserName("admin");
-
- user = loginUser;
- }
-
- @Test
- public void testGetRuleFormCreateJsonById() {
-
- Mockito.when(dqRuleService.getRuleFormCreateJsonById(1)).thenReturn("");
- AssertionsHelper.assertDoesNotThrow(() -> dataQualityController.getRuleFormCreateJsonById(1));
- }
-
- private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
- result.put(Constants.STATUS, status);
- if (statusParams != null && statusParams.length > 0) {
- result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
- } else {
- result.put(Constants.MSG, status.getMsg());
- }
- }
-
- public void putMsg(Result result, Status status, Object... statusParams) {
- result.setCode(status.getCode());
- if (statusParams != null && statusParams.length > 0) {
- result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
- } else {
- result.setMsg(status.getMsg());
- }
- }
-
- private List<DqRule> getRuleList() {
- List<DqRule> list = new ArrayList<>();
- DqRule rule = new DqRule();
- rule.setId(1);
- rule.setName("空值检测");
- rule.setType(RuleType.SINGLE_TABLE.getCode());
- rule.setUserId(1);
- rule.setUserName("admin");
- rule.setCreateTime(new Date());
- rule.setUpdateTime(new Date());
-
- list.add(rule);
-
- return list;
- }
-
- @Test
- public void testQueryRuleListPaging() throws Exception {
-
- String searchVal = "";
- int ruleType = 0;
- String start = "2020-01-01 00:00:00";
- String end = "2020-01-02 00:00:00";
-
- PageInfo<DqRule> pageInfo = new PageInfo<>(1, 10);
- pageInfo.setTotal(10);
- pageInfo.setTotalList(getRuleList());
-
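- // stub the service layer so the test exercises only the controller wiring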
- when(dqRuleService.queryRuleListPaging(user, searchVal, ruleType, start, end, 1, 10)).thenReturn(pageInfo);
-
- Result<PageInfo<DqRule>> response =
- dataQualityController.queryRuleListPaging(user, searchVal, ruleType, start, end, 1, 10);
- Assertions.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
- }
-
- @Test
- public void testQueryRuleList() {
-
- when(dqRuleService.queryAllRuleList()).thenReturn(getRuleList());
-
- Result<List<DqRule>> listResult = dataQualityController.queryRuleList();
- Assertions.assertEquals(Status.SUCCESS.getCode(), listResult.getCode().intValue());
- }
-
- @Test
- public void testQueryResultListPaging() {
-
- String searchVal = "";
- int ruleType = 0;
- String start = "2020-01-01 00:00:00";
- String end = "2020-01-02 00:00:00";
-
- PageInfo<DqExecuteResult> pageInfo = new PageInfo<>(1, 10);
- pageInfo.setTotal(10);
-
- when(dqExecuteResultService.queryResultListPaging(user, searchVal, 0, ruleType, start, end, 1, 10))
- .thenReturn(pageInfo);
-
- Result<PageInfo<DqExecuteResult>> pageInfoResult =
- dataQualityController.queryExecuteResultListPaging(user, searchVal, ruleType, 0, start, end, 1, 10);
- Assertions.assertEquals(Status.SUCCESS.getCode(), pageInfoResult.getCode().intValue());
- }
-}
diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java
deleted file mode 100644
index 8c3af3e22080..000000000000
--- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service;
-
-import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.when;
-
-import org.apache.dolphinscheduler.api.ApiApplicationServer;
-import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService;
-import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl;
-import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl;
-import org.apache.dolphinscheduler.common.enums.AuthorizationType;
-import org.apache.dolphinscheduler.common.enums.UserType;
-import org.apache.dolphinscheduler.common.utils.DateUtils;
-import org.apache.dolphinscheduler.dao.entity.DataSource;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
-import org.apache.dolphinscheduler.spi.enums.DbType;
-import org.apache.dolphinscheduler.spi.params.base.FormType;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.junit.jupiter.MockitoExtension;
-import org.mockito.junit.jupiter.MockitoSettings;
-import org.mockito.quality.Strictness;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.boot.test.context.SpringBootTest;
-
-import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-
-@ExtendWith(MockitoExtension.class)
-@MockitoSettings(strictness = Strictness.LENIENT)
-@SpringBootTest(classes = ApiApplicationServer.class)
-public class DqRuleServiceTest {
-
- private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class);
-
- @InjectMocks
- private DqRuleServiceImpl dqRuleService;
-
- @Mock
- DqRuleMapper dqRuleMapper;
-
- @Mock
- DqRuleInputEntryMapper dqRuleInputEntryMapper;
-
- @Mock
- DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;
-
- @Mock
- DataSourceMapper dataSourceMapper;
-
- @Mock
- private ResourcePermissionCheckService resourcePermissionCheckService;
-
- @Test
- public void testGetRuleFormCreateJsonById() {
- String json = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\",\"props\":{\"placeholder\":"
- + "\"Please select the source connector type\",\"size\":\"small\"},\"type\":\"select\",\"title\":"
- + "\"源数据类型\",\"value\":\"JDBC\",\"emit\":[\"change\"],\"options\":[{\"label\":\"HIVE\",\"value\":"
- + "\"HIVE\",\"disabled\":false},{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]},{\"props\":"
- + "{\"disabled\":false,\"rows\":2,\"placeholder\":\"Please enter statistics name, the alias in "
- + "statistics execute sql\",\"size\":\"small\"},\"field\":\"statistics_name\",\"name\":"
- + "\"统计值名\",\"type\":\"input\",\"title\":\"统计值名\",\"validate\":[{\"required\":true,\"type\":"
- + "\"string\",\"trigger\":\"blur\"}]},{\"props\":{\"disabled\":false,\"type\":\"textarea\",\"rows\":"
- + "1,\"placeholder\":\"Please enter the statistics execute sql\",\"size\":\"small\"},\"field\":"
- + "\"statistics_execute_sql\",\"name\":\"统计值计算SQL\",\"type\":\"input\",\"title\":"
- + "\"统计值计算SQL\",\"validate\":[{\"required\":true,\"type\":\"string\",\"trigger\":\"blur\"}]}]";
- when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
- String ruleFormCreateJsonById = dqRuleService.getRuleFormCreateJsonById(1);
- Assertions.assertEquals(json, ruleFormCreateJsonById);
- }
-
- @Test
- public void testQueryAllRuleList() {
- when(dqRuleMapper.selectList(new QueryWrapper<>())).thenReturn(getRuleList());
- assertDoesNotThrow(() -> dqRuleService.queryAllRuleList());
- }
-
- @Test
- public void testGetDatasourceOptionsById() {
- when(dataSourceMapper.listAllDataSourceByType(DbType.MYSQL.getCode())).thenReturn(dataSourceList());
- assertDoesNotThrow(() -> dqRuleService.queryAllRuleList());
- }
-
- @Test
- public void testQueryRuleListPaging() {
-
- String searchVal = "";
- int ruleType = 0;
- Date start = DateUtils.stringToDate("2020-01-01 00:00:00");
- Date end = DateUtils.stringToDate("2020-01-02 00:00:00");
-
- User loginUser = new User();
- loginUser.setId(1);
- loginUser.setUserType(UserType.ADMIN_USER);
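- // grant both permission checks so the paging query can proceed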
- Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.DATA_QUALITY,
- loginUser.getId(), null, baseServiceLogger)).thenReturn(true);
- Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATA_QUALITY, null, 0,
- baseServiceLogger)).thenReturn(true);
- Page<DqRule> page = new Page<>(1, 10);
- page.setTotal(1);
- page.setRecords(getRuleList());
-
- when(dqRuleMapper.queryRuleListPaging(any(IPage.class), eq(""), eq(ruleType), eq(start), eq(end)))
- .thenReturn(page);
-
- when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
- when(dqRuleExecuteSqlMapper.getExecuteSqlList(1)).thenReturn(getRuleExecuteSqlList());
-
- assertDoesNotThrow(() -> dqRuleService.queryRuleListPaging(loginUser, searchVal, 0, "2020-01-01 00:00:00",
- "2020-01-02 00:00:00", 1, 10));
- }
-
- private List<DataSource> dataSourceList() {
- List<DataSource> dataSourceList = new ArrayList<>();
- DataSource dataSource = new DataSource();
- dataSource.setId(1);
- dataSource.setName("dolphinscheduler");
- dataSource.setType(DbType.MYSQL);
- dataSource.setUserId(1);
- dataSource.setUserName("admin");
- dataSource.setConnectionParams("");
- dataSource.setCreateTime(new Date());
- dataSource.setUpdateTime(new Date());
- dataSourceList.add(dataSource);
-
- return dataSourceList;
- }
-
- private List<DqRule> getRuleList() {
- List<DqRule> list = new ArrayList<>();
- DqRule rule = new DqRule();
- rule.setId(1);
- rule.setName("空值检测");
- rule.setType(RuleType.SINGLE_TABLE.getCode());
- rule.setUserId(1);
- rule.setUserName("admin");
- rule.setCreateTime(new Date());
- rule.setUpdateTime(new Date());
-
- list.add(rule);
-
- return list;
- }
-
- private List<DqRuleInputEntry> getRuleInputEntryList() {
- List<DqRuleInputEntry> list = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData("JDBC");
- srcConnectorType.setPlaceholder("Please select the source connector type");
- srcConnectorType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcConnectorType
- .setOptions("[{\"label\":\"HIVE\",\"value\":\"HIVE\"},{\"label\":\"JDBC\",\"value\":\"JDBC\"}]");
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
- srcConnectorType.setIsEmit(true);
- srcConnectorType.setIsValidate(true);
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值名");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(true);
- statisticsName.setIsShow(true);
- statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setInputType(InputType.DEFAULT.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
- statisticsName.setIsEmit(false);
- statisticsName.setIsValidate(true);
-
- DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
- statisticsExecuteSql.setTitle("统计值计算SQL");
- statisticsExecuteSql.setField("statistics_execute_sql");
- statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
- statisticsExecuteSql.setCanEdit(true);
- statisticsExecuteSql.setIsShow(true);
- statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql");
- statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
- statisticsExecuteSql.setIsEmit(false);
- statisticsExecuteSql.setIsValidate(true);
-
- list.add(srcConnectorType);
- list.add(statisticsName);
- list.add(statisticsExecuteSql);
-
- return list;
- }
-
- private List<DqRuleExecuteSql> getRuleExecuteSqlList() {
- List<DqRuleExecuteSql> list = new ArrayList<>();
-
- DqRuleExecuteSql executeSqlDefinition = new DqRuleExecuteSql();
- executeSqlDefinition.setIndex(0);
- executeSqlDefinition.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
- executeSqlDefinition.setTableAlias("total_count");
- executeSqlDefinition.setType(ExecuteSqlType.COMPARISON.getCode());
- list.add(executeSqlDefinition);
-
- return list;
- }
-}
diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqWorkflowInstanceExecuteResultServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqWorkflowInstanceExecuteResultServiceTest.java
deleted file mode 100644
index 3d3f16d2c0c8..000000000000
--- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqWorkflowInstanceExecuteResultServiceTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.api.service;
-
-import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.when;
-
-import org.apache.dolphinscheduler.api.ApiApplicationServer;
-import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService;
-import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl;
-import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl;
-import org.apache.dolphinscheduler.common.enums.AuthorizationType;
-import org.apache.dolphinscheduler.common.enums.UserType;
-import org.apache.dolphinscheduler.common.utils.DateUtils;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.User;
-import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.junit.jupiter.MockitoExtension;
-import org.mockito.junit.jupiter.MockitoSettings;
-import org.mockito.quality.Strictness;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.boot.test.context.SpringBootTest;
-
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-
-@ExtendWith(MockitoExtension.class)
-@MockitoSettings(strictness = Strictness.LENIENT)
-@SpringBootTest(classes = ApiApplicationServer.class)
-public class DqWorkflowInstanceExecuteResultServiceTest {
-
- private static final Logger logger = LoggerFactory.getLogger(DqWorkflowInstanceExecuteResultServiceTest.class);
- private static final Logger baseServiceLogger = LoggerFactory.getLogger(BaseServiceImpl.class);
-
- @InjectMocks
- private DqExecuteResultServiceImpl dqExecuteResultService;
-
- @Mock
- DqExecuteResultMapper dqExecuteResultMapper;
-
- @Mock
- private ResourcePermissionCheckService resourcePermissionCheckService;
-
- @Test
- public void testQueryResultListPaging() {
-
- String searchVal = "";
- int ruleType = 0;
- Date start = DateUtils.stringToDate("2020-01-01 00:00:00");
- Date end = DateUtils.stringToDate("2020-01-02 00:00:00");
-
- User loginUser = new User();
- loginUser.setId(1);
- loginUser.setUserType(UserType.ADMIN_USER);
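- // grant both permission checks so the paging query can proceed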
- Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.DATA_QUALITY,
- loginUser.getId(), null, baseServiceLogger)).thenReturn(true);
- Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.DATA_QUALITY, null, 0,
- baseServiceLogger)).thenReturn(true);
- Page<DqExecuteResult> page = new Page<>(1, 10);
- page.setTotal(1);
- page.setRecords(getExecuteResultList());
- when(dqExecuteResultMapper.queryResultListPaging(any(IPage.class), eq(""), eq(loginUser), any(), eq(ruleType),
- eq(start), eq(end))).thenReturn(page);
-
- assertDoesNotThrow(() -> dqExecuteResultService.queryResultListPaging(loginUser, searchVal, 1, 0,
- "2020-01-01 00:00:00", "2020-01-02 00:00:00", 1, 10));
- }
-
- public List<DqExecuteResult> getExecuteResultList() {
-
- List<DqExecuteResult> list = new ArrayList<>();
- DqExecuteResult dqExecuteResult = new DqExecuteResult();
- dqExecuteResult.setId(1);
- dqExecuteResult.setState(DqTaskState.FAILURE.getCode());
- list.add(dqExecuteResult);
-
- return list;
- }
-}
diff --git a/dolphinscheduler-bom/pom.xml b/dolphinscheduler-bom/pom.xml
index 9b5ef399eec2..a16c003c7c4e 100644
--- a/dolphinscheduler-bom/pom.xml
+++ b/dolphinscheduler-bom/pom.xml
@@ -94,7 +94,6 @@
2.9.1
4.2.11
1.1.10.1
- <spark.version>3.2.2</spark.version>
3.0.16
1.33
4.1.1
@@ -769,18 +768,6 @@
             <scope>provided</scope>
         </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.12</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.module</groupId>
-                    <artifactId>jackson-module-scala_2.11</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
         <dependency>
             <groupId>org.apache.sshd</groupId>
             <artifactId>sshd-sftp</artifactId>
@@ -792,42 +779,6 @@
             <version>${sshd.version}</version>
         </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.12</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_2.12</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-httpclient</groupId>
-                    <artifactId>commons-httpclient</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.httpcomponents</groupId>
-                    <artifactId>httpclient</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
         <dependency>
             <groupId>org.codehaus.janino</groupId>
             <artifactId>janino</artifactId>
diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java
index cc01c45c1df1..3199a692596a 100644
--- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java
+++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java
@@ -535,11 +535,6 @@ public final class Constants {
public static final int DRY_RUN_FLAG_NO = 0;
public static final int DRY_RUN_FLAG_YES = 1;
- /**
- * data.quality.error.output.path
- */
- public static final String DATA_QUALITY_ERROR_OUTPUT_PATH = "data-quality.error.output.path";
-
/**
* use for k8s
*/
@@ -596,7 +591,6 @@ public final class Constants {
public static final String TYPE_DATA_INTEGRATION = "DataIntegration";
public static final String TYPE_CLOUD = "Cloud";
public static final String TYPE_LOGIC = "Logic";
- public static final String TYPE_DATA_QUALITY = "DataQuality";
public static final String TYPE_OTHER = "Other";
public static final String TYPE_MACHINE_LEARNING = "MachineLearning";
@@ -691,9 +685,6 @@ public final class Constants {
public static final String REMOTE_LOGGING_COS_REGION = "remote.logging.cos.region";
- /**
- * data quality
- */
public static final String DATABASES_QUERY = "show databases";
public static final String DATABASES_QUERY_PG = "SELECT datname FROM pg_database";
diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java
index f987bdb1a251..7b58ee3f8287 100644
--- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java
+++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java
@@ -17,11 +17,11 @@
package org.apache.dolphinscheduler.common.enums;
+import lombok.Getter;
+
import com.baomidou.mybatisplus.annotation.EnumValue;
-/**
- * Authorization type
- */
+@Getter
public enum AuthorizationType {
/**
@@ -62,7 +62,6 @@ public enum AuthorizationType {
MONITOR(13, "monitor"),
ALERT_PLUGIN_INSTANCE(14, "alert plugin instance"),
TENANT(15, "tenant"),
- DATA_QUALITY(16, "data quality"),
TASK_GROUP(17, "task group"),
;
@@ -75,11 +74,4 @@ public enum AuthorizationType {
private final int code;
private final String descp;
- public int getCode() {
- return code;
- }
-
- public String getDescp() {
- return descp;
- }
}
diff --git a/dolphinscheduler-common/src/main/resources/common.properties b/dolphinscheduler-common/src/main/resources/common.properties
index f0d894ed691d..44b9acfdfe52 100644
--- a/dolphinscheduler-common/src/main/resources/common.properties
+++ b/dolphinscheduler-common/src/main/resources/common.properties
@@ -66,13 +66,6 @@ datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
-# data quality jar directory path; DolphinScheduler will auto-discover the data quality jar from this directory. Keep it empty if you have not changed anything in
-# data-quality, and the jar will be discovered automatically. Change it only if you want to use your own data-quality jar and it is not in the worker-server
-# libs directory (but make sure your jar name starts with `dolphinscheduler-data-quality`).
-data-quality.jar.dir=
-
-#data-quality.error.output.path=/tmp/data-quality-error-data
-
# Network IP gets priority, default inner outer
# Whether hive SQL is executed in the same session
diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java
index e6078ae95fff..c641c296b8fb 100644
--- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java
+++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java
@@ -79,34 +79,6 @@ public void testPwdLogMsgConverter() {
" }\n" +
"}");
- // data quality
- tcs.put("\"readers\" : [ {\n" +
- " \"type\" : \"JDBC\",\n" +
- " \"config\" : {\n" +
- " \"database\" : \"dolphinscheduler\",\n" +
- " \"password\" : \"view1\",\n" +
- " \"driver\" : \"com.mysql.cj.jdbc.Driver\",\n" +
- " \"user\" : \"root\",\n" +
- " \"output_table\" : \"dolphinscheduler_users\",\n" +
- " \"table\" : \"users\",\n" +
- " \"url\" : \"jdbc:mysql://127.0.0.1:3307/dolphinscheduler?userSSL=true&enabledTLSProtocols=TLSv1.2\"\n"
- +
- " }\n" +
- " } ]",
- "\"readers\" : [ {\n" +
- " \"type\" : \"JDBC\",\n" +
- " \"config\" : {\n" +
- " \"database\" : \"dolphinscheduler\",\n" +
- " \"password\" : \"*****\",\n" +
- " \"driver\" : \"com.mysql.cj.jdbc.Driver\",\n" +
- " \"user\" : \"root\",\n" +
- " \"output_table\" : \"dolphinscheduler_users\",\n" +
- " \"table\" : \"users\",\n" +
- " \"url\" : \"jdbc:mysql://127.0.0.1:3307/dolphinscheduler?userSSL=true&enabledTLSProtocols=TLSv1.2\"\n"
- +
- " }\n" +
- " } ]");
-
for (String logMsg : tcs.keySet()) {
String maskedLog = SensitiveDataConverter.maskSensitiveData(logMsg);
logger.info("original parameter : {}", logMsg);
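
The removed test case exercised masking of the password field inside a data-quality reader config. As a rough illustration of that kind of log masking (the regex below is illustrative only, not the exact pattern SensitiveDataConverter uses):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class PasswordMaskerSketch {

        // Illustrative pattern: matches "password" : "<value>" in JSON-style log text.
        private static final Pattern PWD =
                Pattern.compile("(\"password\"\\s*:\\s*\")[^\"]*(\")");

        private PasswordMaskerSketch() {
        }

        public static String mask(String logMsg) {
            Matcher m = PWD.matcher(logMsg);
            return m.replaceAll("$1*****$2"); // keep the key, replace the value
        }
    }

mask("\"password\" : \"view1\"") yields "\"password\" : \"*****\"", which is the before/after shape the deleted test asserted.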
diff --git a/dolphinscheduler-common/src/test/resources/common.properties b/dolphinscheduler-common/src/test/resources/common.properties
index 2e4ad4f0957a..92609741b009 100644
--- a/dolphinscheduler-common/src/test/resources/common.properties
+++ b/dolphinscheduler-common/src/test/resources/common.properties
@@ -130,13 +130,6 @@ datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
-# data quality jar directory path; the data quality jar will be auto-discovered from this directory. Keep it empty if you have not changed anything in
-# data-quality, and DolphinScheduler will discover the jar by itself. Change it only if you want to use your own data-quality jar and it is not in the worker-server
-# libs directory (but make sure your jar name starts with `dolphinscheduler-data-quality`).
-data-quality.jar.dir=
-
-#data-quality.error.output.path=/tmp/data-quality-error-data
-
# Network IP gets priority, default inner outer
# Whether hive SQL is executed in the same session
diff --git a/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql b/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql
index af1846ebf754..7ba292d8ac6c 100644
--- a/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql
+++ b/dolphinscheduler-common/src/test/resources/sql/mysql_dml.sql
@@ -38,47 +38,3 @@ DROP PROCEDURE dolphin_t_ds_tenant_insert_default;
UPDATE t_ds_schedules t1 JOIN t_ds_workflow_definition t2 ON t1.workflow_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default');
UPDATE `t_ds_workflow_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL;
--- data quality: support choosing a database
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java
deleted file mode 100644
index 52a304c4d1ef..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_comparison_type")
-public class DqComparisonType implements Serializable {
-
- /**
- * primary key
- */
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
- /**
- * type
- */
- @TableField(value = "type")
- private String type;
- /**
- * execute sql
- */
- @TableField(value = "execute_sql")
- private String executeSql;
- /**
- * output table
- */
- @TableField(value = "output_table")
- private String outputTable;
- /**
- * comparison name
- */
- @TableField(value = "name")
- private String name;
- /**
- * is inner source
- */
- @TableField(value = "is_inner_source")
- private Boolean isInnerSource;
- /**
- * create_time
- */
- @TableField(value = "create_time")
- private Date createTime;
- /**
- * update_time
- */
- @TableField(value = "update_time")
- private Date updateTime;
-}
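
DqComparisonType and the other Dq* entities deleted below all follow the same MyBatis-Plus mapping pattern, so it is worth spelling out once. A minimal sketch against a hypothetical table:

    import java.io.Serializable;
    import java.util.Date;

    import lombok.Data;

    import com.baomidou.mybatisplus.annotation.IdType;
    import com.baomidou.mybatisplus.annotation.TableField;
    import com.baomidou.mybatisplus.annotation.TableId;
    import com.baomidou.mybatisplus.annotation.TableName;

    // Hypothetical entity; t_ds_example is not a real table.
    @Data
    @TableName("t_ds_example")
    public class ExampleEntity implements Serializable {

        // Auto-increment primary key.
        @TableId(value = "id", type = IdType.AUTO)
        private Integer id;

        // Ordinary column, mapped by explicit column name.
        @TableField(value = "name")
        private String name;

        // Not a column: filled only by join queries, skipped on insert/update.
        @TableField(exist = false)
        private String userName;

        @TableField(value = "create_time")
        private Date createTime;
    }

@Data supplies the getters/setters, and exist = false is what lets entities such as DqExecuteResult carry display-only fields like taskName without a backing column.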
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java
deleted file mode 100644
index b9065d21a520..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_execute_result")
-public class DqExecuteResult implements Serializable {
-
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
-
- @TableField(value = "workflow_definition_id")
- private long workflowDefinitionId;
-
- @TableField(exist = false)
- private String workflowDefinitionName;
-
- @TableField(exist = false)
- private long processDefinitionCode;
-
- @TableField(value = "process_instance_id")
- private long processInstanceId;
-
- @TableField(exist = false)
- private String processInstanceName;
-
- @TableField(exist = false)
- private long projectCode;
-
- @TableField(value = "task_instance_id")
- private long taskInstanceId;
-
- @TableField(exist = false)
- private String taskName;
-
- @TableField(value = "rule_type")
- private int ruleType;
-
- @TableField(value = "rule_name")
- private String ruleName;
-
- @TableField(value = "statistics_value")
- private double statisticsValue;
-
- @TableField(value = "comparison_value")
- private double comparisonValue;
-
- @TableField(value = "comparison_type")
- private int comparisonType;
-
- @TableField(exist = false)
- private String comparisonTypeName;
-
- @TableField(value = "check_type")
- private int checkType;
-
- @TableField(value = "threshold")
- private double threshold;
-
- @TableField(value = "operator")
- private int operator;
-
- @TableField(value = "failure_strategy")
- private int failureStrategy;
-
- @TableField(value = "user_id")
- private int userId;
-
- @TableField(exist = false)
- private String userName;
-
- @TableField(value = "state")
- private int state;
-
- @TableField(value = "error_output_path")
- private String errorOutputPath;
-
- @TableField(value = "create_time")
- private Date createTime;
-
- @TableField(value = "update_time")
- private Date updateTime;
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java
deleted file mode 100644
index 7ec52868b269..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import java.io.Serializable;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-@JsonInclude(Include.NON_NULL)
-public class DqExecuteResultAlertContent implements Serializable {
-
- /**
- * process_definition_id
- */
- @JsonProperty(value = "processDefinitionId")
- private long processDefinitionId;
- /**
- * process define name
- */
- @JsonProperty("processDefinitionName")
- private String processDefinitionName;
- /**
- * process_instance_id
- */
- @JsonProperty(value = "processInstanceId")
- private long processInstanceId;
- /**
- * process instance name
- */
- @JsonProperty("processInstanceName")
- private String processInstanceName;
- /**
- * task_instance_id
- */
- @JsonProperty(value = "taskInstanceId")
- private long taskInstanceId;
- /**
- * task name
- */
- @JsonProperty("taskName")
- private String taskName;
- /**
- * rule_type
- */
- @JsonProperty(value = "ruleType")
- private int ruleType;
- /**
- * rule_name
- */
- @JsonProperty(value = "ruleName")
- private String ruleName;
- /**
- * statistics_value
- */
- @JsonProperty(value = "statisticsValue")
- private double statisticsValue;
- /**
- * comparison_value
- */
- @JsonProperty(value = "comparisonValue")
- private double comparisonValue;
- /**
- * check_type
- */
- @JsonProperty(value = "checkType")
- private int checkType;
- /**
- * threshold
- */
- @JsonProperty(value = "threshold")
- private double threshold;
- /**
- * operator
- */
- @JsonProperty(value = "operator")
- private int operator;
- /**
- * failure strategy
- */
- @JsonProperty(value = "failureStrategy")
- private int failureStrategy;
- /**
- * user id
- */
- @JsonProperty(value = "userId")
- private int userId;
- /**
- * user_name
- */
- @JsonProperty("userName")
- private String userName;
- /**
- * state
- */
- @JsonProperty(value = "state")
- private int state;
-
- @JsonProperty(value = "errorDataPath")
- private String errorDataPath;
-
- public DqExecuteResultAlertContent(Builder builder) {
- this.processDefinitionId = builder.processDefinitionId;
- this.processDefinitionName = builder.processDefinitionName;
- this.processInstanceId = builder.processInstanceId;
- this.processInstanceName = builder.processInstanceName;
- this.taskInstanceId = builder.taskInstanceId;
- this.taskName = builder.taskName;
- this.ruleType = builder.ruleType;
- this.ruleName = builder.ruleName;
- this.statisticsValue = builder.statisticsValue;
- this.comparisonValue = builder.comparisonValue;
- this.checkType = builder.checkType;
- this.threshold = builder.threshold;
- this.operator = builder.operator;
- this.failureStrategy = builder.failureStrategy;
- this.userId = builder.userId;
- this.userName = builder.userName;
- this.state = builder.state;
- this.errorDataPath = builder.errorDataPath;
- }
-
- public static Builder newBuilder() {
- return new Builder();
- }
-
- public static class Builder {
-
- private long processDefinitionId;
- private String processDefinitionName;
- private long processInstanceId;
- private String processInstanceName;
- private long taskInstanceId;
- private String taskName;
- private int ruleType;
- private String ruleName;
- private double statisticsValue;
- private double comparisonValue;
- private int checkType;
- private double threshold;
- private int operator;
- private int failureStrategy;
- private int userId;
- private String userName;
- private int state;
- private String errorDataPath;
-
- public Builder processDefinitionId(long processDefinitionId) {
- this.processDefinitionId = processDefinitionId;
- return this;
- }
-
- public Builder processDefinitionName(String processDefinitionName) {
- this.processDefinitionName = processDefinitionName;
- return this;
- }
-
- public Builder processInstanceId(long processInstanceId) {
- this.processInstanceId = processInstanceId;
- return this;
- }
-
- public Builder processInstanceName(String processInstanceName) {
- this.processInstanceName = processInstanceName;
- return this;
- }
-
- public Builder taskInstanceId(long taskInstanceId) {
- this.taskInstanceId = taskInstanceId;
- return this;
- }
-
- public Builder taskName(String taskName) {
- this.taskName = taskName;
- return this;
- }
-
- public Builder ruleType(int ruleType) {
- this.ruleType = ruleType;
- return this;
- }
-
- public Builder ruleName(String ruleName) {
- this.ruleName = ruleName;
- return this;
- }
-
- public Builder statisticsValue(double statisticsValue) {
- this.statisticsValue = statisticsValue;
- return this;
- }
-
- public Builder comparisonValue(double comparisonValue) {
- this.comparisonValue = comparisonValue;
- return this;
- }
-
- public Builder checkType(int checkType) {
- this.checkType = checkType;
- return this;
- }
-
- public Builder threshold(double threshold) {
- this.threshold = threshold;
- return this;
- }
-
- public Builder operator(int operator) {
- this.operator = operator;
- return this;
- }
-
- public Builder failureStrategy(int failureStrategy) {
- this.failureStrategy = failureStrategy;
- return this;
- }
-
- public Builder userId(int userId) {
- this.userId = userId;
- return this;
- }
-
- public Builder userName(String userName) {
- this.userName = userName;
- return this;
- }
-
- public Builder state(int state) {
- this.state = state;
- return this;
- }
-
- public Builder errorDataPath(String errorDataPath) {
- this.errorDataPath = errorDataPath;
- return this;
- }
-
- public DqExecuteResultAlertContent build() {
- return new DqExecuteResultAlertContent(this);
- }
- }
-}
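
The deleted alert-content class carries a hand-rolled fluent builder. A hypothetical usage sketch of the API it exposed (values invented for illustration):

    DqExecuteResultAlertContent content = DqExecuteResultAlertContent.newBuilder()
            .taskInstanceId(1001L)
            .taskName("null_check_on_users")
            .ruleType(0)
            .ruleName("$t(null_check)")
            .statisticsValue(3.0)
            .comparisonValue(0.0)
            .state(1)
            .build();

Lombok's @Builder would generate an equivalent builder from the field list alone; the manual version is pure boilerplate, which makes the class cheap to remove along with the feature.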
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java
deleted file mode 100644
index 3623a7545bd3..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_rule")
-public class DqRule implements Serializable {
-
- /**
- * primary key
- */
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
- /**
- * name
- */
- @TableField(value = "name")
- private String name;
- /**
- * type
- */
- @TableField(value = "type")
- private int type;
- /**
- * rule json
- */
- @TableField(exist = false)
- private String ruleJson;
- /**
- * user_id
- */
- @TableField(value = "user_id")
- private int userId;
- /**
- * user_name
- */
- @TableField(exist = false)
- private String userName;
- /**
- * create_time
- */
- @TableField(value = "create_time")
- private Date createTime;
- /**
- * update_time
- */
- @TableField(value = "update_time")
- private Date updateTime;
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java
deleted file mode 100644
index 6df162c0a08c..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_rule_execute_sql")
-public class DqRuleExecuteSql implements Serializable {
-
- /**
- * primary key
- */
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
- /**
- * index, ensures the execution order of the sql
- */
- @TableField(value = "index")
- private int index;
- /**
- * SQL Statement
- */
- @TableField(value = "sql")
- private String sql;
- /**
- * table alias name
- */
- @TableField(value = "table_alias")
- private String tableAlias;
- /**
- * execute sql type: middle, statistics, comparison
- */
- @TableField(value = "type")
- private int type = ExecuteSqlType.MIDDLE.getCode();
- /**
- * is error output sql
- */
- @TableField(value = "is_error_output_sql")
- private boolean isErrorOutputSql;
- /**
- * create_time
- */
- @TableField(value = "create_time")
- private Date createTime;
- /**
- * update_time
- */
- @TableField(value = "update_time")
- private Date updateTime;
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java
deleted file mode 100644
index 16bf4907657d..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_rule_input_entry")
-public class DqRuleInputEntry implements Serializable {
-
- /**
- * primary key
- */
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
- /**
- * form field name
- */
- @TableField(value = "field")
- private String field;
- /**
- * form type
- */
- @TableField(value = "type")
- private String type;
- /**
- * form title
- */
- @TableField(value = "title")
- private String title;
- /**
- * default data,can be null
- */
- @TableField(value = "data")
- private String data;
- /**
- * default options,can be null
- * [{label:"",value:""}]
- */
- @TableField(value = "options")
- private String options;
- /**
- * ${field}
- */
- @TableField(value = "placeholder")
- private String placeholder;
- /**
- * the source type of options,use default options or other
- */
- @TableField(value = "option_source_type")
- private int optionSourceType = OptionSourceType.DEFAULT.getCode();
- /**
- * input entry data type: string, array, number, etc.
- */
- @TableField(value = "data_type")
- private int dataType = DataType.NUMBER.getCode();
- /**
- * input entry type: default,statistics,comparison
- */
- @TableField(value = "input_type")
- private int inputType = InputType.DEFAULT.getCode();
- /**
- * whether to display on the front end
- */
- @TableField(value = "is_show")
- private Boolean isShow;
- /**
- * whether to edit on the front end
- */
- @TableField(value = "can_edit")
- private Boolean canEdit;
- /**
- * is emit event
- */
- @TableField(value = "is_emit")
- private Boolean isEmit;
- /**
- * is validate
- */
- @TableField(value = "is_validate")
- private Boolean isValidate;
- /**
- * values map
- */
- @TableField(exist = false)
- private String valuesMap;
-
- /**
- * entry index
- */
- @TableField(exist = false)
- private Integer index;
- /**
- * create_time
- */
- @TableField(value = "create_time")
- private Date createTime;
- /**
- * update_time
- */
- @TableField(value = "update_time")
- private Date updateTime;
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java
deleted file mode 100644
index a86f782fb074..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.entity;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@Data
-@TableName("t_ds_dq_task_statistics_value")
-public class DqTaskStatisticsValue implements Serializable {
-
- @TableId(value = "id", type = IdType.AUTO)
- private Integer id;
-
- @TableField(value = "workflow_definition_id")
- private long workflowDefinitionId;
-
- @TableField(exist = false)
- private String workflowDefinitionName;
-
- @TableField(value = "task_instance_id")
- private long taskInstanceId;
-
- @TableField(exist = false)
- private String taskName;
-
- @TableField(value = "rule_id")
- private long ruleId;
-
- @TableField(exist = false)
- private int ruleType;
-
- @TableField(exist = false)
- private String ruleName;
-
- @TableField(value = "statistics_value")
- private double statisticsValue;
-
- @TableField(value = "statistics_name")
- private String statisticsName;
-
- @TableField(value = "data_time")
- private Date dataTime;
-
- @TableField(value = "create_time")
- private Date createTime;
-
- @TableField(value = "update_time")
- private Date updateTime;
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java
deleted file mode 100644
index 946a86e4f22b..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-
-/**
- * DqComparisonTypeMapper
- */
-public interface DqComparisonTypeMapper extends BaseMapper<DqComparisonType> {
-
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java
deleted file mode 100644
index d320f4bd0d3e..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.User;
-
-import org.apache.ibatis.annotations.Param;
-
-import java.util.Date;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-import com.baomidou.mybatisplus.core.metadata.IPage;
-
-/**
- * DqExecuteResultMapper
- */
-public interface DqExecuteResultMapper extends BaseMapper<DqExecuteResult> {
-
- /**
- * data quality task execute result page
- *
- * @param page page
- * @param searchVal searchVal
- * @param user user
- * @param statusArray states
- * @param ruleType ruleType
- * @param startTime startTime
- * @param endTime endTime
- * @return data quality task execute result page
- */
- IPage<DqExecuteResult> queryResultListPaging(IPage<DqExecuteResult> page,
- @Param("searchVal") String searchVal,
- @Param("user") User user,
- @Param("states") int[] statusArray,
- @Param("ruleType") int ruleType,
- @Param("startTime") Date startTime,
- @Param("endTime") Date endTime);
-
- /**
- * get execute result by id
- * @param taskInstanceId taskInstanceId
- * @return DqExecuteResult
- */
- DqExecuteResult getExecuteResultById(@Param("taskInstanceId") int taskInstanceId);
-
- void deleteByWorkflowInstanceId(@Param("workflowInstanceId") Integer workflowInstanceId);
-}
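
For context, queryResultListPaging relied on MyBatis-Plus pagination: the caller passes an IPage and receives it back with the matching records and total count filled in. A hypothetical caller (names such as mapper and loginUser are assumptions):

    import java.util.Date;
    import java.util.List;

    import com.baomidou.mybatisplus.core.metadata.IPage;
    import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

    public class DqResultQuerySketch {

        List<DqExecuteResult> firstPage(DqExecuteResultMapper mapper, User loginUser,
                                        Date startTime, Date endTime) {
            Page<DqExecuteResult> page = new Page<>(1, 10); // page 1, 10 rows per page
            IPage<DqExecuteResult> result = mapper.queryResultListPaging(
                    page, "users", loginUser, new int[]{0, 1}, 0, startTime, endTime);
            return result.getRecords(); // result.getTotal() gives the overall count
        }
    }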
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java
deleted file mode 100644
index 7179f2e50361..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-
-import org.apache.ibatis.annotations.Param;
-
-import java.util.List;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-
-/**
- * DqRuleExecuteSqlMapper
- */
-public interface DqRuleExecuteSqlMapper extends BaseMapper<DqRuleExecuteSql> {
-
- /**
- * get execute sql list by rule id
- *
- * @param ruleId Integer
- */
- List<DqRuleExecuteSql> getExecuteSqlList(@Param("ruleId") Integer ruleId);
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java
deleted file mode 100644
index 7d8c032bb596..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-
-import org.apache.ibatis.annotations.Param;
-
-import java.util.List;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-
-/**
- * DqRuleInputEntryMapper
- */
-public interface DqRuleInputEntryMapper extends BaseMapper<DqRuleInputEntry> {
-
- /**
- * get rule input entry list by rule id
- *
- * @param ruleId Integer
- */
- List<DqRuleInputEntry> getRuleInputEntryList(@Param("ruleId") Integer ruleId);
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java
deleted file mode 100644
index d5ce11392582..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-
-import org.apache.ibatis.annotations.Param;
-
-import java.util.Date;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-import com.baomidou.mybatisplus.core.metadata.IPage;
-
-/**
- * DqRuleMapper
- */
-public interface DqRuleMapper extends BaseMapper<DqRule> {
-
- /**
- * data quality rule page
- *
- * @param page page
- * @param searchVal searchVal
- * @param ruleType ruleType
- * @param startTime startTime
- * @param endTime endTime
- * @return data quality rule page
- */
- IPage<DqRule> queryRuleListPaging(IPage<DqRule> page,
- @Param("searchVal") String searchVal,
- @Param("ruleType") int ruleType,
- @Param("startTime") Date startTime,
- @Param("endTime") Date endTime);
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java
deleted file mode 100644
index 3c7f35f01977..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.entity.DqTaskStatisticsValue;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-
-/**
- * DqTaskStatisticsValueMapper
- */
-public interface DqTaskStatisticsValueMapper extends BaseMapper<DqTaskStatisticsValue> {
-
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/DqExecuteResultDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/DqExecuteResultDao.java
deleted file mode 100644
index c13cf7f0858e..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/DqExecuteResultDao.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.repository;
-
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-
-public interface DqExecuteResultDao extends IDao<DqExecuteResult> {
-
- void deleteByWorkflowInstanceId(Integer workflowInstanceId);
-}
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/DqExecuteResultDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/DqExecuteResultDaoImpl.java
deleted file mode 100644
index 0ca840866940..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/DqExecuteResultDaoImpl.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.repository.impl;
-
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper;
-import org.apache.dolphinscheduler.dao.repository.BaseDao;
-import org.apache.dolphinscheduler.dao.repository.DqExecuteResultDao;
-
-import lombok.NonNull;
-
-import org.springframework.stereotype.Repository;
-
-@Repository
-public class DqExecuteResultDaoImpl extends BaseDao<DqExecuteResult, DqExecuteResultMapper>
- implements
- DqExecuteResultDao {
-
- public DqExecuteResultDaoImpl(@NonNull DqExecuteResultMapper dqExecuteResultMapper) {
- super(dqExecuteResultMapper);
- }
-
- @Override
- public void deleteByWorkflowInstanceId(Integer workflowInstanceId) {
- mybatisMapper.deleteByWorkflowInstanceId(workflowInstanceId);
- }
-}
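
The DAO impl is a thin wrapper: BaseDao supplies generic CRUD over the entity, and custom statements delegate to the injected mapper through the inherited mybatisMapper field. The same pattern on hypothetical types:

    import lombok.NonNull;

    import org.springframework.stereotype.Repository;

    // ExampleEntity, ExampleMapper and ExampleDao are hypothetical stand-ins.
    @Repository
    public class ExampleDaoImpl extends BaseDao<ExampleEntity, ExampleMapper> implements ExampleDao {

        public ExampleDaoImpl(@NonNull ExampleMapper exampleMapper) {
            super(exampleMapper); // BaseDao keeps this as the mybatisMapper field
        }

        @Override
        public void deleteByName(String name) {
            mybatisMapper.deleteByName(name); // custom statement, delegated to the mapper
        }
    }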
diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java
deleted file mode 100644
index 2e0e97eb8f4e..000000000000
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.utils;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * DqRuleUtils
- */
-public class DqRuleUtils {
-
- private DqRuleUtils() {
- throw new IllegalStateException("Utility class");
- }
-
- public static List<DqRuleInputEntry> transformInputEntry(List<DqRuleInputEntry> ruleInputEntryList) {
- for (DqRuleInputEntry dqRuleInputEntry : ruleInputEntryList) {
- Map<String, Object> valuesMap =
- JSONUtils.toMap(dqRuleInputEntry.getValuesMap(), String.class, Object.class);
- if (valuesMap != null) {
-
- if (valuesMap.get(dqRuleInputEntry.getField()) != null) {
- String value = String.valueOf(valuesMap.get(dqRuleInputEntry.getField()));
- dqRuleInputEntry.setData(value);
- }
-
- if (valuesMap.get("is_show") != null) {
- dqRuleInputEntry.setIsShow(Boolean.parseBoolean(String.valueOf(valuesMap.get("is_show"))));
- }
-
- if (valuesMap.get("can_edit") != null) {
- dqRuleInputEntry.setCanEdit(Boolean.parseBoolean(String.valueOf(valuesMap.get("can_edit"))));
- }
- }
- }
-
- return ruleInputEntryList;
- }
-}
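
What transformInputEntry did, concretely: each rule stores per-rule overrides as a values_map JSON blob, and the helper overlays those onto the default form entries. A hypothetical illustration:

    // Values invented for illustration.
    DqRuleInputEntry entry = new DqRuleInputEntry();
    entry.setField("src_filter");
    entry.setData("1=1"); // default value from t_ds_dq_rule_input_entry
    entry.setValuesMap("{\"src_filter\":\"id > 100\",\"can_edit\":\"false\"}");

    DqRuleUtils.transformInputEntry(java.util.Collections.singletonList(entry));

    // entry.getData()    -> "id > 100"  (the key matching the field name wins)
    // entry.getCanEdit() -> false       (the reserved "can_edit" key is applied)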
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml
deleted file mode 100644
index fc6e87a7f45a..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper">
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml
deleted file mode 100644
index 096ddc03b639..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml
+++ /dev/null
@@ -1,111 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper">
-    <select id="queryResultListPaging" resultType="org.apache.dolphinscheduler.dao.entity.DqExecuteResult">
-        SELECT a.id,
-        a.workflow_definition_id,
-        b.name as workflow_definition_name,
-        b.code as workflow_definition_code,
-        a.workflow_instance_id,
-        e.name as workflow_instance_name,
-        b.project_code,
-        a.task_instance_id,
-        c.name as task_name,
-        a.rule_type,
-        a.rule_name,
-        a.statistics_value,
-        a.comparison_value,
-        a.check_type,
-        a.threshold,
-        cp.type as comparison_type_name,
-        a.operator,
-        a.failure_strategy,
-        a.state,
-        a.user_id,
-        d.user_name,
-        a.error_output_path,
-        a.create_time,
-        a.update_time
-        FROM t_ds_dq_execute_result a
-        left join t_ds_workflow_definition b on a.workflow_definition_id = b.id
-        left join t_ds_task_instance c on a.task_instance_id = c.id
-        left join t_ds_workflow_instance e on a.workflow_instance_id = e.id
-        left join t_ds_user d on d.id = a.user_id
-        left join t_ds_dq_comparison_type cp on cp.id = a.comparison_type
-        <where>
-            <if test="searchVal != null and searchVal != ''">
-                and c.name like concat('%', #{searchVal}, '%')
-            </if>
-            <if test="startTime != null and endTime != null">
-                and a.update_time > #{startTime} and a.update_time &lt;= #{endTime}
-            </if>
-            <if test="states != null and states.length > 0">
-                and a.state in
-                <foreach collection="states" item="i" open="(" separator="," close=")">
-                    #{i}
-                </foreach>
-            </if>
-            <if test="user.userType.code != 0">
-                and a.user_id = #{user.id}
-            </if>
-            <if test="ruleType != -1">
-                and a.rule_type = #{ruleType}
-            </if>
-        </where>
-        order by a.update_time desc
-    </select>
-
-    <select id="getExecuteResultById" resultType="org.apache.dolphinscheduler.dao.entity.DqExecuteResult">
-        SELECT a.id,
-        a.workflow_definition_id,
-        a.workflow_instance_id,
-        a.task_instance_id,
-        a.rule_type,
-        a.rule_name,
-        a.statistics_value,
-        a.comparison_value,
-        a.check_type,
-        a.threshold,
-        a.operator,
-        a.failure_strategy,
-        a.state,
-        a.user_id,
-        a.comparison_type,
-        a.error_output_path,
-        b.name as workflow_definition_name,
-        e.name as workflow_instance_name,
-        c.name as task_name,
-        cp.type as comparison_type_name,
-        d.user_name
-        FROM t_ds_dq_execute_result a
-        left join t_ds_workflow_definition b on a.workflow_definition_id = b.id
-        left join t_ds_task_instance c on a.task_instance_id = c.id
-        left join t_ds_workflow_instance e on a.workflow_instance_id = e.id
-        left join t_ds_user d on d.id = a.user_id
-        left join t_ds_dq_comparison_type cp on cp.id = a.comparison_type
-        where task_instance_id = #{taskInstanceId}
-    </select>
-
-    <delete id="deleteByWorkflowInstanceId">
-        delete
-        from t_ds_dq_execute_result
-        where workflow_instance_id = #{workflowInstanceId}
-    </delete>
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml
deleted file mode 100644
index 6424a8df252b..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper">
-    <select id="getExecuteSqlList" resultType="org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql">
-        SELECT * FROM t_ds_dq_rule_execute_sql a join ( SELECT *
-        FROM t_ds_relation_rule_execute_sql where rule_id = #{ruleId}) b
-        on a.id = b.execute_sql_id
-    </select>
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml
deleted file mode 100644
index dbb29f694fca..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper">
-    <select id="getRuleInputEntryList" resultType="org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry">
-        SELECT a.id,
-        a.field,
-        a.type,
-        a.title,
-        a.data,
-        a.options,
-        a.placeholder,
-        a.option_source_type,
-        a.data_type,
-        a.input_type,
-        a.is_show,
-        a.can_edit,
-        a.is_emit,
-        a.is_validate,
-        b.values_map,
-        b.index
-        FROM t_ds_dq_rule_input_entry a join ( SELECT *
-        FROM t_ds_relation_rule_input_entry where rule_id = #{ruleId} ) b
-        on a.id = b.rule_input_entry_id order by b.index
-    </select>
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml
deleted file mode 100644
index 1a1380d82c7d..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqRuleMapper">
-    <select id="queryRuleListPaging" resultType="org.apache.dolphinscheduler.dao.entity.DqRule">
-        SELECT a.id, a.name, a.type, b.user_name, a.create_time, a.update_time
-        FROM t_ds_dq_rule a left join t_ds_user b on a.user_id = b.id
-        <where>
-            <if test="searchVal != null and searchVal != ''">
-                and a.name like concat('%', #{searchVal}, '%')
-            </if>
-            <if test="startTime != null and endTime != null">
-                and a.update_time > #{startTime} and a.update_time &lt;= #{endTime}
-            </if>
-            <if test="ruleType != -1">
-                and a.type = #{ruleType}
-            </if>
-        </where>
-        order by a.update_time desc
-    </select>
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml
deleted file mode 100644
index aa64be42ef2c..000000000000
--- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
-<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DqTaskStatisticsValueMapper">
-</mapper>
diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql
index 0259e67c3c54..f4cdb31f0388 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql
@@ -1103,825 +1103,6 @@ CREATE TABLE t_ds_alert_plugin_instance
PRIMARY KEY (id)
);
---
--- Table structure for table `t_ds_dq_comparison_type`
---
-DROP TABLE IF EXISTS `t_ds_dq_comparison_type`;
-CREATE TABLE `t_ds_dq_comparison_type` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `type` varchar(255) NOT NULL,
- `execute_sql` text DEFAULT NULL,
- `output_table` varchar(100) DEFAULT NULL,
- `name` varchar(255) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- `is_inner_source` tinyint(1) DEFAULT '0',
- PRIMARY KEY (`id`)
-)ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-INSERT INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-
---
--- Table structure for table `t_ds_dq_execute_result`
---
-DROP TABLE IF EXISTS `t_ds_dq_execute_result`;
-CREATE TABLE `t_ds_dq_execute_result` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `workflow_definition_id` int(11) DEFAULT NULL,
- `workflow_instance_id` int(11) DEFAULT NULL,
- `task_instance_id` int(11) DEFAULT NULL,
- `rule_type` int(11) DEFAULT NULL,
- `rule_name` varchar(255) DEFAULT NULL,
- `statistics_value` double DEFAULT NULL,
- `comparison_value` double DEFAULT NULL,
- `check_type` int(11) DEFAULT NULL,
- `threshold` double DEFAULT NULL,
- `operator` int(11) DEFAULT NULL,
- `failure_strategy` int(11) DEFAULT NULL,
- `state` int(11) DEFAULT NULL,
- `user_id` int(11) DEFAULT NULL,
- `comparison_type` int(11) DEFAULT NULL,
- `error_output_path` text DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
---
--- Table structure for table t_ds_dq_rule
---
-DROP TABLE IF EXISTS `t_ds_dq_rule`;
-CREATE TABLE `t_ds_dq_rule` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `name` varchar(255) DEFAULT NULL,
- `type` int(11) DEFAULT NULL,
- `user_id` int(11) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-
---
--- Table structure for table `t_ds_dq_rule_execute_sql`
---
-DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`;
-CREATE TABLE `t_ds_dq_rule_execute_sql` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `index` int(11) DEFAULT NULL,
- `sql` text DEFAULT NULL,
- `table_alias` varchar(255) DEFAULT NULL,
- `type` int(11) DEFAULT NULL,
- `is_error_output_sql` tinyint(1) DEFAULT '0',
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
---
--- Table structure for table `t_ds_dq_rule_input_entry`
---
-DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`;
-CREATE TABLE `t_ds_dq_rule_input_entry` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `field` varchar(255) DEFAULT NULL,
- `type` varchar(255) DEFAULT NULL,
- `title` varchar(255) DEFAULT NULL,
- `data` varchar(255) DEFAULT NULL,
- `options` text DEFAULT NULL,
- `placeholder` varchar(255) DEFAULT NULL,
- `option_source_type` int(11) DEFAULT NULL,
- `data_type` int(11) DEFAULT NULL,
- `input_type` int(11) DEFAULT NULL,
- `is_show` tinyint(1) DEFAULT '1',
- `can_edit` tinyint(1) DEFAULT '1',
- `is_emit` tinyint(1) DEFAULT '0',
- `is_validate` tinyint(1) DEFAULT '1',
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
-INSERT INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
-
---
--- Table structure for table `t_ds_dq_task_statistics_value`
---
-DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`;
-CREATE TABLE `t_ds_dq_task_statistics_value` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `workflow_definition_id` int(11) DEFAULT NULL,
- `task_instance_id` int(11) DEFAULT NULL,
- `rule_id` int(11) NOT NULL,
- `unique_code` varchar(255) NULL,
- `statistics_name` varchar(255) NULL,
- `statistics_value` double NULL,
- `data_time` datetime DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
---
--- Table structure for table `t_ds_relation_rule_execute_sql`
---
-DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`;
-CREATE TABLE `t_ds_relation_rule_execute_sql` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `rule_id` int(11) DEFAULT NULL,
- `execute_sql_id` int(11) DEFAULT NULL,
- `create_time` datetime NULL,
- `update_time` datetime NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
---
--- Table structure for table `t_ds_relation_rule_input_entry`
---
-DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`;
-CREATE TABLE `t_ds_relation_rule_input_entry` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `rule_id` int(11) DEFAULT NULL,
- `rule_input_entry_id` int(11) DEFAULT NULL,
- `values_map` text DEFAULT NULL,
- `index` int(11) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
--
-- Table structure for table t_ds_environment
--
diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
index cfb4997fbeab..19cc8efaf284 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
@@ -1093,827 +1093,6 @@ CREATE TABLE `t_ds_relation_project_worker_group` (
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
---
--- Table structure for table `t_ds_dq_comparison_type`
---
-DROP TABLE IF EXISTS `t_ds_dq_comparison_type`;
-CREATE TABLE `t_ds_dq_comparison_type` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `type` varchar(255) NOT NULL,
- `execute_sql` text DEFAULT NULL,
- `output_table` varchar(100) DEFAULT NULL,
- `name` varchar(255) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- `is_inner_source` tinyint(1) DEFAULT '0',
- PRIMARY KEY (`id`)
-)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(1, 'FixValue', NULL, NULL, NULL, current_timestamp, current_timestamp, false);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', current_timestamp, current_timestamp, true);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time < date_add(date_trunc(''week'', ${data_time}),7) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', current_timestamp, current_timestamp, true);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time < date_add(date_trunc(''month'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', current_timestamp, current_timestamp, true);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', current_timestamp, current_timestamp, true);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', current_timestamp, current_timestamp, true);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', current_timestamp, current_timestamp, false);
-INSERT IGNORE INTO `t_ds_dq_comparison_type`
-(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
-VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', current_timestamp, current_timestamp, false);
-
---
--- Table structure for table `t_ds_dq_execute_result`
---
-DROP TABLE IF EXISTS `t_ds_dq_execute_result`;
-CREATE TABLE `t_ds_dq_execute_result` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `workflow_definition_id` int(11) DEFAULT NULL,
- `workflow_instance_id` int(11) DEFAULT NULL,
- `task_instance_id` int(11) DEFAULT NULL,
- `rule_type` int(11) DEFAULT NULL,
- `rule_name` varchar(255) DEFAULT NULL,
- `statistics_value` double DEFAULT NULL,
- `comparison_value` double DEFAULT NULL,
- `check_type` int(11) DEFAULT NULL,
- `threshold` double DEFAULT NULL,
- `operator` int(11) DEFAULT NULL,
- `failure_strategy` int(11) DEFAULT NULL,
- `state` int(11) DEFAULT NULL,
- `user_id` int(11) DEFAULT NULL,
- `comparison_type` int(11) DEFAULT NULL,
- `error_output_path` text DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
---
--- Table structure for table t_ds_dq_rule
---
-DROP TABLE IF EXISTS `t_ds_dq_rule`;
-CREATE TABLE `t_ds_dq_rule` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `name` varchar(255) DEFAULT NULL,
- `type` int(11) DEFAULT NULL,
- `user_id` int(11) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(1, '$t(null_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(2, '$t(custom_sql)', 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(3, '$t(multi_table_accuracy)', 2, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(4, '$t(multi_table_value_comparison)', 3, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(5, '$t(field_length_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(6, '$t(uniqueness_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(7, '$t(regexp_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(8, '$t(timeliness_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(9, '$t(enumeration_check)', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule`
-(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
-VALUES(10, '$t(table_count_check)', 0, 1, current_timestamp, current_timestamp);
-
---
--- Table structure for table `t_ds_dq_rule_execute_sql`
---
-DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`;
-CREATE TABLE `t_ds_dq_rule_execute_sql` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `index` int(11) DEFAULT NULL,
- `sql` text DEFAULT NULL,
- `table_alias` varchar(255) DEFAULT NULL,
- `type` int(11) DEFAULT NULL,
- `is_error_output_sql` tinyint(1) DEFAULT '0',
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_execute_sql`
-(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
-VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, current_timestamp, current_timestamp);
-
---
--- Table structure for table `t_ds_dq_rule_input_entry`
---
-DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`;
-CREATE TABLE `t_ds_dq_rule_input_entry` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `field` varchar(255) DEFAULT NULL,
- `type` varchar(255) DEFAULT NULL,
- `title` varchar(255) DEFAULT NULL,
- `data` varchar(255) DEFAULT NULL,
- `options` text DEFAULT NULL,
- `placeholder` varchar(255) DEFAULT NULL,
- `option_source_type` int(11) DEFAULT NULL,
- `data_type` int(11) DEFAULT NULL,
- `input_type` int(11) DEFAULT NULL,
- `is_show` tinyint(1) DEFAULT '1',
- `can_edit` tinyint(1) DEFAULT '1',
- `is_emit` tinyint(1) DEFAULT '0',
- `is_validate` tinyint(1) DEFAULT '1',
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `data`, `options`, `placeholder`, `option_source_type`, `data_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-
---
--- Table structure for table `t_ds_dq_task_statistics_value`
---
-DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`;
-CREATE TABLE `t_ds_dq_task_statistics_value` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `workflow_definition_id` int(11) DEFAULT NULL,
- `task_instance_id` int(11) DEFAULT NULL,
- `rule_id` int(11) NOT NULL,
- `unique_code` varchar(255) NULL,
- `statistics_name` varchar(255) NULL,
- `statistics_value` double NULL,
- `data_time` datetime DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
---
--- Table structure for table `t_ds_relation_rule_execute_sql`
---
-DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`;
-CREATE TABLE `t_ds_relation_rule_execute_sql` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `rule_id` int(11) DEFAULT NULL,
- `execute_sql_id` int(11) DEFAULT NULL,
- `create_time` datetime NULL,
- `update_time` datetime NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(3, 5, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(2, 3, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(4, 3, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(5, 6, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(6, 6, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(7, 7, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(8, 7, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(9, 8, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(10, 8, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(11, 9, 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(12, 9, 14, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(13, 10, 15, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(14, 1, 16, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_execute_sql`
-(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
-VALUES(15, 5, 17, current_timestamp, current_timestamp);
-
---
--- Table structure for table `t_ds_relation_rule_input_entry`
---
-DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`;
-CREATE TABLE `t_ds_relation_rule_input_entry` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `rule_id` int(11) DEFAULT NULL,
- `rule_input_entry_id` int(11) DEFAULT NULL,
- `values_map` text DEFAULT NULL,
- `index` int(11) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `update_time` datetime DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE = utf8_bin;
-
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(1, 1, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(2, 1, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(3, 1, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(4, 1, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(5, 1, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(7, 1, 7, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(8, 1, 8, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(9, 1, 9, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(10, 1, 10, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(11, 1, 17, '', 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(12, 1, 19, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(13, 2, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(14, 2, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(15, 2, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(17, 2, 16, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(18, 2, 4, NULL, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(19, 2, 7, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(20, 2, 8, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(21, 2, 9, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(22, 2, 10, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(24, 2, 19, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(25, 3, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(26, 3, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(27, 3, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(28, 3, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(29, 3, 11, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(30, 3, 12, NULL, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(31, 3, 13, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(32, 3, 14, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(33, 3, 15, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(34, 3, 7, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(35, 3, 8, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(36, 3, 9, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(37, 3, 10, NULL, 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(39, 3, 19, NULL, 15, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(40, 4, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(41, 4, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(42, 4, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(44, 4, 16, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(45, 4, 11, NULL, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(46, 4, 12, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(47, 4, 13, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(49, 4, 18, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(50, 4, 7, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(51, 4, 8, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(52, 4, 9, NULL, 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(53, 4, 10, NULL, 14, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(63, 5, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(64, 5, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(65, 5, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(66, 5, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(67, 5, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(69, 5, 24, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(70, 5, 23, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(71, 5, 7, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(72, 5, 8, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(73, 5, 9, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(74, 5, 10, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(75, 5, 17, '', 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(76, 5, 19, NULL, 14, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(79, 6, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(80, 6, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(81, 6, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(82, 6, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(83, 6, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(85, 6, 7, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(86, 6, 8, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(87, 6, 9, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(88, 6, 10, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(89, 6, 17, '', 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(90, 6, 19, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(93, 7, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(94, 7, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(95, 7, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(96, 7, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(97, 7, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(99, 7, 25, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(100, 7, 7, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(101, 7, 8, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(102, 7, 9, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(103, 7, 10, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(104, 7, 17, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(105, 7, 19, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(108, 8, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(109, 8, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(110, 8, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(111, 8, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(112, 8, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(114, 8, 26, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(115, 8, 27, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(116, 8, 7, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(117, 8, 8, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(118, 8, 9, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(119, 8, 10, NULL, 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(120, 8, 17, NULL, 14, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(121, 8, 19, NULL, 15, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(124, 9, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(125, 9, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(126, 9, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(127, 9, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(128, 9, 5, NULL, 5, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(130, 9, 28, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(131, 9, 7, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(132, 9, 8, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(133, 9, 9, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(134, 9, 10, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(135, 9, 17, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(136, 9, 19, NULL, 13, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(139, 10, 1, NULL, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(140, 10, 2, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(141, 10, 3, NULL, 3, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(142, 10, 4, NULL, 4, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(144, 10, 7, NULL, 7, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(145, 10, 8, NULL, 8, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(146, 10, 9, NULL, 9, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(147, 10, 10, NULL, 10, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(148, 10, 17, NULL, 11, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(149, 10, 19, NULL, 12, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(150, 8, 29, NULL, 7, current_timestamp, current_timestamp);
-
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
-(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);
-- ----------------------------
-- Table structure for t_ds_environment
-- ----------------------------
diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql
index 6243772bf2d9..bc6cf3d731b1 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql
@@ -1095,819 +1095,7 @@ CREATE TABLE t_ds_alert_plugin_instance (
CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id)
);
---
--- Table structure for table t_ds_dq_comparison_type
---
-DROP TABLE IF EXISTS t_ds_dq_comparison_type;
-CREATE TABLE t_ds_dq_comparison_type (
- id serial NOT NULL,
- "type" varchar NOT NULL,
- execute_sql varchar NULL,
- output_table varchar NULL,
- "name" varchar NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- is_inner_source bool NULL,
- CONSTRAINT t_ds_dq_comparison_type_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(2, 'DailyAvg', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(3, 'WeeklyAvg', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time < date_add(date_trunc(''week'', ${data_time}),7) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(4, 'MonthlyAvg', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time < date_add(date_trunc(''month'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(5, 'Last7DayAvg', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(6, 'Last30DayAvg', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-INSERT INTO t_ds_dq_comparison_type
-(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source)
-VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
-
---
--- Table structure for table t_ds_dq_execute_result
---
-DROP TABLE IF EXISTS t_ds_dq_execute_result;
-CREATE TABLE t_ds_dq_execute_result (
- id serial NOT NULL,
- workflow_definition_id int4 NULL,
- workflow_instance_id int4 NULL,
- task_instance_id int4 NULL,
- rule_type int4 NULL,
- rule_name varchar(255) DEFAULT NULL,
- statistics_value float8 NULL,
- comparison_value float8 NULL,
- check_type int4 NULL,
- threshold float8 NULL,
- "operator" int4 NULL,
- failure_strategy int4 NULL,
- state int4 NULL,
- user_id int4 NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- comparison_type int4 NULL,
- error_output_path text NULL,
- CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id)
-);
---
--- Table structure for table t_ds_dq_rule
---
-DROP TABLE IF EXISTS t_ds_dq_rule;
-CREATE TABLE t_ds_dq_rule (
- id serial NOT NULL,
- "name" varchar(255) DEFAULT NULL,
- "type" int4 NULL,
- user_id int4 NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-INSERT INTO t_ds_dq_rule
-(id, "name", "type", user_id, create_time, update_time)
-VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
-
---
--- Table structure for table t_ds_dq_rule_execute_sql
---
-DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
-CREATE TABLE t_ds_dq_rule_execute_sql (
- id serial NOT NULL,
- "index" int4 NULL,
- "sql" text NULL,
- table_alias varchar(255) DEFAULT NULL,
- "type" int4 NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- is_error_output_sql bool NULL,
- CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_execute_sql
-(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time)
-VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
---
--- Table structure for table t_ds_dq_rule_input_entry
---
-DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
-CREATE TABLE t_ds_dq_rule_input_entry (
- id serial NOT NULL,
- field varchar(255) DEFAULT NULL,
- "type" varchar(255) DEFAULT NULL,
- title varchar(255) DEFAULT NULL,
- data varchar(255) DEFAULT NULL,
- "options" text DEFAULT NULL,
- placeholder varchar(255) DEFAULT NULL,
- option_source_type int4 NULL,
- data_type int4 NULL,
- input_type int4 NULL,
- is_show int2 NULL DEFAULT '1'::smallint,
- can_edit int2 NULL DEFAULT '1'::smallint,
- is_emit int2 NULL DEFAULT '0'::smallint,
- is_validate int2 NULL DEFAULT '0'::smallint,
- create_time timestamp NULL,
- update_time timestamp NULL,
- CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, data, "options", placeholder, option_source_type, data_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
---
--- Table structure for table t_ds_dq_task_statistics_value
---
-DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
-CREATE TABLE t_ds_dq_task_statistics_value (
- id serial NOT NULL,
- workflow_definition_id int4 NOT NULL,
- task_instance_id int4 NULL,
- rule_id int4 NOT NULL,
- unique_code varchar NOT NULL,
- statistics_name varchar NULL,
- statistics_value float8 NULL,
- data_time timestamp(0) NULL,
- create_time timestamp(0) NULL,
- update_time timestamp(0) NULL,
- CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id)
-);
-
---
--- Table structure for table t_ds_relation_rule_execute_sql
---
-DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
-CREATE TABLE t_ds_relation_rule_execute_sql (
- id serial NOT NULL,
- rule_id int4 NULL,
- execute_sql_id int4 NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_execute_sql
-(id, rule_id, execute_sql_id, create_time, update_time)
-VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
---
--- Table structure for table t_ds_relation_rule_input_entry
---
-DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
-CREATE TABLE t_ds_relation_rule_input_entry (
- id serial NOT NULL,
- rule_id int4 NULL,
- rule_input_entry_id int4 NULL,
- values_map text NULL,
- "index" int4 NULL,
- create_time timestamp NULL,
- update_time timestamp NULL,
- CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id)
-);
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
-(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
-VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
--
-- Table structure for table t_ds_environment
--
diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql
index 21189c77a3a3..4a9351c6b167 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql
@@ -41,14 +41,6 @@ DROP PROCEDURE dolphin_t_ds_tenant_insert_default;
UPDATE t_ds_schedules t1 JOIN t_ds_process_definition t2 ON t1.process_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default');
UPDATE `t_ds_process_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL;
--- data quality support choose database
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
-(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
-
INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql
index f47d16f7c7db..ad8bda203207 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql
@@ -24,14 +24,6 @@ UPDATE t_ds_process_instance SET tenant_code = 'default' WHERE tenant_code IS NU
-- If the admin account is not associated with a tenant, the admin's tenant will be set to the default tenant.
UPDATE t_ds_user SET tenant_id = '-1' WHERE (user_name = 'admin') AND (tenant_id = '0');
--- data quality support choose database
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
-INSERT INTO t_ds_dq_rule_input_entry
-(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
-VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
-
INSERT INTO t_ds_relation_rule_input_entry
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql
index f3bb0b88847c..fb87d88ee66f 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/mysql/dolphinscheduler_ddl.sql
@@ -202,3 +202,23 @@ delimiter ;
CALL rename_tables_and_fields_from_process_to_workflow;
DROP PROCEDURE rename_tables_and_fields_from_process_to_workflow;
+-- Drop data quality tables
+DROP PROCEDURE IF EXISTS drop_data_quality_tables;
+delimiter d//
+CREATE PROCEDURE drop_data_quality_tables()
+BEGIN
+
+DROP TABLE IF EXISTS t_ds_dq_comparison_type;
+DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
+DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
+DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
+DROP TABLE IF EXISTS t_ds_dq_execute_result;
+DROP TABLE IF EXISTS t_ds_dq_rule;
+DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
+DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
+
+END;
+d//
+delimiter ;
+CALL drop_data_quality_tables;
+DROP PROCEDURE drop_data_quality_tables;
diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql
index 82759e1a5054..b9fe362c8d14 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/3.3.0_schema/postgresql/dolphinscheduler_ddl.sql
@@ -204,3 +204,24 @@ d//
select rename_tables_and_fields_from_process_to_workflow();
DROP FUNCTION IF EXISTS rename_tables_and_fields_from_process_to_workflow();
+
+-- Drop data quality tables
+delimiter d//
+CREATE OR REPLACE FUNCTION drop_data_quality_tables() RETURNS void AS $$
+BEGIN
+
+DROP TABLE IF EXISTS t_ds_dq_comparison_type;
+DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql;
+DROP TABLE IF EXISTS t_ds_dq_rule_input_entry;
+DROP TABLE IF EXISTS t_ds_dq_task_statistics_value;
+DROP TABLE IF EXISTS t_ds_dq_execute_result;
+DROP TABLE IF EXISTS t_ds_dq_rule;
+DROP TABLE IF EXISTS t_ds_relation_rule_input_entry;
+DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql;
+
+END;
+$$ LANGUAGE plpgsql;
+d//
+
+select drop_data_quality_tables();
+DROP FUNCTION IF EXISTS drop_data_quality_tables();
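Both upgrade scripts drop the same eight data-quality tables, wrapped in a throwaway procedure/function only so the upgrade runner's `d//` delimiter splitting keeps the multi-statement body intact. Below is a minimal post-upgrade smoke check as a sketch; the JDBC URL, credentials, and schema handling are illustrative placeholders, not values from this patch.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

// Hypothetical smoke check that the 3.3.0 upgrade really removed the
// data-quality tables. Connection details are invented placeholders.
public class DqTablesDroppedCheck {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/dolphinscheduler", "root", "root");
             PreparedStatement stmt = conn.prepareStatement(
                     "SELECT table_name FROM information_schema.tables "
                             + "WHERE table_schema = DATABASE() "
                             + "AND (table_name LIKE 't\\_ds\\_dq\\_%' "
                             + "OR table_name LIKE 't\\_ds\\_relation\\_rule\\_%')");
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                // Any row here means a DROP TABLE statement did not take effect.
                System.out.println("leftover table: " + rs.getString(1));
            }
        }
    }
}
```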
diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapperTest.java
deleted file mode 100644
index 8fb967236217..000000000000
--- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapperTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.dao.mapper;
-
-import org.apache.dolphinscheduler.dao.BaseDaoTest;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
-
-import java.util.List;
-
-import org.junit.jupiter.api.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
-
-/**
- * DQC rule mapper UT.
- */
-public class DqRuleInputEntryMapperTest extends BaseDaoTest {
-
- @Autowired
- private DqRuleInputEntryMapper dqRuleInputEntryMapper;
-
- @Autowired
- private DqRuleMapper dqRuleMapper;
-
- @Test
- public void testDqcRulePageList() {
-
- Page<DqRule> page = new Page<>(1, 10);
-
- IPage<DqRule> dqRulePage =
- dqRuleMapper.queryRuleListPaging(
- page,
- "",
- -1,
- null,
- null);
-
- dqRulePage.getRecords().forEach(rule -> {
- final List<DqRuleInputEntry> ruleInputEntryList = dqRuleInputEntryMapper.getRuleInputEntryList(1);
- assert ruleInputEntryList != null;
- });
- }
-}
diff --git a/dolphinscheduler-data-quality/pom.xml b/dolphinscheduler-data-quality/pom.xml
deleted file mode 100644
index 0643fdac5af4..000000000000
--- a/dolphinscheduler-data-quality/pom.xml
+++ /dev/null
@@ -1,201 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.dolphinscheduler</groupId>
-        <artifactId>dolphinscheduler</artifactId>
-        <version>dev-SNAPSHOT</version>
-    </parent>
-    <artifactId>dolphinscheduler-data-quality</artifactId>
-    <packaging>jar</packaging>
-    <name>dolphinscheduler-data-quality</name>
-
-    <dependencyManagement>
-        <dependencies>
-            <dependency>
-                <groupId>org.apache.dolphinscheduler</groupId>
-                <artifactId>dolphinscheduler-bom</artifactId>
-                <version>${project.version}</version>
-                <type>pom</type>
-                <scope>import</scope>
-            </dependency>
-        </dependencies>
-    </dependencyManagement>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.12</artifactId>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.module</groupId>
-                    <artifactId>jackson-module-scala_2.11</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.12</artifactId>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_2.12</artifactId>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-httpclient</groupId>
-                    <artifactId>commons-httpclient</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.httpcomponents</groupId>
-                    <artifactId>httpclient</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.h2database</groupId>
-            <artifactId>h2</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.postgresql</groupId>
-            <artifactId>postgresql</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-jdbc</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.clickhouse</groupId>
-            <artifactId>clickhouse-jdbc</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.databend</groupId>
-            <artifactId>databend-jdbc</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.microsoft.sqlserver</groupId>
-            <artifactId>mssql-jdbc</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.microsoft.azure</groupId>
-                    <artifactId>azure-keyvault</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.facebook.presto</groupId>
-            <artifactId>presto-jdbc</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.module</groupId>
-            <artifactId>jackson-module-scala_2.11</artifactId>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.codehaus.janino</groupId>
-            <artifactId>janino</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-collections4</artifactId>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <mainClass>org.apache.dolphinscheduler.data.quality.DataQualityApplication</mainClass>
-                        </manifest>
-                    </archive>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java
deleted file mode 100644
index 3d2cc314b62d..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality;
-
-/**
- * Constants
- */
-public final class Constants {
-
- private Constants() {
- throw new IllegalStateException("Construct Constants");
- }
-
- public static final String DATABASE = "database";
-
- public static final String TABLE = "table";
-
- public static final String URL = "url";
-
- public static final String USER = "user";
-
- public static final String PASSWORD = "password";
-
- public static final String DRIVER = "driver";
-
- public static final String EMPTY = "";
-
- public static final String SQL = "sql";
-
- public static final String DOTS = ".";
-
- public static final String INPUT_TABLE = "input_table";
-
- public static final String OUTPUT_TABLE = "output_table";
-
- public static final String TMP_TABLE = "tmp_table";
-
- public static final String DB_TABLE = "dbtable";
-
- public static final String JDBC = "jdbc";
-
- public static final String SAVE_MODE = "save_mode";
-
- public static final String APPEND = "append";
-
- public static final String SPARK_APP_NAME = "spark.app.name";
-
- /**
- * date format of yyyy-MM-dd HH:mm:ss
- */
- public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java
deleted file mode 100644
index 6918a548ac36..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.SPARK_APP_NAME;
-import static org.apache.dolphinscheduler.data.quality.enums.ReaderType.HIVE;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
-import org.apache.dolphinscheduler.data.quality.config.EnvConfig;
-import org.apache.dolphinscheduler.data.quality.context.DataQualityContext;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.utils.JsonUtils;
-
-import lombok.extern.slf4j.Slf4j;
-
-import com.google.common.base.Strings;
-
-/**
- * DataQualityApplication is spark application.
- * It mainly includes three components: reader, transformer and writer.
- * These three components realize the functions of connecting data, executing intermediate SQL
- * and writing execution results and error data to the specified storage engine
- */
-@Slf4j
-public class DataQualityApplication {
-
- public static void main(String[] args) throws Exception {
-
- if (args.length < 1) {
- log.error("Can not find DataQualityConfiguration");
- System.exit(-1);
- }
-
- String dataQualityParameter = args[0];
-
- DataQualityConfiguration dataQualityConfiguration =
- JsonUtils.fromJson(dataQualityParameter, DataQualityConfiguration.class);
- if (dataQualityConfiguration == null) {
- log.info("DataQualityConfiguration is null");
- System.exit(-1);
- } else {
- dataQualityConfiguration.validate();
- }
-
- EnvConfig envConfig = dataQualityConfiguration.getEnvConfig();
- Config config = new Config(envConfig.getConfig());
- config.put("type", envConfig.getType());
- if (Strings.isNullOrEmpty(config.getString(SPARK_APP_NAME))) {
- config.put(SPARK_APP_NAME, dataQualityConfiguration.getName());
- }
-
- boolean hiveClientSupport = dataQualityConfiguration
- .getReaderConfigs()
- .stream()
- .anyMatch(line -> line.getType().equalsIgnoreCase(HIVE.name()));
-
- SparkRuntimeEnvironment sparkRuntimeEnvironment = new SparkRuntimeEnvironment(config, hiveClientSupport);
-
- DataQualityContext dataQualityContext =
- new DataQualityContext(sparkRuntimeEnvironment, dataQualityConfiguration);
-
- dataQualityContext.execute();
- }
-}
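The deleted entry point consumed a single JSON document passed as `args[0]`. Going by the `@JsonProperty` names in the (also deleted) config classes, that document carried top-level `name`, `env`, `readers`, `transformers`, and `writers` keys, with every non-env section a list of `{type, config}` pairs. A sketch of that shape, with invented placeholder values, parsed the way any Jackson consumer would:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustrative only: a config document shaped after the @JsonProperty names
// in the deleted DataQualityConfiguration ("name", "env", "readers",
// "transformers", "writers"). All field values are made-up placeholders.
public class DqConfigShape {

    public static void main(String[] args) throws Exception {
        String json = "{"
                + "\"name\": \"null_check\","
                + "\"env\": {\"type\": \"batch\", \"config\": null},"
                + "\"readers\": [{\"type\": \"JDBC\", \"config\": {\"table\": \"demo_src\"}}],"
                + "\"transformers\": [{\"type\": \"sql\", \"config\": {\"sql\": \"SELECT COUNT(*) FROM demo_src\"}}],"
                + "\"writers\": [{\"type\": \"JDBC\", \"config\": {\"table\": \"t_ds_dq_execute_result\"}}]"
                + "}";
        JsonNode root = new ObjectMapper().readTree(json);
        System.out.println(root.get("name").asText()
                + " has " + root.get("readers").size() + " reader(s)");
    }
}
```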
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java
deleted file mode 100644
index 60c5b810c3b4..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.Map;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-
-/**
- * BaseConfig
- */
-public class BaseConfig implements IConfig {
-
- @JsonProperty("type")
- private String type;
-
- @JsonProperty("config")
- private Map<String, Object> config;
-
- public BaseConfig() {
- }
-
- public BaseConfig(String type, Map<String, Object> config) {
- this.type = type;
- this.config = config;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public Map<String, Object> getConfig() {
- return config;
- }
-
- public void setConfig(Map<String, Object> config) {
- this.config = config;
- }
-
- @Override
- public void validate() {
- Preconditions.checkArgument(!Strings.isNullOrEmpty(type), "type should not be empty");
- Preconditions.checkArgument(config != null, "config should not be empty");
- }
-}
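`BaseConfig.validate()` failed fast through Guava's `Preconditions`, which throws `IllegalArgumentException` with the supplied message whenever the checked condition is false. A small standalone illustration of that failure mode (Guava on the classpath is assumed):

```java
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;

// Demonstrates the failure path of the deleted validate(): an empty "type"
// trips checkArgument, which raises IllegalArgumentException.
public class ValidateDemo {

    public static void main(String[] args) {
        String type = "";
        try {
            Preconditions.checkArgument(!Strings.isNullOrEmpty(type), "type should not be empty");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage()); // rejected: type should not be empty
        }
    }
}
```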
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java
deleted file mode 100644
index 0bfe9e8df1ba..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-/**
- * Config
- */
-public class Config {
-
- private Map<String, Object> configuration = new HashMap<>();
-
- public Config() {
-
- }
-
- public Config(Map<String, Object> configuration) {
- if (configuration != null) {
- this.configuration = configuration;
- }
- }
-
- public String getString(String key) {
- return configuration.get(key) == null ? null : String.valueOf(configuration.get(key));
- }
-
- public List<String> getStringList(String key) {
- return (List<String>) configuration.get(key);
- }
-
- public Integer getInt(String key) {
- return Integer.valueOf(String.valueOf(configuration.get(key)));
- }
-
- public Boolean getBoolean(String key) {
- return Boolean.valueOf(String.valueOf(configuration.get(key)));
- }
-
- public Double getDouble(String key) {
- return Double.valueOf(String.valueOf(configuration.get(key)));
- }
-
- public Long getLong(String key) {
- return Long.valueOf(String.valueOf(configuration.get(key)));
- }
-
- public Boolean has(String key) {
- return configuration.get(key) != null;
- }
-
- public Set<Entry<String, Object>> entrySet() {
- return configuration.entrySet();
- }
-
- public boolean isEmpty() {
- return configuration.size() <= 0;
- }
-
- public boolean isNotEmpty() {
- return configuration.size() > 0;
- }
-
- public void put(String key, Object value) {
- this.configuration.put(key, value);
- }
-
- public void merge(Map<String, Object> configuration) {
- configuration.forEach(this.configuration::putIfAbsent);
- }
-
- public Map<String, Object> configurationMap() {
- return this.configuration;
- }
-}
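One detail of the deleted `Config` class worth noting: `merge()` delegates to `putIfAbsent`, so values set explicitly always win over merged-in defaults. A standalone sketch of that semantics using a plain `HashMap`, since the class itself is gone:

```java
import java.util.HashMap;
import java.util.Map;

// Mirrors the merge semantics of the deleted Config class: merge() used
// putIfAbsent, so keys already present (explicit settings) are kept and
// only missing keys are filled in from the defaults map.
public class MergeSemanticsDemo {

    public static void main(String[] args) {
        Map<String, Object> configuration = new HashMap<>();
        configuration.put("spark.app.name", "my_dq_job"); // explicit setting

        Map<String, Object> defaults = new HashMap<>();
        defaults.put("spark.app.name", "default_name");   // ignored: key exists
        defaults.put("save_mode", "append");               // added: key missing

        defaults.forEach(configuration::putIfAbsent);      // what Config.merge(...) did

        System.out.println(configuration); // {spark.app.name=my_dq_job, save_mode=append}
    }
}
```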
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/DataQualityConfiguration.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/DataQualityConfiguration.java
deleted file mode 100644
index 43a4a271c47a..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/DataQualityConfiguration.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.List;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-
-/**
- * DataQualityConfiguration
- */
-public class DataQualityConfiguration implements IConfig {
-
- @JsonProperty("name")
- private String name;
-
- @JsonProperty("env")
- private EnvConfig envConfig;
-
- @JsonProperty("readers")
- private List<ReaderConfig> readerConfigs;
-
- @JsonProperty("transformers")
- private List<TransformerConfig> transformerConfigs;
-
- @JsonProperty("writers")
- private List<WriterConfig> writerConfigs;
-
- public DataQualityConfiguration() {
- }
-
- public DataQualityConfiguration(String name,
- EnvConfig envConfig,
- List<ReaderConfig> readerConfigs,
- List<WriterConfig> writerConfigs,
- List<TransformerConfig> transformerConfigs) {
- this.name = name;
- this.envConfig = envConfig;
- this.readerConfigs = readerConfigs;
- this.writerConfigs = writerConfigs;
- this.transformerConfigs = transformerConfigs;
- }
-
- public String getName() {
- return name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public EnvConfig getEnvConfig() {
- return envConfig;
- }
-
- public void setEnvConfig(EnvConfig envConfig) {
- this.envConfig = envConfig;
- }
-
- public List<ReaderConfig> getReaderConfigs() {
- return readerConfigs;
- }
-
- public void setReaderConfigs(List<ReaderConfig> readerConfigs) {
- this.readerConfigs = readerConfigs;
- }
-
- public List<TransformerConfig> getTransformerConfigs() {
- return transformerConfigs;
- }
-
- public void setTransformerConfigs(List<TransformerConfig> transformerConfigs) {
- this.transformerConfigs = transformerConfigs;
- }
-
- public List<WriterConfig> getWriterConfigs() {
- return writerConfigs;
- }
-
- public void setWriterConfigs(List<WriterConfig> writerConfigs) {
- this.writerConfigs = writerConfigs;
- }
-
- @Override
- public void validate() {
- Preconditions.checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
-
- Preconditions.checkArgument(envConfig != null, "env config should not be empty");
-
- Preconditions.checkArgument(readerConfigs != null, "reader config should not be empty");
- for (ReaderConfig readerConfig : readerConfigs) {
- readerConfig.validate();
- }
-
- Preconditions.checkArgument(transformerConfigs != null, "transform config should not be empty");
- for (TransformerConfig transformParameter : transformerConfigs) {
- transformParameter.validate();
- }
-
- Preconditions.checkArgument(writerConfigs != null, "writer config should not be empty");
- for (WriterConfig writerConfig : writerConfigs) {
- writerConfig.validate();
- }
- }
-
- @Override
- public String toString() {
- return "DataQualityConfiguration{"
- + "name='" + name + '\''
- + ", envConfig=" + envConfig
- + ", readerConfigs=" + readerConfigs
- + ", transformerConfigs=" + transformerConfigs
- + ", writerConfigs=" + writerConfigs
- + '}';
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java
deleted file mode 100644
index b37c857338b4..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.Map;
-
-/**
- * EnvConfig
- */
-public class EnvConfig extends BaseConfig {
-
- public EnvConfig() {
- }
-
- public EnvConfig(String type, Map<String, Object> config) {
- super(type, config);
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java
deleted file mode 100644
index fede660f8aaa..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-/**
- * IConfig
- */
-public interface IConfig {
-
- /**
- * check the parameter
- */
- void validate();
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java
deleted file mode 100644
index 900be619ad15..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.Map;
-
-/**
- * ReaderConfig
- */
-public class ReaderConfig extends BaseConfig {
-
- public ReaderConfig() {
- }
-
- public ReaderConfig(String type, Map<String, Object> config) {
- super(type, config);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java
deleted file mode 100644
index 47a540760ba3..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.Map;
-
-/**
- * TransformerConfig
- */
-public class TransformerConfig extends BaseConfig {
-
- public TransformerConfig() {
- }
-
- public TransformerConfig(String type, Map<String, Object> config) {
- super(type, config);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java
deleted file mode 100644
index d74534e8039e..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-public class ValidateResult {
-
- private boolean success;
-
- private String msg;
-
- public ValidateResult(boolean success, String msg) {
- this.success = success;
- this.msg = msg;
- }
-
- public boolean isSuccess() {
- return success;
- }
-
- public void setSuccess(boolean success) {
- this.success = success;
- }
-
- public String getMsg() {
- return msg;
- }
-
- public void setMsg(String msg) {
- this.msg = msg;
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java
deleted file mode 100644
index 9132c8930fe0..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.config;
-
-import java.util.Map;
-
-/**
- * WriterConfig
- */
-public class WriterConfig extends BaseConfig {
-
- public WriterConfig() {
- }
-
- public WriterConfig(String type, Map<String, Object> config) {
- super(type, config);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java
deleted file mode 100644
index b702461533b5..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.context;
-
-import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory;
-import org.apache.dolphinscheduler.data.quality.flow.batch.transformer.TransformerFactory;
-import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory;
-
-import java.util.List;
-
-/**
- * DataQualityContext
- */
-public class DataQualityContext {
-
- private SparkRuntimeEnvironment sparkRuntimeEnvironment;
-
- private DataQualityConfiguration dataQualityConfiguration;
-
- public DataQualityContext() {
- }
-
- public DataQualityContext(SparkRuntimeEnvironment sparkRuntimeEnvironment,
- DataQualityConfiguration dataQualityConfiguration) {
- this.sparkRuntimeEnvironment = sparkRuntimeEnvironment;
- this.dataQualityConfiguration = dataQualityConfiguration;
- }
-
- public void execute() throws DataQualityException {
- List<BatchReader> readers = ReaderFactory
- .getInstance()
- .getReaders(this.sparkRuntimeEnvironment, dataQualityConfiguration.getReaderConfigs());
- List<BatchTransformer> transformers = TransformerFactory
- .getInstance()
- .getTransformer(this.sparkRuntimeEnvironment, dataQualityConfiguration.getTransformerConfigs());
- List<BatchWriter> writers = WriterFactory
- .getInstance()
- .getWriters(this.sparkRuntimeEnvironment, dataQualityConfiguration.getWriterConfigs());
-
- if (sparkRuntimeEnvironment.isBatch()) {
- sparkRuntimeEnvironment.getBatchExecution().execute(readers, transformers, writers);
- } else {
- throw new DataQualityException("stream mode is not supported now");
- }
- }
-}
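`DataQualityContext.execute()` staged the job as readers feeding transformers feeding writers, then handed the three lists to the Spark batch execution. Below is a stripped-down, non-Spark model of that staging; plain `String` stands in for the `Dataset<Row>` the real components exchanged:

```java
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

// A toy model of the reader -> transformer -> writer flow that the deleted
// DataQualityContext delegated to SparkBatchExecution. Not the real API.
public class BatchPipelineSketch {

    static String run(List<Supplier<String>> readers,
                      List<Function<String, String>> transformers,
                      List<Consumer<String>> writers) {
        String data = readers.get(0).get();            // read the input
        for (Function<String, String> t : transformers) {
            data = t.apply(data);                      // apply each transform in order
        }
        for (Consumer<String> w : writers) {
            w.accept(data);                            // fan the result out to every writer
        }
        return data;
    }

    public static void main(String[] args) {
        run(List.of(() -> "raw rows"),
            List.of(d -> d + " -> counted"),
            List.of(System.out::println));             // raw rows -> counted
    }
}
```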
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java
deleted file mode 100644
index e0cc02b3d870..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.enums;
-
-/**
- * ReaderType
- */
-public enum ReaderType {
-
- /**
- * JDBC
- * HIVE
- */
- JDBC,
- HIVE;
-
- public static ReaderType getType(String name) {
- for (ReaderType type : ReaderType.values()) {
- if (type.name().equalsIgnoreCase(name)) {
- return type;
- }
- }
-
- return null;
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java
deleted file mode 100644
index 07744d9eec17..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.enums;
-
-/**
- * TransformerType
- */
-public enum TransformerType {
-
- /**
- * JDBC
- */
- SQL;
-
- public static TransformerType getType(String name) {
- for (TransformerType type : TransformerType.values()) {
- if (type.name().equalsIgnoreCase(name)) {
- return type;
- }
- }
-
- return null;
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java
deleted file mode 100644
index f46430a4f68f..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.enums;
-
-/**
- * WriterType
- */
-public enum WriterType {
-
- /**
- * JDBC
- */
- JDBC,
- LOCAL_FILE,
- HDFS_FILE;
-
- public static WriterType getType(String name) {
- for (WriterType type : WriterType.values()) {
- if (type.name().equalsIgnoreCase(name)) {
- return type;
- }
- }
-
- return null;
- }
-}
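`ReaderType`, `TransformerType`, and `WriterType` each repeat the same case-insensitive `getType` lookup that returns `null` on a miss. The shared pattern, expressed once as a generic helper (a sketch, not code from the module):

```java
// Generic version of the lookup the three deleted enums duplicated:
// scan the constants, match ignoring case, signal "unknown" with null.
public class EnumLookup {

    enum WriterType { JDBC, LOCAL_FILE, HDFS_FILE }

    static <E extends Enum<E>> E getType(Class<E> enumClass, String name) {
        for (E type : enumClass.getEnumConstants()) {
            if (type.name().equalsIgnoreCase(name)) {
                return type;
            }
        }
        return null; // the originals also signalled a miss with null
    }

    public static void main(String[] args) {
        System.out.println(getType(WriterType.class, "hdfs_file")); // HDFS_FILE
        System.out.println(getType(WriterType.class, "kafka"));     // null
    }
}
```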
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java
deleted file mode 100644
index 057090138b0d..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.exception;
-
-/**
- * ConfigRuntimeException
- */
-public class ConfigRuntimeException extends RuntimeException {
-
- public ConfigRuntimeException() {
- super();
- }
-
- public ConfigRuntimeException(String message) {
- super(message);
- }
-
- public ConfigRuntimeException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public ConfigRuntimeException(Throwable cause) {
- super(cause);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java
deleted file mode 100644
index 34df8ad6cf51..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.exception;
-
-/**
- * data quality exception
- */
-public class DataQualityException extends Exception {
-
- public DataQualityException() {
- super();
- }
-
- /**
- * Construct a new exception with the detail message
- *
- * @param message detail message
- */
- public DataQualityException(String message) {
- super(message);
- }
-
- /**
- * Construct a new exception with the detail message and cause
- *
- * @param message the detail message
- * @param cause the cause
- * @since 1.4
- */
- public DataQualityException(String message, Throwable cause) {
- super(message, cause);
- }
-
- /**
- * Construct a new exception with the given cause
- *
- * @param cause the cause
- */
- public DataQualityException(Throwable cause) {
- super(cause);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java
deleted file mode 100644
index 527e10397ad8..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.execution;
-
-import org.apache.dolphinscheduler.data.quality.flow.Component;
-
-import java.util.List;
-
-/**
- * Execution
- */
-public interface Execution<R extends Component, T extends Component, W extends Component> {
-
- /**
- * execute
- * @param readers readers
- * @param transformers transformers
- * @param writers writers
- */
- void execute(List<R> readers, List<T> transformers, List<W> writers);
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java
deleted file mode 100644
index cf55c1c23de4..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.execution;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.INPUT_TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.OUTPUT_TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.TMP_TABLE;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.exception.ConfigRuntimeException;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.List;
-
-/**
- * SparkBatchExecution is responsible for executing readers, transformers and writers
- */
-public class SparkBatchExecution implements Execution<BatchReader, BatchTransformer, BatchWriter> {
-
- private final SparkRuntimeEnvironment environment;
-
- public SparkBatchExecution(SparkRuntimeEnvironment environment) throws ConfigRuntimeException {
- this.environment = environment;
- }
-
- @Override
- public void execute(List<BatchReader> readers, List<BatchTransformer> transformers, List<BatchWriter> writers) {
- readers.forEach(reader -> registerInputTempView(reader, environment));
-
- if (!readers.isEmpty()) {
- Dataset<Row> ds = readers.get(0).read(environment);
- for (BatchTransformer tf : transformers) {
- ds = executeTransformer(environment, tf, ds);
- registerTransformTempView(tf, ds);
- }
-
- for (BatchWriter sink : writers) {
- executeWriter(environment, sink, ds);
- }
- }
-
- environment.sparkSession().stop();
- }
-
- private void registerTempView(String tableName, Dataset<Row> ds) {
- if (ds != null) {
- ds.createOrReplaceTempView(tableName);
- } else {
- throw new ConfigRuntimeException("dataset is null, can not createOrReplaceTempView");
- }
- }
-
- private void registerInputTempView(BatchReader reader, SparkRuntimeEnvironment environment) {
- Config conf = reader.getConfig();
- if (Boolean.TRUE.equals(conf.has(OUTPUT_TABLE))) {
- String tableName = conf.getString(OUTPUT_TABLE);
- registerTempView(tableName, reader.read(environment));
- } else {
- throw new ConfigRuntimeException(
- "[" + reader.getClass().getName()
- + "] must be registered as dataset, please set \"output_table\" config");
- }
- }
-
- private Dataset<Row> executeTransformer(SparkRuntimeEnvironment environment, BatchTransformer transformer,
- Dataset<Row> dataset) {
- Config config = transformer.getConfig();
- Dataset<Row> inputDataset;
- Dataset<Row> outputDataset = null;
- if (Boolean.TRUE.equals(config.has(INPUT_TABLE))) {
- String[] tableNames = config.getString(INPUT_TABLE).split(",");
-
- for (String sourceTableName : tableNames) {
- inputDataset = environment.sparkSession().read().table(sourceTableName);
-
- if (outputDataset == null) {
- outputDataset = inputDataset;
- } else {
- outputDataset = outputDataset.union(inputDataset);
- }
- }
- } else {
- outputDataset = dataset;
- }
-
- if (Boolean.TRUE.equals(config.has(TMP_TABLE))) {
- if (outputDataset == null) {
- outputDataset = dataset;
- }
- String tableName = config.getString(TMP_TABLE);
- registerTempView(tableName, outputDataset);
- }
-
- return transformer.transform(outputDataset, environment);
- }
-
- private void registerTransformTempView(BatchTransformer transformer, Dataset<Row> ds) {
- Config config = transformer.getConfig();
- if (Boolean.TRUE.equals(config.has(OUTPUT_TABLE))) {
- String tableName = config.getString(OUTPUT_TABLE);
- registerTempView(tableName, ds);
- }
- }
-
- private void executeWriter(SparkRuntimeEnvironment environment, BatchWriter writer, Dataset<Row> ds) {
- Config config = writer.getConfig();
- Dataset<Row> inputDataSet = ds;
- if (Boolean.TRUE.equals(config.has(INPUT_TABLE))) {
- String sourceTableName = config.getString(INPUT_TABLE);
- inputDataSet = environment.sparkSession().read().table(sourceTableName);
- }
- writer.write(inputDataSet, environment);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java
deleted file mode 100644
index 34a9906e14b2..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.execution;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-
-import org.apache.spark.SparkConf;
-import org.apache.spark.sql.SparkSession;
-
-/**
- * The SparkRuntimeEnvironment is responsible for creating the SparkSession and the SparkBatchExecution
- */
-public class SparkRuntimeEnvironment {
-
- private static final String TYPE = "type";
- private static final String BATCH = "batch";
-
- private SparkSession sparkSession;
-
- private Config config = new Config();
-
- public SparkRuntimeEnvironment(Config config, boolean hiveClientSupport) {
- if (config != null) {
- this.config = config;
- }
-
- this.prepare(hiveClientSupport);
- }
-
- public Config getConfig() {
- return this.config;
- }
-
- public void prepare(boolean hiveClientSupport) {
- SparkSession.Builder sparkSessionBuilder = SparkSession.builder().config(createSparkConf());
-
- this.sparkSession = hiveClientSupport ? sparkSessionBuilder.enableHiveSupport().getOrCreate()
- : sparkSessionBuilder.getOrCreate();
- }
-
- private SparkConf createSparkConf() {
- SparkConf conf = new SparkConf();
- this.config.entrySet()
- .forEach(entry -> conf.set(entry.getKey(), String.valueOf(entry.getValue())));
- conf.set("spark.sql.crossJoin.enabled", "true");
- return conf;
- }
-
- public SparkSession sparkSession() {
- return sparkSession;
- }
-
- public boolean isBatch() {
- return BATCH.equalsIgnoreCase(config.getString(TYPE));
- }
-
- public SparkBatchExecution getBatchExecution() {
- return new SparkBatchExecution(this);
- }
-}
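
For context on the two execution classes removed above: a `Config` drove both the `SparkConf` and the component factories, and `SparkBatchExecution` ran the resulting readers, transformers, and writers. The sketch below reconstructs that wiring from the deleted sources; the settings keys/values are illustrative, and it naturally compiles only against the module as it existed before this removal.

```java
import org.apache.dolphinscheduler.data.quality.config.Config;
import org.apache.dolphinscheduler.data.quality.config.ReaderConfig;
import org.apache.dolphinscheduler.data.quality.config.TransformerConfig;
import org.apache.dolphinscheduler.data.quality.config.WriterConfig;
import org.apache.dolphinscheduler.data.quality.execution.SparkBatchExecution;
import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory;
import org.apache.dolphinscheduler.data.quality.flow.batch.transformer.TransformerFactory;
import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DataQualityPipelineSketch {

    public static void main(String[] args) throws Exception {
        // Every entry is copied verbatim into SparkConf by createSparkConf();
        // "type" additionally drives isBatch().
        Map<String, Object> settings = new HashMap<>();
        settings.put("spark.app.name", "data-quality-sketch"); // illustrative value
        settings.put("type", "batch");

        // false = no Hive client support; true would call enableHiveSupport().
        SparkRuntimeEnvironment env = new SparkRuntimeEnvironment(new Config(settings), false);

        // In the real task these three lists were parsed from the task's JSON
        // parameters; empty lists keep the sketch self-contained.
        List<ReaderConfig> readerConfigs = new ArrayList<>();
        List<TransformerConfig> transformerConfigs = new ArrayList<>();
        List<WriterConfig> writerConfigs = new ArrayList<>();

        List<BatchReader> readers = ReaderFactory.getInstance().getReaders(env, readerConfigs);
        List<BatchTransformer> transformers =
                TransformerFactory.getInstance().getTransformer(env, transformerConfigs);
        List<BatchWriter> writers = WriterFactory.getInstance().getWriters(env, writerConfigs);

        // execute() registers reader temp views, chains the transformers,
        // runs the writers, and finally stops the SparkSession.
        SparkBatchExecution execution = env.getBatchExecution();
        execution.execute(readers, transformers, writers);
    }
}
```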
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java
deleted file mode 100644
index c85c2cf50d14..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * Component
- */
-public interface Component {
-
- Config getConfig();
-
- ValidateResult validateConfig();
-
- default ValidateResult validate(List<String> requiredOptions) {
- List<String> nonExistsOptions = new ArrayList<>();
- requiredOptions.forEach(x -> {
- if (Boolean.FALSE.equals(getConfig().has(x))) {
- nonExistsOptions.add(x);
- }
- });
-
- if (!nonExistsOptions.isEmpty()) {
- return new ValidateResult(
- false,
- nonExistsOptions.stream().map(option -> "[" + option + "]").collect(Collectors.joining(","))
- + " is not exist");
- } else {
- return new ValidateResult(true, "");
- }
- }
-
- void prepare(SparkRuntimeEnvironment prepareEnv);
-}
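
To illustrate the default `validate` helper being removed above: it collected all missing required options and reported them in a single `ValidateResult` instead of throwing. A minimal sketch, where the `url`/`user`/`password` option names are just examples:

```java
import org.apache.dolphinscheduler.data.quality.config.Config;
import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
import org.apache.dolphinscheduler.data.quality.flow.Component;

import java.util.Arrays;
import java.util.Collections;

public class ValidateSketch {

    public static void main(String[] args) {
        Config config = new Config(Collections.singletonMap("url", "jdbc:mysql://host:3306/db"));

        Component component = new Component() {

            @Override
            public Config getConfig() {
                return config;
            }

            @Override
            public ValidateResult validateConfig() {
                // "user" and "password" are absent, so this yields a failed
                // result whose message lists "[user],[password]".
                return validate(Arrays.asList("url", "user", "password"));
            }

            @Override
            public void prepare(SparkRuntimeEnvironment prepareEnv) {
                // nothing to prepare in this sketch
            }
        };

        component.validateConfig();
    }
}
```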
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java
deleted file mode 100644
index d785910798e0..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch;
-
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.Component;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-/**
- * BatchReader
- */
-public interface BatchReader extends Component {
-
- /**
- * read data from the source and return a dataset
- * @param env env
- * @return Dataset
- */
- Dataset<Row> read(SparkRuntimeEnvironment env);
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java
deleted file mode 100644
index e6a04839b07f..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch;
-
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.Component;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-/**
- * BatchTransformer
- */
-public interface BatchTransformer extends Component {
-
- /**
- * transform the dataset
- * @param data data
- * @param env env
- * @return Dataset
- */
- Dataset<Row> transform(Dataset<Row> data, SparkRuntimeEnvironment env);
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java
deleted file mode 100644
index c7a3efc7080f..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch;
-
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.Component;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-/**
- * BatchWriter
- */
-public interface BatchWriter extends Component {
-
- /**
- * write data to target storage
- * @param data data
- * @param environment environment
- */
- void write(Dataset<Row> data, SparkRuntimeEnvironment environment);
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java
deleted file mode 100644
index 4022077a0161..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.reader;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.SQL;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.Arrays;
-
-import com.google.common.base.Strings;
-
-/**
- * HiveReader
- */
-public class HiveReader implements BatchReader {
-
- private final Config config;
-
- public HiveReader(Config config) {
- this.config = config;
- }
-
- @Override
- public Config getConfig() {
- return config;
- }
-
- @Override
- public ValidateResult validateConfig() {
- return validate(Arrays.asList(DATABASE, TABLE));
- }
-
- @Override
- public void prepare(SparkRuntimeEnvironment prepareEnv) {
- if (Strings.isNullOrEmpty(config.getString(SQL))) {
- config.put(SQL, "select * from " + config.getString(DATABASE) + "." + config.getString(TABLE));
- }
- }
-
- @Override
- public Dataset<Row> read(SparkRuntimeEnvironment env) {
- return env.sparkSession().sql(config.getString(SQL));
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java
deleted file mode 100644
index 97ae41405146..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.reader;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DOTS;
-import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
-import static org.apache.dolphinscheduler.data.quality.Constants.JDBC;
-import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.URL;
-import static org.apache.dolphinscheduler.data.quality.Constants.USER;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils;
-
-import org.apache.spark.sql.DataFrameReader;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-import org.apache.spark.sql.SparkSession;
-
-import java.net.URLDecoder;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import lombok.SneakyThrows;
-
-/**
- * JdbcReader
- */
-public class JdbcReader implements BatchReader {
-
- private final Config config;
-
- public JdbcReader(Config config) {
- this.config = config;
- }
-
- @Override
- public Config getConfig() {
- return config;
- }
-
- @Override
- public ValidateResult validateConfig() {
- return validate(Arrays.asList(URL, TABLE, USER, PASSWORD));
- }
-
- @Override
- public void prepare(SparkRuntimeEnvironment prepareEnv) {
- // Do nothing
- }
-
- @Override
- public Dataset<Row> read(SparkRuntimeEnvironment env) {
- return jdbcReader(env.sparkSession()).load();
- }
-
- @SneakyThrows
- private DataFrameReader jdbcReader(SparkSession sparkSession) {
-
- DataFrameReader reader = sparkSession.read()
- .format(JDBC)
- .option(URL, config.getString(URL))
- .option(DB_TABLE, config.getString(DATABASE) + "." + config.getString(TABLE))
- .option(USER, config.getString(USER))
- .option(PASSWORD, URLDecoder.decode(config.getString(PASSWORD), UTF_8.name()))
- .option(DRIVER, config.getString(DRIVER));
-
- Config jdbcConfig = ConfigUtils.extractSubConfig(config, JDBC + DOTS, false);
-
- if (!jdbcConfig.isEmpty()) {
- Map<String, String> optionMap = new HashMap<>(16);
- jdbcConfig.entrySet().forEach(x -> optionMap.put(x.getKey(), String.valueOf(x.getValue())));
- reader.options(optionMap);
- }
-
- return reader;
- }
-}
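
To make the option pass-through in the deleted reader concrete: the mandatory connection options came from fixed keys, while anything under the `jdbc.` prefix was extracted by `ConfigUtils.extractSubConfig` and handed to Spark's `DataFrameReader.options`. A sketch of such a config, assuming the `Constants` literals are the plain lowercase key names (e.g. `URL = "url"`, `DOTS = "."`):

```java
import org.apache.dolphinscheduler.data.quality.config.Config;
import org.apache.dolphinscheduler.data.quality.flow.batch.reader.JdbcReader;

import java.util.HashMap;
import java.util.Map;

public class JdbcReaderConfigSketch {

    public static JdbcReader buildReader() {
        Map<String, Object> raw = new HashMap<>();
        raw.put("url", "jdbc:postgresql://db-host:5432/dq"); // URL
        raw.put("database", "public");                       // DATABASE, combined as database.table
        raw.put("table", "orders");                          // TABLE
        raw.put("user", "reader");                           // USER
        raw.put("password", "s3cret");                       // PASSWORD, URL-decoded before use
        raw.put("driver", "org.postgresql.Driver");          // DRIVER
        // "jdbc."-prefixed keys are stripped of the prefix and passed
        // straight to DataFrameReader.options(...).
        raw.put("jdbc.fetchsize", "1000");
        raw.put("jdbc.queryTimeout", "30");
        return new JdbcReader(new Config(raw));
    }
}
```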
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java
deleted file mode 100644
index cafe3decdb8c..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.reader;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ReaderConfig;
-import org.apache.dolphinscheduler.data.quality.enums.ReaderType;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * ReaderFactory
- */
-public class ReaderFactory {
-
- private static class Singleton {
-
- static ReaderFactory instance = new ReaderFactory();
- }
-
- public static ReaderFactory getInstance() {
- return Singleton.instance;
- }
-
- public List<BatchReader> getReaders(SparkRuntimeEnvironment sparkRuntimeEnvironment,
- List<ReaderConfig> readerConfigs) throws DataQualityException {
-
- List<BatchReader> readerList = new ArrayList<>();
-
- for (ReaderConfig readerConfig : readerConfigs) {
- BatchReader reader = getReader(readerConfig);
- if (reader != null) {
- reader.validateConfig();
- reader.prepare(sparkRuntimeEnvironment);
- readerList.add(reader);
- }
- }
-
- return readerList;
- }
-
- private BatchReader getReader(ReaderConfig readerConfig) throws DataQualityException {
- ReaderType readerType = ReaderType.getType(readerConfig.getType());
- Config config = new Config(readerConfig.getConfig());
- if (readerType != null) {
- switch (readerType) {
- case JDBC:
- return new JdbcReader(config);
- case HIVE:
- return new HiveReader(config);
- default:
- throw new DataQualityException("reader type " + readerType + " is not supported!");
- }
- }
-
- return null;
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java
deleted file mode 100644
index 756a7bc07f4b..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.transformer;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.SQL;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.Collections;
-
-/**
- * SqlTransformer
- */
-public class SqlTransformer implements BatchTransformer {
-
- private final Config config;
-
- public SqlTransformer(Config config) {
- this.config = config;
- }
-
- @Override
- public Config getConfig() {
- return config;
- }
-
- @Override
- public ValidateResult validateConfig() {
- return validate(Collections.singletonList(SQL));
- }
-
- @Override
- public void prepare(SparkRuntimeEnvironment prepareEnv) {
- // Do nothing
- }
-
- @Override
- public Dataset transform(Dataset data, SparkRuntimeEnvironment env) {
- return env.sparkSession().sql(config.getString(SQL));
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java
deleted file mode 100644
index bc5a19eac9cc..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.transformer;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.TransformerConfig;
-import org.apache.dolphinscheduler.data.quality.enums.TransformerType;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * TransformerFactory
- */
-public class TransformerFactory {
-
- private static class Singleton {
-
- static TransformerFactory instance = new TransformerFactory();
- }
-
- public static TransformerFactory getInstance() {
- return Singleton.instance;
- }
-
- public List<BatchTransformer> getTransformer(SparkRuntimeEnvironment sparkRuntimeEnvironment,
- List<TransformerConfig> transformerConfigs) throws DataQualityException {
-
- List<BatchTransformer> transformers = new ArrayList<>();
-
- for (TransformerConfig transformerConfig : transformerConfigs) {
- BatchTransformer transformer = getTransformer(transformerConfig);
- if (transformer != null) {
- transformer.validateConfig();
- transformer.prepare(sparkRuntimeEnvironment);
- transformers.add(transformer);
- }
- }
-
- return transformers;
- }
-
- private BatchTransformer getTransformer(TransformerConfig transformerConfig) throws DataQualityException {
- TransformerType transformerType = TransformerType.getType(transformerConfig.getType());
- Config config = new Config(transformerConfig.getConfig());
- if (transformerType != null) {
- if (transformerType == TransformerType.SQL) {
- return new SqlTransformer(config);
- }
- throw new DataQualityException("transformer type " + transformerType + " is not supported!");
- }
-
- return null;
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java
deleted file mode 100644
index b737567f2147..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.writer;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.APPEND;
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
-import static org.apache.dolphinscheduler.data.quality.Constants.JDBC;
-import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD;
-import static org.apache.dolphinscheduler.data.quality.Constants.SAVE_MODE;
-import static org.apache.dolphinscheduler.data.quality.Constants.SQL;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.URL;
-import static org.apache.dolphinscheduler.data.quality.Constants.USER;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.net.URLDecoder;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-
-import lombok.SneakyThrows;
-
-import com.google.common.base.Strings;
-
-/**
- * JdbcWriter
- */
-public class JdbcWriter implements BatchWriter {
-
- private final Config config;
-
- public JdbcWriter(Config config) {
- this.config = config;
- }
-
- @Override
- public Config getConfig() {
- return config;
- }
-
- @Override
- public ValidateResult validateConfig() {
- return validate(Arrays.asList(URL, TABLE, USER, PASSWORD));
- }
-
- @Override
- public void prepare(SparkRuntimeEnvironment prepareEnv) {
- if (Strings.isNullOrEmpty(config.getString(SAVE_MODE))) {
- config.put(SAVE_MODE, APPEND);
- }
- }
-
- @SneakyThrows
- @Override
- public void write(Dataset<Row> data, SparkRuntimeEnvironment env) {
- if (!Strings.isNullOrEmpty(config.getString(SQL))) {
- data = env.sparkSession().sql(config.getString(SQL));
- }
-
- data.write()
- .format(JDBC)
- .option(DRIVER, config.getString(DRIVER))
- .option(URL, config.getString(URL))
- .option(DB_TABLE, config.getString(DATABASE) + "." + config.getString(TABLE))
- .option(USER, config.getString(USER))
- .option(PASSWORD, URLDecoder.decode(config.getString(PASSWORD), StandardCharsets.UTF_8.name()))
- .mode(config.getString(SAVE_MODE))
- .save();
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java
deleted file mode 100644
index e9fc2af6c851..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.writer;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.WriterConfig;
-import org.apache.dolphinscheduler.data.quality.enums.WriterType;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-import org.apache.dolphinscheduler.data.quality.flow.batch.writer.file.HdfsFileWriter;
-import org.apache.dolphinscheduler.data.quality.flow.batch.writer.file.LocalFileWriter;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * WriterFactory
- */
-public class WriterFactory {
-
- private static class Singleton {
-
- static WriterFactory instance = new WriterFactory();
- }
-
- public static WriterFactory getInstance() {
- return Singleton.instance;
- }
-
- public List<BatchWriter> getWriters(SparkRuntimeEnvironment sparkRuntimeEnvironment,
- List<WriterConfig> writerConfigs) throws DataQualityException {
-
- List<BatchWriter> writerList = new ArrayList<>();
-
- for (WriterConfig writerConfig : writerConfigs) {
- BatchWriter writer = getWriter(writerConfig);
- if (writer != null) {
- writer.validateConfig();
- writer.prepare(sparkRuntimeEnvironment);
- writerList.add(writer);
- }
- }
-
- return writerList;
- }
-
- private BatchWriter getWriter(WriterConfig writerConfig) throws DataQualityException {
-
- WriterType writerType = WriterType.getType(writerConfig.getType());
- Config config = new Config(writerConfig.getConfig());
- if (writerType != null) {
- switch (writerType) {
- case JDBC:
- return new JdbcWriter(config);
- case LOCAL_FILE:
- return new LocalFileWriter(config);
- case HDFS_FILE:
- return new HdfsFileWriter(config);
- default:
- throw new DataQualityException("writer type " + writerType + " is not supported!");
- }
- }
-
- return null;
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java
deleted file mode 100644
index 3492436a74f9..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.SAVE_MODE;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils;
-
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.spark.sql.DataFrameWriter;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import com.google.common.base.Strings;
-
-/**
- * BaseFileWriter
- */
-public abstract class BaseFileWriter implements BatchWriter {
-
- public static final String PARTITION_BY = "partition_by";
- public static final String SERIALIZER = "serializer";
- public static final String PATH = "path";
-
- private final Config config;
-
- protected BaseFileWriter(Config config) {
- this.config = config;
- }
-
- @Override
- public Config getConfig() {
- return config;
- }
-
- @Override
- public void prepare(SparkRuntimeEnvironment prepareEnv) {
- Map<String, Object> defaultConfig = new HashMap<>();
-
- defaultConfig.put(PARTITION_BY, Collections.emptyList());
- defaultConfig.put(SAVE_MODE, "error");
- defaultConfig.put(SERIALIZER, "csv");
-
- config.merge(defaultConfig);
- }
-
- protected ValidateResult checkConfigImpl(List<String> allowedUri) {
-
- if (Boolean.TRUE.equals(config.has(PATH)) && !Strings.isNullOrEmpty(config.getString(PATH))) {
- String dir = config.getString(PATH);
- if (dir.startsWith("/") || uriInAllowedSchema(dir, allowedUri)) {
- return new ValidateResult(true, "");
- } else {
- return new ValidateResult(false,
- "invalid path URI, please set the following allowed schemas: " + String.join(",", allowedUri));
- }
- } else {
- return new ValidateResult(false, "please specify [path] as non-empty string");
- }
- }
-
- protected boolean uriInAllowedSchema(String uri, List<String> allowedUri) {
- return allowedUri.stream().anyMatch(uri::startsWith);
- }
-
- protected String buildPathWithDefaultSchema(String uri, String defaultUriSchema) {
- return uri.startsWith("/") ? defaultUriSchema + uri : uri;
- }
-
- protected void outputImpl(Dataset<Row> df, String defaultUriSchema) {
-
- DataFrameWriter<Row> writer = df.write().mode(config.getString(SAVE_MODE));
-
- if (CollectionUtils.isNotEmpty(config.getStringList(PARTITION_BY))) {
- List<String> partitionKeys = config.getStringList(PARTITION_BY);
- writer.partitionBy(partitionKeys.toArray(new String[]{}));
- }
-
- Config fileConfig = ConfigUtils.extractSubConfig(config, "options.", false);
- if (fileConfig.isNotEmpty()) {
- Map<String, String> optionMap = new HashMap<>(16);
- fileConfig.entrySet().forEach(x -> optionMap.put(x.getKey(), String.valueOf(x.getValue())));
- writer.options(optionMap);
- }
-
- String path = buildPathWithDefaultSchema(config.getString(PATH), defaultUriSchema);
-
- switch (config.getString(SERIALIZER)) {
- case "csv":
- writer.csv(path);
- break;
- case "json":
- writer.json(path);
- break;
- case "parquet":
- writer.parquet(path);
- break;
- case "text":
- writer.text(path);
- break;
- case "orc":
- writer.orc(path);
- break;
- default:
- break;
- }
- }
-}
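
For orientation, here is what a config for one of the concrete file writers below might have looked like; only `path`, `serializer`, and `partition_by`, the keys spelled out literally in the deleted source, are used, so the save-mode default from `prepare` applies:

```java
import org.apache.dolphinscheduler.data.quality.config.Config;
import org.apache.dolphinscheduler.data.quality.flow.batch.writer.file.LocalFileWriter;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class FileWriterConfigSketch {

    public static LocalFileWriter buildWriter() {
        Map<String, Object> raw = new HashMap<>();
        // A "/" path passes checkConfigImpl and is later prefixed with the
        // writer's default schema ("file://" for LocalFileWriter).
        raw.put("path", "/tmp/dq-result");
        raw.put("serializer", "json"); // csv (default) | json | parquet | text | orc
        raw.put("partition_by", Collections.singletonList("dt"));
        return new LocalFileWriter(new Config(raw));
    }
}
```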
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java
deleted file mode 100644
index 7fd39618689c..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.Collections;
-
-/**
- * HdfsFileWriter
- */
-public class HdfsFileWriter extends BaseFileWriter {
-
- public HdfsFileWriter(Config config) {
- super(config);
- }
-
- @Override
- public void write(Dataset<Row> data, SparkRuntimeEnvironment environment) {
- outputImpl(data, "hdfs://");
- }
-
- @Override
- public ValidateResult validateConfig() {
- return checkConfigImpl(Collections.singletonList("hdfs://"));
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java
deleted file mode 100644
index 1741ff20fb07..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.config.ValidateResult;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Row;
-
-import java.util.Collections;
-
-/**
- * LocalFileWriter
- */
-public class LocalFileWriter extends BaseFileWriter {
-
- public LocalFileWriter(Config config) {
- super(config);
- }
-
- @Override
- public void write(Dataset<Row> data, SparkRuntimeEnvironment environment) {
- outputImpl(data, "file://");
- }
-
- @Override
- public ValidateResult validateConfig() {
- return checkConfigImpl(Collections.singletonList("file://"));
- }
-}
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java
deleted file mode 100644
index 877dcef2e975..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.utils;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-public class ConfigUtils {
-
- private ConfigUtils() {
- throw new IllegalStateException("Construct ConfigUtils");
- }
-
- /**
- * Extract sub config with fixed prefix
- *
- * @param source config source
- * @param prefix config prefix
- * @param keepPrefix true if keep prefix
- */
- public static Config extractSubConfig(Config source, String prefix, boolean keepPrefix) {
- Map<String, Object> values = new LinkedHashMap<>();
-
- for (Map.Entry<String, Object> entry : source.entrySet()) {
- final String key = entry.getKey();
- final String value = String.valueOf(entry.getValue());
-
- if (key.startsWith(prefix)) {
- if (keepPrefix) {
- values.put(key, value);
- } else {
- values.put(key.substring(prefix.length()), value);
- }
- }
- }
-
- return new Config(values);
- }
-}
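
The prefix extraction being deleted behaved as in this short sketch:

```java
import org.apache.dolphinscheduler.data.quality.config.Config;
import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils;

import java.util.LinkedHashMap;
import java.util.Map;

public class ConfigUtilsSketch {

    public static void main(String[] args) {
        Map<String, Object> raw = new LinkedHashMap<>();
        raw.put("jdbc.fetchsize", "1000");
        raw.put("jdbc.isolationLevel", "READ_COMMITTED");
        raw.put("url", "jdbc:mysql://host:3306/db");

        // keepPrefix=false drops the "jdbc." prefix, yielding
        // {fetchsize=1000, isolationLevel=READ_COMMITTED}; "url" is ignored.
        // With keepPrefix=true the original prefixed keys would be retained.
        Config sub = ConfigUtils.extractSubConfig(new Config(raw), "jdbc.", false);
    }
}
```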
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java
deleted file mode 100644
index f20cc8568732..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.utils;
-
-import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT;
-import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT;
-import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
-import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL;
-import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
-import static com.fasterxml.jackson.databind.SerializationFeature.FAIL_ON_EMPTY_BEANS;
-
-import org.apache.dolphinscheduler.data.quality.Constants;
-
-import java.text.SimpleDateFormat;
-import java.util.TimeZone;
-
-import lombok.extern.slf4j.Slf4j;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
-
-/**
- * JsonUtils
- */
-@Slf4j
-public class JsonUtils {
-
- /**
- * A single shared ObjectMapper instance; reuse it instead of creating new mappers.
- */
- private static final ObjectMapper MAPPER = new ObjectMapper()
- .configure(FAIL_ON_UNKNOWN_PROPERTIES, false)
- .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
- .configure(ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true)
- .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
- .configure(REQUIRE_SETTERS_FOR_GETTERS, true)
- .configure(FAIL_ON_EMPTY_BEANS, false)
- .setTimeZone(TimeZone.getDefault())
- .setDateFormat(new SimpleDateFormat(Constants.YYYY_MM_DD_HH_MM_SS));
-
- private JsonUtils() {
- throw new UnsupportedOperationException("Construct JSONUtils");
- }
-
- public static <T> T fromJson(String json, Class<T> clazz) {
- if (Strings.isNullOrEmpty(json)) {
- return null;
- }
-
- try {
- return MAPPER.readValue(json, clazz);
- } catch (Exception e) {
- log.error("parse object exception!", e);
- }
-
- return null;
- }
-}
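
Usage of the deleted helper was straightforward; note that parse failures are logged and surfaced as `null` rather than as exceptions. The `RuleResult` POJO here is purely hypothetical:

```java
import org.apache.dolphinscheduler.data.quality.utils.JsonUtils;

public class JsonUtilsSketch {

    // Hypothetical target type; unknown JSON properties are ignored
    // because FAIL_ON_UNKNOWN_PROPERTIES is disabled on the mapper.
    public static class RuleResult {

        public int status;
        public String ruleName;
    }

    public static void main(String[] args) {
        RuleResult ok = JsonUtils.fromJson("{\"status\":1,\"ruleName\":\"null_check\"}", RuleResult.class);
        RuleResult bad = JsonUtils.fromJson("{not json", RuleResult.class); // logged, returns null
    }
}
```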
diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtils.java
deleted file mode 100644
index e2b3fc5abfc8..000000000000
--- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ParserUtils.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.utils;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-
-import java.net.URLDecoder;
-import java.net.URLEncoder;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * ParserUtils
- */
-@Slf4j
-public class ParserUtils {
-
- private ParserUtils() {
- throw new UnsupportedOperationException("Construct ParserUtils");
- }
-
- public static String encode(String str) {
- String rs = str;
- try {
- rs = URLEncoder.encode(str, UTF_8.toString());
- } catch (Exception e) {
- log.error("encode str exception!", e);
- }
-
- return rs;
- }
-
- public static String decode(String str) {
- String rs = str;
- try {
- rs = URLDecoder.decode(str, UTF_8.toString());
- } catch (Exception e) {
- log.error("decode str exception!", e);
- }
-
- return rs;
- }
-}
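The removed class predates the `Charset` overloads added to `URLEncoder`/`URLDecoder` in Java 10; on a current JDK the same utility reduces to two one-liners with no checked exception to swallow. A sketch (class name is illustrative):

```java
import static java.nio.charset.StandardCharsets.UTF_8;

import java.net.URLDecoder;
import java.net.URLEncoder;

public final class UrlCodec {

    private UrlCodec() {
    }

    // The Charset overloads cannot throw UnsupportedEncodingException,
    // so no try/catch fallback to the raw input is needed.
    public static String encode(String str) {
        return URLEncoder.encode(str, UTF_8);
    }

    public static String decode(String str) {
        return URLDecoder.decode(str, UTF_8);
    }
}
```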
diff --git a/dolphinscheduler-data-quality/src/main/resources/log4j.properties b/dolphinscheduler-data-quality/src/main/resources/log4j.properties
deleted file mode 100644
index 1397518b8dd1..000000000000
--- a/dolphinscheduler-data-quality/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-log4j.rootLogger=INFO, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.Target=System.out
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS Z} %-5p [%c] - %m%n
\ No newline at end of file
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java
deleted file mode 100644
index 693527604977..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.jupiter.api.BeforeEach;
-
-/**
- * SparkApplicationTestBase
- */
-public class SparkApplicationTestBase {
-
- protected SparkRuntimeEnvironment sparkRuntimeEnvironment;
-
- @BeforeEach
- public void init() {
- Map<String, Object> config = new HashMap<>();
- config.put("spark.app.name", "data quality test");
- config.put("spark.sql.crossJoin.enabled", "true");
- config.put("spark.driver.bindAddress", "127.0.0.1");
- config.put("spark.ui.port", 13000);
- config.put("spark.master", "local[4]");
-
- // The hive client is disabled so the unit test can run locally without a Hive installation.
- sparkRuntimeEnvironment = new SparkRuntimeEnvironment(new Config(config), false);
- }
-}
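The deleted base class hid the session behind the project's `SparkRuntimeEnvironment`; the same local test environment can be stood up directly with the public Spark API. A sketch, assuming `spark-sql` is on the test classpath (Hive support stays off, as in the deleted code):

```java
import org.apache.spark.sql.SparkSession;

public class LocalSparkSessions {

    public static SparkSession create() {
        return SparkSession.builder()
                .appName("data quality test")
                .master("local[4]") // four local threads, no cluster needed
                .config("spark.sql.crossJoin.enabled", "true")
                .config("spark.driver.bindAddress", "127.0.0.1")
                .config("spark.ui.port", "13000")
                .getOrCreate();
    }
}
```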
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java
deleted file mode 100644
index b81e9ad4ba62..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.configuration;
-
-import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
-import org.apache.dolphinscheduler.data.quality.utils.JsonUtils;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-/**
- * ConfigurationParserTest
- */
-public class ConfigurationParserTest {
-
- @Test
- public void testConfigurationValidate() {
- Assertions.assertEquals(1, verifyConfigurationValidate());
- }
-
- private int verifyConfigurationValidate() {
- int flag = 1;
- try {
- String parameterStr = "{\"name\":\"data quality test\",\"env\":{\"type\":\"batch\",\"config\":null},"
- + "\"readers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"Test@123!\","
- + "\"driver\":\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test1\",\"table\":\"test1\","
- + "\"url\":\"jdbc:mysql://172.16.100.199:3306/test\"} }],\"transformers\":[{\"type\":\"sql\",\"config\":"
- + "{\"index\":1,\"output_table\":\"miss_count\",\"sql\":\"SELECT COUNT(*) AS miss FROM test1 WHERE (c1 is null or c1 = '') \"} },"
- + "{\"type\":\"sql\",\"config\":{\"index\":2,\"output_table\":\"total_count\",\"sql\":\"SELECT COUNT(*) AS total FROM test1 \"} }],"
- + "\"writers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":\"test\","
- + "\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":\"t_ds_dq_execute_result\","
- + "\"url\":\"jdbc:postgresql://172.16.100.199:5432/dolphinscheduler?stringtype=unspecified\","
- + "\"sql\":\"SELECT 0 as rule_type,'data quality test' as rule_name,7 as process_definition_id,80 as process_instance_id,"
- + "80 as task_instance_id,miss_count.miss AS statistics_value, total_count.total AS comparison_value,2 as check_type,10 as"
- + " threshold, 3 as operator, 0 as failure_strategy, '2021-06-29 10:18:59' as create_time,'2021-06-29 10:18:59' as update_time "
- + "from miss_count FULL JOIN total_count\"} }]}";
-
- DataQualityConfiguration dataQualityConfiguration =
- JsonUtils.fromJson(parameterStr, DataQualityConfiguration.class);
- dataQualityConfiguration.validate();
- } catch (Exception e) {
- flag = 0;
- e.printStackTrace();
- }
- return flag;
- }
-}
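The int-flag bookkeeping above obscures the actual assertion; JUnit 5 states it directly with `assertDoesNotThrow`. A self-contained sketch using Jackson's `readTree` in place of the deleted `DataQualityConfiguration.validate()`:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

public class ConfigurationValidateTest {

    @Test
    public void parseDoesNotThrow() {
        String parameterStr = "{\"name\":\"data quality test\"}"; // abbreviated payload
        // Fails the test with the original exception attached if parsing throws.
        Assertions.assertDoesNotThrow(
                () -> new ObjectMapper().readTree(parameterStr));
    }
}
```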
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java
deleted file mode 100644
index 4efcd67ae9f7..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow;
-
-import org.apache.dolphinscheduler.data.quality.SparkApplicationTestBase;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.util.Properties;
-
-/**
- * FlowTestBase
- */
-public class FlowTestBase extends SparkApplicationTestBase {
-
- protected String url = "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true";
-
- protected String driver = "org.h2.Driver";
-
- protected Connection getConnection() throws Exception {
- Properties properties = new Properties();
- properties.setProperty("user", "test");
- properties.setProperty("password", "123456");
- properties.setProperty("rowId", "false");
- DriverManager.registerDriver(new org.h2.Driver());
- Class.forName(driver, false, this.getClass().getClassLoader());
- return DriverManager.getConnection(url, properties);
- }
-
-}
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java
deleted file mode 100644
index 8ce8d54cf850..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.reader;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
-import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.URL;
-import static org.apache.dolphinscheduler.data.quality.Constants.USER;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.flow.FlowTestBase;
-import org.apache.dolphinscheduler.data.quality.flow.batch.reader.JdbcReader;
-
-import java.sql.Connection;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/**
- * JdbcConnectorTest
- */
-public class JdbcReaderTest extends FlowTestBase {
-
- @BeforeEach
- public void before() {
- super.init();
- createConnectorTable();
- }
-
- @Test
- public void testJdbcConnectorExecute() {
- JdbcReader jdbcReader = new JdbcReader(buildReaderConfig());
- Assertions.assertNotNull(jdbcReader.read(sparkRuntimeEnvironment));
- }
-
- private Config buildReaderConfig() {
- Map<String, Object> config = new HashMap<>();
- config.put(DATABASE, "test");
- config.put(TABLE, "test1");
- config.put(URL, url);
- config.put(USER, "test");
- config.put(PASSWORD, "123456");
- config.put(DRIVER, driver);
- return new Config(config);
- }
-
- private void createConnectorTable() {
- try {
- Connection connection = getConnection();
- connection.prepareStatement("create schema if not exists test").executeUpdate();
-
- connection.prepareStatement("drop table if exists test.test1").executeUpdate();
- connection
- .prepareStatement(
- "CREATE TABLE test.test1 (\n"
- + " `id` int(11) NOT NULL AUTO_INCREMENT,\n"
- + " `company` varchar(255) DEFAULT NULL,\n"
- + " `date` varchar(255) DEFAULT NULL,\n"
- + " `c1` varchar(255) DEFAULT NULL,\n"
- + " `c2` varchar(255) DEFAULT NULL,\n"
- + " `c3` varchar(255) DEFAULT NULL,\n"
- + " `c4` int(11) DEFAULT NULL,\n"
- + " PRIMARY KEY (`id`)\n"
- + ")")
- .executeUpdate();
- connection.prepareStatement("INSERT INTO test.test1 (company,`date`,c1,c2,c3,c4) VALUES\n"
- + "\t ('1','2019-03-01','11','12','13',1),\n"
- + "\t ('2','2019-06-01','21','22','23',1),\n"
- + "\t ('3','2019-09-01','31','32','33',1),\n"
- + "\t ('4','2019-12-01','41','42','43',1),\n"
- + "\t ('5','2013','42','43','54',1),\n"
- + "\t ('6','2020','42','43','54',1);").executeUpdate();
- connection.commit();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
-}
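Note that `createConnectorTable` never closes its `Connection` and swallows failures, which can mask a broken fixture. A try-with-resources sketch of the same H2 setup (table abbreviated):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class H2Fixtures {

    // MODE=MySQL accepts the MySQL-flavoured DDL; DB_CLOSE_DELAY=-1 keeps the
    // in-memory database alive after the last connection closes.
    private static final String URL =
            "jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true";

    public static void createFixture() throws Exception {
        try (Connection connection = DriverManager.getConnection(URL, "test", "123456");
                Statement statement = connection.createStatement()) {
            statement.executeUpdate("create schema if not exists test");
            statement.executeUpdate("drop table if exists test.test1");
            statement.executeUpdate("CREATE TABLE test.test1 ("
                    + "`id` int NOT NULL AUTO_INCREMENT, `c1` varchar(255), PRIMARY KEY (`id`))");
        } // both resources are closed here, even when a statement fails
    }
}
```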
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java
deleted file mode 100644
index dbe1aa6b53c4..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.reader;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
-import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.URL;
-import static org.apache.dolphinscheduler.data.quality.Constants.USER;
-
-import org.apache.dolphinscheduler.data.quality.config.ReaderConfig;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader;
-import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-/**
- * ConnectorFactoryTest
- */
-public class ReaderFactoryTest {
-
- @Test
- public void testConnectorGenerate() throws DataQualityException {
-
- List<ReaderConfig> readerConfigs = new ArrayList<>();
- ReaderConfig readerConfig = new ReaderConfig();
- readerConfig.setType("JDBC");
- Map<String, Object> config = new HashMap<>();
- config.put(DATABASE, "test");
- config.put(TABLE, "test1");
- config.put(URL, "jdbc:mysql://localhost:3306/test");
- config.put(USER, "test");
- config.put(PASSWORD, "123456");
- config.put(DRIVER, "com.mysql.cj.jdbc.Driver");
- readerConfig.setConfig(config);
- readerConfigs.add(readerConfig);
-
- int flag = 0;
-
- List<BatchReader> readers = ReaderFactory.getInstance().getReaders(null, readerConfigs);
- if (readers != null && readers.size() >= 1) {
- flag = 1;
- }
-
- Assertions.assertEquals(1, flag);
- }
-}
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java
deleted file mode 100644
index 51bbca843a0e..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.writer;
-
-import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
-import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
-import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD;
-import static org.apache.dolphinscheduler.data.quality.Constants.TABLE;
-import static org.apache.dolphinscheduler.data.quality.Constants.URL;
-import static org.apache.dolphinscheduler.data.quality.Constants.USER;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-import org.apache.dolphinscheduler.data.quality.flow.FlowTestBase;
-import org.apache.dolphinscheduler.data.quality.flow.batch.reader.JdbcReader;
-import org.apache.dolphinscheduler.data.quality.flow.batch.writer.JdbcWriter;
-
-import java.sql.Connection;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-/**
- * JdbcWriterTest
- */
-public class JdbcWriterTest extends FlowTestBase {
-
- @BeforeEach
- public void before() {
- super.init();
- createWriterTable();
- }
-
- @Test
- public void testJdbcWriterExecute() {
- JdbcReader jdbcConnector = new JdbcReader(buildJdbcReaderConfig());
- JdbcWriter jdbcWriter = new JdbcWriter(buildJdbcConfig());
- jdbcWriter.write(jdbcConnector.read(sparkRuntimeEnvironment), sparkRuntimeEnvironment);
- }
-
- private Config buildJdbcConfig() {
- Map<String, Object> config = new HashMap<>();
- config.put(DATABASE, "test");
- config.put(TABLE, "test.test2");
- config.put(URL, url);
- config.put(USER, "test");
- config.put(PASSWORD, "123456");
- config.put(DRIVER, driver);
- config.put("save_mode", "append");
- return new Config(config);
- }
-
- private Config buildJdbcReaderConfig() {
- Config config = buildJdbcConfig();
- config.put("sql", "SELECT '1' as company,'1' as date,'2' as c1,'2' as c2,'2' as c3, 2 as c4");
- return config;
- }
-
- private void createWriterTable() {
- try {
- Connection connection = getConnection();
- connection.prepareStatement("create schema if not exists test").executeUpdate();
-
- connection.prepareStatement("drop table if exists test.test2").executeUpdate();
- connection
- .prepareStatement(
- "CREATE TABLE test.test2 (\n"
- + " `id` int(11) NOT NULL AUTO_INCREMENT,\n"
- + " `company` varchar(255) DEFAULT NULL,\n"
- + " `date` varchar(255) DEFAULT NULL,\n"
- + " `c1` varchar(255) DEFAULT NULL,\n"
- + " `c2` varchar(255) DEFAULT NULL,\n"
- + " `c3` varchar(255) DEFAULT NULL,\n"
- + " `c4` int(11) DEFAULT NULL,\n"
- + " PRIMARY KEY (`id`)\n"
- + ")")
- .executeUpdate();
- connection.prepareStatement("set schema test").executeUpdate();
- connection.commit();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
-}
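Behind the deleted `JdbcWriter` sits Spark's built-in JDBC sink, which the `"save_mode": "append"` entry above maps onto directly. A sketch of that write path using only public Spark APIs:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

public class JdbcSink {

    // Append a DataFrame to a JDBC table, as the removed writer's
    // "append" save mode did.
    public static void write(Dataset<Row> data, String url, String table,
                             String user, String password) {
        data.write()
                .format("jdbc")
                .option("url", url)
                .option("dbtable", table)
                .option("user", user)
                .option("password", password)
                .mode(SaveMode.Append)
                .save();
    }
}
```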
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java
deleted file mode 100644
index 4ef59d0bf6f3..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.flow.writer;
-
-import org.apache.dolphinscheduler.data.quality.config.WriterConfig;
-import org.apache.dolphinscheduler.data.quality.exception.DataQualityException;
-import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter;
-import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-/**
- * WriterFactoryTest
- */
-public class WriterFactoryTest {
-
- @Test
- public void testWriterGenerate() throws DataQualityException {
-
- List<WriterConfig> writerConfigs = new ArrayList<>();
- WriterConfig writerConfig = new WriterConfig();
- writerConfig.setType("JDBC");
- writerConfig.setConfig(null);
- writerConfigs.add(writerConfig);
-
- int flag = 0;
-
- List<BatchWriter> writers = WriterFactory.getInstance().getWriters(null, writerConfigs);
- if (writers != null && writers.size() >= 1) {
- flag = 1;
- }
-
- Assertions.assertEquals(1, flag);
- }
-}
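Both ReaderFactoryTest above and this WriterFactoryTest exercise the same pattern: a registry keyed by the config's `type` string. A generic sketch of that dispatch, independent of the deleted `ReaderFactory`/`WriterFactory` (all names here are illustrative):

```java
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

public class TypedFactory<C, T> {

    // Maps an upper-cased type name ("JDBC", "HIVE", ...) to a constructor.
    private final Map<String, Function<C, T>> registry = new HashMap<>();

    public TypedFactory<C, T> register(String type, Function<C, T> constructor) {
        registry.put(type.toUpperCase(Locale.ROOT), constructor);
        return this;
    }

    public T create(String type, C config) {
        Function<C, T> constructor = registry.get(type.toUpperCase(Locale.ROOT));
        if (constructor == null) {
            throw new IllegalArgumentException("unsupported type: " + type);
        }
        return constructor.apply(config);
    }
}
```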
diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java
deleted file mode 100644
index 2aa7a9dca081..000000000000
--- a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.data.quality.utils;
-
-import org.apache.dolphinscheduler.data.quality.config.Config;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class ConfigUtilsTest {
-
- @Test
- public void testExtractSubConfig() {
- // Setup
- Map<String, Object> configMap = new HashMap<>();
- configMap.put("aaa.www", "1");
- configMap.put("bbb.www", "1");
-
- final Config source = new Config(configMap);
-
- // Run the test
- final Config result = ConfigUtils.extractSubConfig(source, "aaa", false);
- int expect = 1;
- int actual = result.entrySet().size();
-
- Assertions.assertEquals(expect, actual);
- }
-}
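The deleted `ConfigUtils.extractSubConfig` source is not shown in this patch, but the test pins its contract: given prefix `aaa`, only `aaa.www` survives. A plausible sketch of that prefix filter (the names and the `keepPrefix` semantics are assumptions):

```java
import java.util.HashMap;
import java.util.Map;

public final class SubConfigs {

    private SubConfigs() {
    }

    // Keep entries whose key starts with "<prefix>."; when keepPrefix is false,
    // the prefix is stripped from the keys of the extracted map.
    public static Map<String, Object> extract(Map<String, Object> source,
                                              String prefix,
                                              boolean keepPrefix) {
        String qualified = prefix + ".";
        Map<String, Object> result = new HashMap<>();
        for (Map.Entry<String, Object> entry : source.entrySet()) {
            String key = entry.getKey();
            if (key.startsWith(qualified)) {
                result.put(keepPrefix ? key : key.substring(qualified.length()),
                        entry.getValue());
            }
        }
        return result;
    }
}
```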
diff --git a/dolphinscheduler-data-quality/src/test/resources/logback.xml b/dolphinscheduler-data-quality/src/test/resources/logback.xml
deleted file mode 100644
index 9a182a18ef12..000000000000
--- a/dolphinscheduler-data-quality/src/test/resources/logback.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<!-- logback.xml (21 lines): ASF license header and a console-appender configuration; the XML markup was lost in extraction -->
diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/constants/DataSourceConstants.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/constants/DataSourceConstants.java
index 18ae55d7e9c4..46b81a416568 100644
--- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/constants/DataSourceConstants.java
+++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/constants/DataSourceConstants.java
@@ -414,13 +414,6 @@ public class DataSourceConstants {
*/
public static final String RESOURCE_UPLOAD_PATH = "resource.storage.upload.base.path";
- /**
- * data.quality.jar.dir
- */
- public static final String DATA_QUALITY_JAR_DIR = "data-quality.jar.dir";
-
- public static final String TASK_TYPE_DATA_QUALITY = "DATA_QUALITY";
-
public static final Set<String> TASK_TYPE_SET_K8S = Sets.newHashSet("K8S", "KUBEFLOW");
/**
diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java
index 04bd243c0e03..7e5fb6555a0b 100644
--- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java
+++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java
@@ -18,7 +18,6 @@
package org.apache.dolphinscheduler.plugin.datasource.api.utils;
import static org.apache.dolphinscheduler.common.constants.Constants.RESOURCE_STORAGE_TYPE;
-import static org.apache.dolphinscheduler.plugin.datasource.api.constants.DataSourceConstants.DATA_QUALITY_JAR_DIR;
import static org.apache.dolphinscheduler.plugin.datasource.api.constants.DataSourceConstants.HADOOP_SECURITY_AUTHENTICATION;
import static org.apache.dolphinscheduler.plugin.datasource.api.constants.DataSourceConstants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE;
import static org.apache.dolphinscheduler.plugin.datasource.api.constants.DataSourceConstants.JAVA_SECURITY_KRB5_CONF;
@@ -36,14 +35,10 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
-import java.io.File;
import java.io.IOException;
-import java.util.Optional;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.core.io.ClassPathResource;
-
/**
* common utils
*/
@@ -54,8 +49,6 @@ private CommonUtils() {
throw new UnsupportedOperationException("Construct CommonUtils");
}
- private static String DEFAULT_DATA_QUALITY_JAR_PATH = null;
-
private static final boolean IS_DEVELOP_MODE = PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE, true);
/**
@@ -132,83 +125,6 @@ public static boolean loadKerberosConf(String javaSecurityKrb5Conf, String login
return false;
}
- public static String getDataQualityJarPath() {
- log.info("Trying to get data quality jar in path");
- String dqJarDir = PropertyUtils.getString(DATA_QUALITY_JAR_DIR);
-
- if (StringUtils.isNotEmpty(dqJarDir)) {
- log.info(
- "Configuration data-quality.jar.dir is not empty, will try to get data quality jar from directory {}",
- dqJarDir);
- getDataQualityJarPathFromPath(dqJarDir).ifPresent(jarName -> DEFAULT_DATA_QUALITY_JAR_PATH = jarName);
- }
-
- if (StringUtils.isEmpty(DEFAULT_DATA_QUALITY_JAR_PATH)) {
- log.info("data quality jar path is empty, will try to auto discover it from build-in rules.");
- getDefaultDataQualityJarPath();
- }
-
- if (StringUtils.isEmpty(DEFAULT_DATA_QUALITY_JAR_PATH)) {
- log.error(
- "Can not find data quality jar in both configuration and auto discover, please check your configuration or report a bug.");
- throw new RuntimeException("data quality jar path is empty");
- }
-
- return DEFAULT_DATA_QUALITY_JAR_PATH;
- }
-
- private static String getDefaultDataQualityJarPath() {
- if (StringUtils.isNotEmpty(DEFAULT_DATA_QUALITY_JAR_PATH)) {
- return DEFAULT_DATA_QUALITY_JAR_PATH;
- }
- try {
- // not standalone mode
- String currentAbsolutePath = new ClassPathResource("./").getFile().getAbsolutePath();
- String currentLibPath = currentAbsolutePath + "/../libs";
- getDataQualityJarPathFromPath(currentLibPath).ifPresent(jarName -> DEFAULT_DATA_QUALITY_JAR_PATH = jarName);
-
- // standalone mode
- if (StringUtils.isEmpty(DEFAULT_DATA_QUALITY_JAR_PATH)) {
- log.info(
- "Can not get data quality jar from path {}, maybe service running in standalone mode, will try to find another path",
- currentLibPath);
- currentLibPath = currentAbsolutePath + "/../../worker-server/libs";
- getDataQualityJarPathFromPath(currentLibPath)
- .ifPresent(jarName -> DEFAULT_DATA_QUALITY_JAR_PATH = jarName);
- }
- } catch (IOException e) {
- throw new RuntimeException("get default data quality jar path error", e);
- }
- log.info("get default data quality jar name: {}", DEFAULT_DATA_QUALITY_JAR_PATH);
- return DEFAULT_DATA_QUALITY_JAR_PATH;
- }
-
- private static Optional<String> getDataQualityJarPathFromPath(String path) {
- log.info("Try to get data quality jar from path {}", path);
- File[] jars = new File(path).listFiles();
- if (jars == null) {
- log.warn("No any files find given path {}", path);
- return Optional.empty();
- }
- for (File jar : jars) {
- if (jar.getName().startsWith("dolphinscheduler-data-quality")) {
- return Optional.of(jar.getAbsolutePath());
- }
- }
- log.warn("No data quality related jar found from path {}", path);
- return Optional.empty();
- }
-
- /**
- * hdfs udf dir
- *
- * @param tenantCode tenant code
- * @return get udf dir on hdfs
- */
- public static String getHdfsUdfDir(String tenantCode) {
- return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
- }
-
/**
* @param tenantCode tenant code
* @return file directory of tenants on hdfs
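The deleted discovery logic boils down to scanning a directory for the first file whose name starts with `dolphinscheduler-data-quality`, e.g. `findJar(libDir, "dolphinscheduler-data-quality")`. A standalone sketch of that lookup (the `.jar` suffix check is an added guard, not in the original):

```java
import java.io.File;
import java.util.Optional;

public final class JarDiscovery {

    private JarDiscovery() {
    }

    public static Optional<String> findJar(String dir, String prefix) {
        File[] files = new File(dir).listFiles();
        if (files == null) { // path missing, unreadable, or not a directory
            return Optional.empty();
        }
        for (File file : files) {
            String name = file.getName();
            if (name.startsWith(prefix) && name.endsWith(".jar")) {
                return Optional.of(file.getAbsolutePath());
            }
        }
        return Optional.empty();
    }
}
```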
diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml
index b0b5b7f9a078..2b63dee4753d 100644
--- a/dolphinscheduler-dist/pom.xml
+++ b/dolphinscheduler-dist/pom.xml
@@ -59,11 +59,6 @@
            <artifactId>dolphinscheduler-alert-server</artifactId>
        </dependency>

-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-data-quality</artifactId>
-        </dependency>
-
        <dependency>
            <groupId>org.apache.dolphinscheduler</groupId>
            <artifactId>dolphinscheduler-ui</artifactId>
diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java
index 9f68ec5a1367..cf126ed3c659 100644
--- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java
+++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/java/org/apache/dolphinscheduler/e2e/pages/common/NavBarPage.java
@@ -43,9 +43,6 @@ public class NavBarPage {
@FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Resources')]")
private WebElement resourceTab;
- @FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Data Quality')]")
- private WebElement dataQualityTab;
-
@FindBy(xpath = "//div[contains(@class, 'tab-horizontal')]//div[contains(@role,'menubar')]//span[contains(text(), 'Datasource')]")
private WebElement dataSourceTab;
diff --git a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties
index 604befdbf8ca..ff146269d9cf 100644
--- a/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties
+++ b/dolphinscheduler-e2e/dolphinscheduler-e2e-case/src/test/resources/docker/file-manage/common.properties
@@ -84,13 +84,6 @@ datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
-# data quality jar directory path; the data quality jar is auto-discovered from this directory. Keep it empty if you have not changed anything in
-# data-quality, and dolphinscheduler will discover the jar by itself. Set it only if you want to use your own data-quality jar that is not in the worker-server
-# libs directory (but make sure your jar name starts with `dolphinscheduler-data-quality`).
-data-quality.jar.dir=
-
-#data-quality.error.output.path=/tmp/data-quality-error-data
-
# Network IP gets priority; default order is inner, then outer
# Whether hive SQL is executed in the same session
@@ -126,4 +119,4 @@ ml.mlflow.preset_repository=https://github.com/apache/dolphinscheduler-mlflow
ml.mlflow.preset_repository_version="main"
# way to collect applicationId: log(original regex match), aop
-appId.collect: log
\ No newline at end of file
+appId.collect: log
diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java
index fd4c50c7cb75..66ee6b33f6dd 100644
--- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java
+++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/TaskExecutionContextFactory.java
@@ -19,42 +19,22 @@
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.CLUSTER;
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.NAMESPACE_NAME;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.TASK_TYPE_DATA_QUALITY;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_NAME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_CONNECTOR_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_DATASOURCE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_CONNECTOR_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_DATASOURCE_ID;
-import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
-import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.K8sTaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.TaskPluginManager;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ConnectorType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.api.model.JdbcInfo;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.K8sTaskParameters;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.AbstractResourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.DataSourceParameters;
import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper;
-import org.apache.dolphinscheduler.plugin.task.api.utils.JdbcUrlParser;
import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
import org.apache.dolphinscheduler.plugin.task.spark.SparkParameters;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
@@ -62,15 +42,9 @@
import org.apache.dolphinscheduler.server.master.engine.task.runnable.TaskExecutionContextCreateRequest;
import org.apache.dolphinscheduler.service.expand.CuringParamsService;
import org.apache.dolphinscheduler.service.process.ProcessService;
-import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
-import org.apache.dolphinscheduler.spi.datasource.DefaultConnectionParam;
-import org.apache.dolphinscheduler.spi.enums.DbType;
-import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
-import java.util.ArrayList;
-import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -126,23 +100,10 @@ public TaskExecutionContext createTaskExecutionContext(TaskExecutionContextCreat
.buildParamInfo(propertyMap)
.create();
- setDataQualityTaskExecutionContext(taskExecutionContext, taskInstance, workflowInstance.getTenantCode());
setK8sTaskRelatedInfo(taskExecutionContext, taskInstance);
return taskExecutionContext;
}
- // todo: don't merge the dq context here.
- public void setDataQualityTaskExecutionContext(TaskExecutionContext taskExecutionContext,
- TaskInstance taskInstance,
- String tenantCode) {
- if (!TASK_TYPE_DATA_QUALITY.equalsIgnoreCase(taskInstance.getTaskType())) {
- return;
- }
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext();
- setDataQualityTaskRelation(dataQualityTaskExecutionContext, taskInstance, tenantCode);
- taskExecutionContext.setDataQualityTaskExecutionContext(dataQualityTaskExecutionContext);
- }
-
public void setK8sTaskRelatedInfo(TaskExecutionContext taskExecutionContext, TaskInstance taskInstance) {
K8sTaskExecutionContext k8sTaskExecutionContext = setK8sTaskRelation(taskInstance);
taskExecutionContext.setK8sTaskExecutionContext(k8sTaskExecutionContext);
@@ -180,51 +141,6 @@ private void setTaskDataSourceResourceInfo(Map<Integer, AbstractResourceParameters> map) {
-        Map<String, String> config = dataQualityParameters.getRuleInputParameter();
-
- int ruleId = dataQualityParameters.getRuleId();
- DqRule dqRule = processService.getDqRule(ruleId);
- if (dqRule == null) {
- log.error("Can not get dataQuality rule by id {}", ruleId);
- return;
- }
-
- dataQualityTaskExecutionContext.setRuleId(ruleId);
- dataQualityTaskExecutionContext.setRuleType(dqRule.getType());
- dataQualityTaskExecutionContext.setRuleName(dqRule.getName());
-
- List<DqRuleInputEntry> ruleInputEntryList = processService.getRuleInputEntry(ruleId);
- if (CollectionUtils.isEmpty(ruleInputEntryList)) {
- log.error("Rule input entry list is empty, ruleId: {}", ruleId);
- return;
- }
- List<DqRuleExecuteSql> executeSqlList = processService.getDqExecuteSql(ruleId);
- setComparisonParams(dataQualityTaskExecutionContext, config, ruleInputEntryList, executeSqlList);
- dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(ruleInputEntryList));
- dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList));
-
- // set the path used to store data quality task check error data
- dataQualityTaskExecutionContext.setHdfsPath(
- PropertyUtils.getString(Constants.FS_DEFAULT_FS)
- + PropertyUtils.getString(
- Constants.DATA_QUALITY_ERROR_OUTPUT_PATH,
- "/user/" + tenantCode + "/data_quality_error_data"));
-
- setSourceConfig(dataQualityTaskExecutionContext, config);
- setTargetConfig(dataQualityTaskExecutionContext, config);
- setWriterConfig(dataQualityTaskExecutionContext);
- setStatisticsValueWriterConfig(dataQualityTaskExecutionContext);
- }
-
private K8sTaskExecutionContext setK8sTaskRelation(TaskInstance taskInstance) {
K8sTaskExecutionContext k8sTaskExecutionContext = null;
String namespace = "";
@@ -257,155 +173,4 @@ private K8sTaskExecutionContext setK8sTaskRelation(TaskInstance taskInstance) {
return k8sTaskExecutionContext;
}
- /**
- * The SourceConfig is used by DataQualityApplication to read
- * the data from which the statistics value is computed
- *
- * @param dataQualityTaskExecutionContext
- * @param config
- */
- private void setSourceConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext,
- Map<String, String> config) {
- if (StringUtils.isNotEmpty(config.get(SRC_DATASOURCE_ID))) {
- DataSource dataSource = processService.findDataSourceById(Integer.parseInt(config.get(SRC_DATASOURCE_ID)));
- if (dataSource != null) {
- ConnectorType srcConnectorType = ConnectorType.of(
- DbType.of(Integer.parseInt(config.get(SRC_CONNECTOR_TYPE))).isHive() ? 1 : 0);
- dataQualityTaskExecutionContext.setSourceConnectorType(srcConnectorType.getDescription());
- dataQualityTaskExecutionContext.setSourceType(dataSource.getType().getCode());
- dataQualityTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
- }
- }
- }
-
- /**
- * It is used to get the comparison params, which contain the
- * comparison name, comparison table and execute sql.
- * When the type is fixed_value, the params will be null.
- *
- * @param dataQualityTaskExecutionContext
- * @param config
- * @param ruleInputEntryList
- * @param executeSqlList
- */
- private void setComparisonParams(DataQualityTaskExecutionContext dataQualityTaskExecutionContext,
- Map<String, String> config,
- List<DqRuleInputEntry> ruleInputEntryList,
- List<DqRuleExecuteSql> executeSqlList) {
- if (config.get(COMPARISON_TYPE) != null) {
- int comparisonTypeId = Integer.parseInt(config.get(COMPARISON_TYPE));
- // comparison type id 1 is fixed value, no param needs to be set
- if (comparisonTypeId > 1) {
- DqComparisonType type = processService.getComparisonTypeById(comparisonTypeId);
- if (type != null) {
- DqRuleInputEntry comparisonName = new DqRuleInputEntry();
- comparisonName.setField(COMPARISON_NAME);
- comparisonName.setData(type.getName());
- ruleInputEntryList.add(comparisonName);
-
- DqRuleInputEntry comparisonTable = new DqRuleInputEntry();
- comparisonTable.setField(COMPARISON_TABLE);
- comparisonTable.setData(type.getOutputTable());
- ruleInputEntryList.add(comparisonTable);
-
- if (executeSqlList == null) {
- executeSqlList = new ArrayList<>();
- }
-
- DqRuleExecuteSql dqRuleExecuteSql = new DqRuleExecuteSql();
- dqRuleExecuteSql.setType(ExecuteSqlType.MIDDLE.getCode());
- dqRuleExecuteSql.setIndex(1);
- dqRuleExecuteSql.setSql(type.getExecuteSql());
- dqRuleExecuteSql.setTableAlias(type.getOutputTable());
- executeSqlList.add(0, dqRuleExecuteSql);
-
- if (Boolean.TRUE.equals(type.getIsInnerSource())) {
- dataQualityTaskExecutionContext.setComparisonNeedStatisticsValueTable(true);
- }
- }
- } else if (comparisonTypeId == 1) {
- dataQualityTaskExecutionContext.setCompareWithFixedValue(true);
- }
- }
- }
-
- /**
- * The TargetConfig is used by DataQualityApplication to read
- * the data that is compared against the source value
- *
- * @param dataQualityTaskExecutionContext
- * @param config
- */
- private void setTargetConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext,
- Map<String, String> config) {
- if (StringUtils.isNotEmpty(config.get(TARGET_DATASOURCE_ID))) {
- DataSource dataSource =
- processService.findDataSourceById(Integer.parseInt(config.get(TARGET_DATASOURCE_ID)));
- if (dataSource != null) {
- ConnectorType targetConnectorType = ConnectorType.of(
- DbType.of(Integer.parseInt(config.get(TARGET_CONNECTOR_TYPE))).isHive() ? 1 : 0);
- dataQualityTaskExecutionContext.setTargetConnectorType(targetConnectorType.getDescription());
- dataQualityTaskExecutionContext.setTargetType(dataSource.getType().getCode());
- dataQualityTaskExecutionContext.setTargetConnectionParams(dataSource.getConnectionParams());
- }
- }
- }
-
- /**
- * The WriterConfig is used by DataQualityApplication to write
- * the data quality check result into the DolphinScheduler datasource
- *
- * @param dataQualityTaskExecutionContext
- */
- private void setWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- DataSource dataSource = getDefaultDataSource();
- ConnectorType writerConnectorType = ConnectorType.of(dataSource.getType().isHive() ? 1 : 0);
- dataQualityTaskExecutionContext.setWriterConnectorType(writerConnectorType.getDescription());
- dataQualityTaskExecutionContext.setWriterType(dataSource.getType().getCode());
- dataQualityTaskExecutionContext.setWriterConnectionParams(dataSource.getConnectionParams());
- dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result");
- }
-
- /**
- * The default datasource provides the DolphinScheduler datasource info,
- * which is used in the StatisticsValueConfig and WriterConfig
- *
- * @return DataSource
- */
- public DataSource getDefaultDataSource() {
- DataSource dataSource = new DataSource();
-
- dataSource.setUserName(hikariDataSource.getUsername());
- JdbcInfo jdbcInfo = JdbcUrlParser.getJdbcInfo(hikariDataSource.getJdbcUrl());
- if (jdbcInfo != null) {
- //
- BaseConnectionParam baseConnectionParam = new DefaultConnectionParam();
- baseConnectionParam.setUser(hikariDataSource.getUsername());
- baseConnectionParam.setPassword(hikariDataSource.getPassword());
- baseConnectionParam.setDatabase(jdbcInfo.getDatabase());
- baseConnectionParam.setAddress(jdbcInfo.getAddress());
- baseConnectionParam.setJdbcUrl(jdbcInfo.getJdbcUrl());
- baseConnectionParam.setOther(jdbcInfo.getParams());
- dataSource.setType(DbType.of(JdbcUrlParser.getDbType(jdbcInfo.getDriverName()).getCode()));
- dataSource.setConnectionParams(JSONUtils.toJsonString(baseConnectionParam));
- }
-
- return dataSource;
- }
-
- /**
- * The StatisticsValueWriterConfig is used by DataQualityApplication to write
- * the statistics value into the DolphinScheduler datasource
- *
- * @param dataQualityTaskExecutionContext
- */
- private void setStatisticsValueWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- DataSource dataSource = getDefaultDataSource();
- ConnectorType writerConnectorType = ConnectorType.of(dataSource.getType().isHive() ? 1 : 0);
- dataQualityTaskExecutionContext.setStatisticsValueConnectorType(writerConnectorType.getDescription());
- dataQualityTaskExecutionContext.setStatisticsValueType(dataSource.getType().getCode());
- dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams(dataSource.getConnectionParams());
- dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value");
- }
-
}
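The deleted `getDefaultDataSource` leaned on the internal `JdbcUrlParser`; for host-based JDBC URLs like the MySQL and PostgreSQL ones in this patch, `java.net.URI` recovers the same fields once the `jdbc:` prefix is stripped. A sketch (does not cover vendor-specific forms such as Oracle's `@host:port:sid`):

```java
import java.net.URI;

public final class JdbcUrls {

    private JdbcUrls() {
    }

    // jdbc:postgresql://172.16.100.199:5432/dolphinscheduler?stringtype=unspecified
    //   -> scheme "postgresql", address "172.16.100.199:5432", database "dolphinscheduler"
    public static String[] parse(String jdbcUrl) {
        URI uri = URI.create(jdbcUrl.substring("jdbc:".length()));
        String address = uri.getHost() + ":" + uri.getPort();
        String database = uri.getPath().substring(1); // drop the leading "/"
        return new String[]{uri.getScheme(), address, database};
    }
}
```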
diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java
index 1be47623d406..df71a9676250 100644
--- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java
+++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/WorkflowAlertManager.java
@@ -26,10 +26,7 @@
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.entity.Alert;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResultAlertContent;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
-import org.apache.dolphinscheduler.dao.entity.TaskAlertContent;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkflowAlertContent;
@@ -37,7 +34,6 @@
import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionLogMapper;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState;
import org.apache.commons.collections4.CollectionUtils;
@@ -324,107 +320,6 @@ public void closeAlert(WorkflowInstance workflowInstance) {
alertDao.addAlert(alert);
}
- /**
- * send workflow timeout alert
- *
- * @param workflowInstance workflow instance
- * @param projectUser projectUser
- */
- public void sendWorkflowTimeoutAlert(WorkflowInstance workflowInstance, ProjectUser projectUser) {
- alertDao.sendWorkflowTimeoutAlert(workflowInstance, projectUser);
- }
-
- /**
- * send data quality task alert
- */
- public void sendDataQualityTaskExecuteResultAlert(DqExecuteResult result, WorkflowInstance workflowInstance) {
- Alert alert = new Alert();
- String state = DqTaskState.of(result.getState()).getDescription();
- alert.setTitle("DataQualityResult [" + result.getTaskName() + "] " + state);
- String content = getDataQualityAlterContent(result);
- alert.setContent(content);
- alert.setAlertGroupId(workflowInstance.getWarningGroupId());
- alert.setCreateTime(new Date());
- alert.setProjectCode(result.getProjectCode());
- alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode());
- alert.setWorkflowInstanceId(workflowInstance.getId());
- // might need to change to data quality status
- alert.setAlertType(workflowInstance.getState().isSuccess() ? AlertType.WORKFLOW_INSTANCE_SUCCESS
- : AlertType.WORKFLOW_INSTANCE_FAILURE);
- alertDao.addAlert(alert);
- }
-
- /**
- * send task error alert
- */
- public void sendTaskErrorAlert(TaskInstance taskInstance, WorkflowInstance workflowInstance) {
- Alert alert = new Alert();
- alert.setTitle("Task [" + taskInstance.getName() + "] Failure Warning");
- String content = getTaskAlterContent(taskInstance);
- alert.setContent(content);
- alert.setAlertGroupId(workflowInstance.getWarningGroupId());
- alert.setCreateTime(new Date());
- alert.setWorkflowDefinitionCode(workflowInstance.getWorkflowDefinitionCode());
- alert.setWorkflowInstanceId(workflowInstance.getId());
- alert.setAlertType(AlertType.TASK_FAILURE);
- alertDao.addAlert(alert);
- }
-
- /**
- * getDataQualityAlterContent
- * @param result DqExecuteResult
- * @return alert content as a JSON string
- */
- public String getDataQualityAlterContent(DqExecuteResult result) {
-
- DqExecuteResultAlertContent content = DqExecuteResultAlertContent.newBuilder()
- .processDefinitionId(result.getWorkflowDefinitionId())
- .processDefinitionName(result.getWorkflowDefinitionName())
- .processInstanceId(result.getProcessInstanceId())
- .processInstanceName(result.getProcessInstanceName())
- .taskInstanceId(result.getTaskInstanceId())
- .taskName(result.getTaskName())
- .ruleType(result.getRuleType())
- .ruleName(result.getRuleName())
- .statisticsValue(result.getStatisticsValue())
- .comparisonValue(result.getComparisonValue())
- .checkType(result.getCheckType())
- .threshold(result.getThreshold())
- .operator(result.getOperator())
- .failureStrategy(result.getFailureStrategy())
- .userId(result.getUserId())
- .userName(result.getUserName())
- .state(result.getState())
- .errorDataPath(result.getErrorOutputPath())
- .build();
-
- return JSONUtils.toJsonString(content);
- }
-
- /**
- * getTaskAlterContent
- * @param taskInstance TaskInstance
- * @return task alert content as a JSON string
- */
- public String getTaskAlterContent(TaskInstance taskInstance) {
-
- TaskAlertContent content = TaskAlertContent.builder()
- .processInstanceName(taskInstance.getWorkflowInstanceName())
- .processInstanceId(taskInstance.getWorkflowInstanceId())
- .taskInstanceId(taskInstance.getId())
- .taskName(taskInstance.getName())
- .taskType(taskInstance.getTaskType())
- .state(taskInstance.getState())
- .startTime(taskInstance.getStartTime())
- .endTime(taskInstance.getEndTime())
- .host(taskInstance.getHost())
- .taskPriority(taskInstance.getTaskInstancePriority().getDescp())
- .logPath(taskInstance.getLogPath())
- .build();
-
- return JSONUtils.toJsonString(content);
- }
-
public void sendTaskTimeoutAlert(WorkflowInstance workflowInstance,
TaskInstance taskInstance,
ProjectUser projectUser) {
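The removed alert helpers serialized builder-built POJOs to JSON; the same payload can be assembled without dedicated content classes. A minimal sketch with Jackson (the field names mirror a subset of the removed `TaskAlertContent`, and the class name is illustrative):

```java
import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class AlertContents {

    private AlertContents() {
    }

    // LinkedHashMap keeps the fields in insertion order in the JSON output.
    public static String taskAlertJson(String taskName, String taskType,
                                       String state, String host) throws JsonProcessingException {
        Map<String, Object> content = new LinkedHashMap<>();
        content.put("taskName", taskName);
        content.put("taskType", taskType);
        content.put("state", state);
        content.put("host", host);
        return new ObjectMapper().writeValueAsString(content);
    }
}
```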
diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
index 9a349b8cf552..3d0b317b4119 100644
--- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
+++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
@@ -25,10 +25,6 @@
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.DataSource;
-import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
@@ -118,14 +114,6 @@ int saveTaskRelation(User operator, long projectCode, long workflowDefinitionCod
List<TaskNode> transformTask(List<WorkflowTaskRelation> taskRelationList,
                             List<TaskDefinitionLog> taskDefinitionLogs);
- DqRule getDqRule(int ruleId);
-
- List<DqRuleInputEntry> getRuleInputEntry(int ruleId);
-
- List<DqRuleExecuteSql> getDqExecuteSql(int ruleId);
-
- DqComparisonType getComparisonTypeById(int id);
-
TaskGroupQueue insertIntoTaskGroupQueue(Integer taskId,
String taskName,
Integer groupId,
diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java
index 31c3772b70c4..d46a6698f344 100644
--- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java
+++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java
@@ -45,10 +45,6 @@
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.DagData;
import org.apache.dolphinscheduler.dao.entity.DataSource;
-import org.apache.dolphinscheduler.dao.entity.DqComparisonType;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
@@ -65,10 +61,6 @@
import org.apache.dolphinscheduler.dao.mapper.ClusterMapper;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
@@ -86,7 +78,6 @@
import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceMapDao;
-import org.apache.dolphinscheduler.dao.utils.DqRuleUtils;
import org.apache.dolphinscheduler.dao.utils.EnvironmentUtils;
import org.apache.dolphinscheduler.dao.utils.WorkerGroupUtils;
import org.apache.dolphinscheduler.extract.base.client.Clients;
@@ -101,7 +92,6 @@
import org.apache.dolphinscheduler.service.command.CommandService;
import org.apache.dolphinscheduler.service.cron.CronUtils;
import org.apache.dolphinscheduler.service.exceptions.CronParseException;
-import org.apache.dolphinscheduler.service.exceptions.ServiceException;
import org.apache.dolphinscheduler.service.expand.CuringParamsService;
import org.apache.dolphinscheduler.service.model.TaskNode;
import org.apache.dolphinscheduler.service.utils.ClusterConfUtils;
@@ -189,18 +179,6 @@ public class ProcessServiceImpl implements ProcessService {
@Autowired
private TenantMapper tenantMapper;
- @Autowired
- private DqRuleMapper dqRuleMapper;
-
- @Autowired
- private DqRuleInputEntryMapper dqRuleInputEntryMapper;
-
- @Autowired
- private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;
-
- @Autowired
- private DqComparisonTypeMapper dqComparisonTypeMapper;
-
@Autowired
private TaskDefinitionMapper taskDefinitionMapper;
@@ -1355,26 +1333,6 @@ public List<TaskNode> transformTask(List<WorkflowTaskRelation> taskRelationList,
return taskNodeList;
}
- @Override
- public DqRule getDqRule(int ruleId) {
- return dqRuleMapper.selectById(ruleId);
- }
-
- @Override
- public List<DqRuleInputEntry> getRuleInputEntry(int ruleId) {
- return DqRuleUtils.transformInputEntry(dqRuleInputEntryMapper.getRuleInputEntryList(ruleId));
- }
-
- @Override
- public List<DqRuleExecuteSql> getDqExecuteSql(int ruleId) {
- return dqRuleExecuteSqlMapper.getExecuteSqlList(ruleId);
- }
-
- @Override
- public DqComparisonType getComparisonTypeById(int id) {
- return dqComparisonTypeMapper.selectById(id);
- }
-
@Override
public TaskGroupQueue insertIntoTaskGroupQueue(Integer taskInstanceId,
String taskName,
@@ -1399,13 +1357,6 @@ public TaskGroupQueue insertIntoTaskGroupQueue(Integer taskInstanceId,
return taskGroupQueue;
}
- protected void deleteCommandWithCheck(int commandId) {
- int delete = this.commandMapper.deleteById(commandId);
- if (delete != 1) {
- throw new ServiceException("delete command fail, id:" + commandId);
- }
- }
-
/**
* find k8s config yaml by clusterName
*
diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java
index 5878cbbc4a24..c81d90878cdf 100644
--- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java
+++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java
@@ -27,10 +27,6 @@
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.dao.entity.DqExecuteResult;
-import org.apache.dolphinscheduler.dao.entity.DqRule;
-import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry;
import org.apache.dolphinscheduler.dao.entity.TaskDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
@@ -39,9 +35,6 @@
import org.apache.dolphinscheduler.dao.entity.WorkflowDefinitionLog;
import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
import org.apache.dolphinscheduler.dao.entity.WorkflowTaskRelationLog;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper;
-import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionLogMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskGroupQueueMapper;
@@ -53,16 +46,10 @@
import org.apache.dolphinscheduler.dao.repository.TaskDefinitionDao;
import org.apache.dolphinscheduler.dao.repository.TaskDefinitionLogDao;
import org.apache.dolphinscheduler.plugin.task.api.enums.Direct;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DqTaskState;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
import org.apache.dolphinscheduler.plugin.task.api.model.Property;
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
import org.apache.dolphinscheduler.service.expand.CuringParamsService;
import org.apache.dolphinscheduler.service.model.TaskNode;
-import org.apache.dolphinscheduler.spi.params.base.FormType;
import java.util.ArrayList;
import java.util.Collections;
@@ -121,15 +108,6 @@ public class ProcessServiceTest {
@Mock
private TaskGroupQueueMapper taskGroupQueueMapper;
- @Mock
- private DqRuleMapper dqRuleMapper;
-
- @Mock
- private DqRuleInputEntryMapper dqRuleInputEntryMapper;
-
- @Mock
- private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;
-
@Mock
CuringParamsService curingGlobalParamsService;
@@ -206,104 +184,6 @@ public void testSwitchVersion() {
Assertions.assertEquals(0, processService.switchVersion(workflowDefinition, processDefinitionLog));
}
- @Test
- public void getDqRule() {
- when(dqRuleMapper.selectById(1)).thenReturn(new DqRule());
- Assertions.assertNotNull(processService.getDqRule(1));
- }
-
- @Test
- public void getRuleInputEntry() {
- when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
- Assertions.assertNotNull(processService.getRuleInputEntry(1));
- }
-
- @Test
- public void getDqExecuteSql() {
- when(dqRuleExecuteSqlMapper.getExecuteSqlList(1)).thenReturn(getRuleExecuteSqlList());
- Assertions.assertNotNull(processService.getDqExecuteSql(1));
- }
-
- private List<DqRuleInputEntry> getRuleInputEntryList() {
- List<DqRuleInputEntry> list = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData("JDBC");
- srcConnectorType.setPlaceholder("Please select the source connector type");
- srcConnectorType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcConnectorType
- .setOptions("[{\"label\":\"HIVE\",\"value\":\"HIVE\"},{\"label\":\"JDBC\",\"value\":\"JDBC\"}]");
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
- srcConnectorType.setIsEmit(true);
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值名");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(true);
- statisticsName.setIsShow(true);
- statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setInputType(InputType.DEFAULT.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
- statisticsName.setIsEmit(false);
-
- DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
- statisticsExecuteSql.setTitle("统计值计算SQL");
- statisticsExecuteSql.setField("statistics_execute_sql");
- statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
- statisticsExecuteSql.setCanEdit(true);
- statisticsExecuteSql.setIsShow(true);
- statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql");
- statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
- statisticsExecuteSql.setIsEmit(false);
-
- list.add(srcConnectorType);
- list.add(statisticsName);
- list.add(statisticsExecuteSql);
-
- return list;
- }
-
- private List<DqRuleExecuteSql> getRuleExecuteSqlList() {
- List<DqRuleExecuteSql> list = new ArrayList<>();
-
- DqRuleExecuteSql executeSqlDefinition = new DqRuleExecuteSql();
- executeSqlDefinition.setIndex(0);
- executeSqlDefinition.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
- executeSqlDefinition.setTableAlias("total_count");
- executeSqlDefinition.setType(ExecuteSqlType.COMPARISON.getCode());
- list.add(executeSqlDefinition);
-
- return list;
- }
-
- public DqExecuteResult getExecuteResult() {
- DqExecuteResult dqExecuteResult = new DqExecuteResult();
- dqExecuteResult.setId(1);
- dqExecuteResult.setState(DqTaskState.FAILURE.getCode());
-
- return dqExecuteResult;
- }
-
- public List<DqExecuteResult> getExecuteResultList() {
-
- List<DqExecuteResult> list = new ArrayList<>();
- DqExecuteResult dqExecuteResult = new DqExecuteResult();
- dqExecuteResult.setId(1);
- dqExecuteResult.setState(DqTaskState.FAILURE.getCode());
- list.add(dqExecuteResult);
-
- return list;
- }
-
@Test
public void testSetGlobalParamIfCommanded() {
WorkflowDefinition workflowDefinition = new WorkflowDefinition();
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml
index 3d107ba14c2b..83d3b8ded191 100644
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml
+++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-all/pom.xml
@@ -28,12 +28,6 @@
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-task-dataquality</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
        <dependency>
            <groupId>org.apache.dolphinscheduler</groupId>
            <artifactId>dolphinscheduler-task-datax</artifactId>
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/DataQualityTaskExecutionContext.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/DataQualityTaskExecutionContext.java
deleted file mode 100644
index 684ed712c91d..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/DataQualityTaskExecutionContext.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api;
-
-import java.io.Serializable;
-
-/**
- * DataQualityTaskExecutionContext
- */
-public class DataQualityTaskExecutionContext implements Serializable {
-
- /**
- * rule id
- */
- private int ruleId;
- /**
- * rule name
- */
- private String ruleName;
- /**
- * rule type
- */
- private int ruleType;
- /**
- * input entry list
- */
- private String ruleInputEntryList;
- /**
- * execute sql list
- */
- private String executeSqlList;
- /**
- * if comparison value calculate from statistics value table
- */
- private boolean comparisonNeedStatisticsValueTable = false;
- /**
- * compare with fixed value
- */
- private boolean compareWithFixedValue = false;
- /**
- * error output path
- */
- private String hdfsPath;
- /**
- * sourceConnector type
- */
- private String sourceConnectorType;
- /**
- * source type
- */
- private int sourceType;
- /**
- * source connection params
- */
- private String sourceConnectionParams;
- /**
- * target connector type
- */
- private String targetConnectorType;
- /**
- * target type
- */
- private int targetType;
- /**
- * target connection params
- */
- private String targetConnectionParams;
- /**
- * source connector type
- */
- private String writerConnectorType;
- /**
- * writer type
- */
- private int writerType;
- /**
- * writer table
- */
- private String writerTable;
- /**
- * writer connection params
- */
- private String writerConnectionParams;
- /**
- * statistics value connector type
- */
- private String statisticsValueConnectorType;
- /**
- * statistics value type
- */
- private int statisticsValueType;
- /**
- * statistics value table
- */
- private String statisticsValueTable;
- /**
- * statistics value writer connection params
- */
- private String statisticsValueWriterConnectionParams;
-
- public int getRuleId() {
- return ruleId;
- }
-
- public void setRuleId(int ruleId) {
- this.ruleId = ruleId;
- }
-
- public String getSourceConnectorType() {
- return sourceConnectorType;
- }
-
- public void setSourceConnectorType(String sourceConnectorType) {
- this.sourceConnectorType = sourceConnectorType;
- }
-
- public int getSourceType() {
- return sourceType;
- }
-
- public void setSourceType(int sourceType) {
- this.sourceType = sourceType;
- }
-
- public String getSourceConnectionParams() {
- return sourceConnectionParams;
- }
-
- public void setSourceConnectionParams(String sourceConnectionParams) {
- this.sourceConnectionParams = sourceConnectionParams;
- }
-
- public String getTargetConnectorType() {
- return targetConnectorType;
- }
-
- public void setTargetConnectorType(String targetConnectorType) {
- this.targetConnectorType = targetConnectorType;
- }
-
- public int getTargetType() {
- return targetType;
- }
-
- public void setTargetType(int targetType) {
- this.targetType = targetType;
- }
-
- public String getTargetConnectionParams() {
- return targetConnectionParams;
- }
-
- public void setTargetConnectionParams(String targetConnectionParams) {
- this.targetConnectionParams = targetConnectionParams;
- }
-
- public int getWriterType() {
- return writerType;
- }
-
- public void setWriterType(int writerType) {
- this.writerType = writerType;
- }
-
- public String getWriterConnectionParams() {
- return writerConnectionParams;
- }
-
- public void setWriterConnectionParams(String writerConnectionParams) {
- this.writerConnectionParams = writerConnectionParams;
- }
-
- public String getWriterTable() {
- return writerTable;
- }
-
- public void setWriterTable(String writerTable) {
- this.writerTable = writerTable;
- }
-
- public String getWriterConnectorType() {
- return writerConnectorType;
- }
-
- public void setWriterConnectorType(String writerConnectorType) {
- this.writerConnectorType = writerConnectorType;
- }
-
- public String getStatisticsValueConnectorType() {
- return statisticsValueConnectorType;
- }
-
- public void setStatisticsValueConnectorType(String statisticsValueConnectorType) {
- this.statisticsValueConnectorType = statisticsValueConnectorType;
- }
-
- public int getStatisticsValueType() {
- return statisticsValueType;
- }
-
- public void setStatisticsValueType(int statisticsValueType) {
- this.statisticsValueType = statisticsValueType;
- }
-
- public String getStatisticsValueTable() {
- return statisticsValueTable;
- }
-
- public void setStatisticsValueTable(String statisticsValueTable) {
- this.statisticsValueTable = statisticsValueTable;
- }
-
- public String getStatisticsValueWriterConnectionParams() {
- return statisticsValueWriterConnectionParams;
- }
-
- public void setStatisticsValueWriterConnectionParams(String statisticsValueWriterConnectionParams) {
- this.statisticsValueWriterConnectionParams = statisticsValueWriterConnectionParams;
- }
-
- public String getRuleName() {
- return ruleName;
- }
-
- public void setRuleName(String ruleName) {
- this.ruleName = ruleName;
- }
-
- public int getRuleType() {
- return ruleType;
- }
-
- public void setRuleType(int ruleType) {
- this.ruleType = ruleType;
- }
-
- public String getRuleInputEntryList() {
- return ruleInputEntryList;
- }
-
- public void setRuleInputEntryList(String ruleInputEntryList) {
- this.ruleInputEntryList = ruleInputEntryList;
- }
-
- public String getExecuteSqlList() {
- return executeSqlList;
- }
-
- public void setExecuteSqlList(String executeSqlList) {
- this.executeSqlList = executeSqlList;
- }
-
- public boolean isComparisonNeedStatisticsValueTable() {
- return comparisonNeedStatisticsValueTable;
- }
-
- public void setComparisonNeedStatisticsValueTable(boolean comparisonNeedStatisticsValueTable) {
- this.comparisonNeedStatisticsValueTable = comparisonNeedStatisticsValueTable;
- }
-
- public boolean isCompareWithFixedValue() {
- return compareWithFixedValue;
- }
-
- public void setCompareWithFixedValue(boolean compareWithFixedValue) {
- this.compareWithFixedValue = compareWithFixedValue;
- }
-
- public String getHdfsPath() {
- return hdfsPath;
- }
-
- public void setHdfsPath(String hdfsPath) {
- this.hdfsPath = hdfsPath;
- }
-}
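
For context before the next hunk: this removed context object was attached to TaskExecutionContext (the field deleted just below) to carry rule metadata from the master to the worker. A minimal sketch of the pre-removal wiring, assuming the usual generated setter for that field (the values here are hypothetical):

    DataQualityTaskExecutionContext dqContext = new DataQualityTaskExecutionContext();
    dqContext.setRuleId(1);
    dqContext.setRuleName("null_check");            // hypothetical rule name
    dqContext.setCompareWithFixedValue(true);
    taskExecutionContext.setDataQualityTaskExecutionContext(dqContext); // assumed setter
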
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java
index 4b146396520d..5c2302061bc1 100644
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java
+++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskExecutionContext.java
@@ -131,8 +131,6 @@ public class TaskExecutionContext implements Serializable {
private Map<String, Property> paramsMap;
- private DataQualityTaskExecutionContext dataQualityTaskExecutionContext;
-
private Integer cpuQuota;
private Integer memoryMax;
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategy.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategy.java
deleted file mode 100644
index 1c139a31a9b1..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategy.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.enums.dp;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * failure policy when dqs task node failed.
- */
-public enum DqFailureStrategy {
-
- /**
- * 0-alert and continue when dqc tasks failed.
- * 1-alert and block when dqc tasks failed.
- **/
- ALERT(0, "alert"),
- BLOCK(1, "block");
-
- DqFailureStrategy(int code, String description) {
- this.code = code;
- this.description = description;
- }
-
- private final int code;
- private final String description;
-
- public int getCode() {
- return code;
- }
-
- public String getDescription() {
- return description;
- }
-
- private static final Map<Integer, DqFailureStrategy> VALUES_MAP = new HashMap<>();
-
- static {
- for (DqFailureStrategy type : DqFailureStrategy.values()) {
- VALUES_MAP.put(type.code, type);
- }
- }
-
- public static DqFailureStrategy of(Integer status) {
- if (VALUES_MAP.containsKey(status)) {
- return VALUES_MAP.get(status);
- }
- throw new IllegalArgumentException("invalid code : " + status);
- }
-}
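
For context: the removed enum above decided what happened after a failed data-quality check. A minimal usage sketch (the enum constants and of() come from the deleted source; the branching around them is illustrative):

    // of() maps a persisted code back to a strategy and throws on unknown codes
    DqFailureStrategy strategy = DqFailureStrategy.of(1); // BLOCK
    if (strategy == DqFailureStrategy.BLOCK) {
        // alert and block downstream tasks when the check fails
    } else {
        // ALERT: send the alert but let the workflow continue
    }
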
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskState.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskState.java
deleted file mode 100644
index 710db2e4ae91..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskState.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.enums.dp;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import com.fasterxml.jackson.annotation.JsonValue;
-
-/**
- * data quality task state
- */
-public enum DqTaskState {
-
- /**
- * 0-default
- * 1-success
- * 2-failure
- */
- DEFAULT(0, "default"),
- SUCCESS(1, "success"),
- FAILURE(2, "failure");
-
- DqTaskState(int code, String description) {
- this.code = code;
- this.description = description;
- }
-
- private final int code;
- private final String description;
-
- @JsonValue
- public int getCode() {
- return code;
- }
-
- public String getDescription() {
- return description;
- }
-
- private static final Map<Integer, DqTaskState> VALUES_MAP = new HashMap<>();
-
- static {
- for (DqTaskState type : DqTaskState.values()) {
- VALUES_MAP.put(type.code, type);
- }
- }
-
- public static DqTaskState of(Integer status) {
- if (VALUES_MAP.containsKey(status)) {
- return VALUES_MAP.get(status);
- }
- throw new IllegalArgumentException("invalid code : " + status);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java
deleted file mode 100644
index 273cf5719319..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality;
-
-import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark.SparkParameters;
-import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * DataQualityParameters
- */
-@Slf4j
-public class DataQualityParameters extends AbstractParameters {
-
- /**
- * rule id
- */
- private int ruleId;
- /**
- * rule input entry value map
- */
- private Map<String, String> ruleInputParameter;
- /**
- * spark parameters
- */
- private SparkParameters sparkParameters;
-
- public int getRuleId() {
- return ruleId;
- }
-
- public void setRuleId(int ruleId) {
- this.ruleId = ruleId;
- }
-
- public Map<String, String> getRuleInputParameter() {
- return ruleInputParameter;
- }
-
- public void setRuleInputParameter(Map<String, String> ruleInputParameter) {
- this.ruleInputParameter = ruleInputParameter;
- }
-
- /**
- * In this function ,we need more detailed check every parameter,
- * if the parameter is non-conformant will return false
- * @return boolean result
- */
- @Override
- public boolean checkParameters() {
-
- if (ruleId == 0) {
- log.error("rule id is null");
- return false;
- }
-
- if (MapUtils.isEmpty(ruleInputParameter)) {
- log.error("rule input parameter is empty");
- return false;
- }
-
- return sparkParameters != null;
- }
-
- @Override
- public List<ResourceInfo> getResourceFilesList() {
- return new ArrayList<>();
- }
-
- public SparkParameters getSparkParameters() {
- return sparkParameters;
- }
-
- public void setSparkParameters(SparkParameters sparkParameters) {
- this.sparkParameters = sparkParameters;
- }
-
-}
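
For context: checkParameters() in the deleted class required a non-zero ruleId, a non-empty ruleInputParameter map, and a non-null sparkParameters. A minimal sketch of that contract (setter names come from the deleted source; the rule id and map entries are made up):

    DataQualityParameters parameters = new DataQualityParameters();
    parameters.setRuleId(1);
    parameters.setRuleInputParameter(
            Collections.singletonMap("src_table", "orders")); // hypothetical rule input
    parameters.setSparkParameters(new SparkParameters());
    boolean valid = parameters.checkParameters(); // true only once all three are set
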
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/ProgramType.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/ProgramType.java
deleted file mode 100644
index 09871c15ae99..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/ProgramType.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark;
-
-/**
- * support program types
- */
-public enum ProgramType {
-
- /**
- * 0 JAVA,1 SCALA,2 PYTHON
- */
- JAVA,
- SCALA,
- PYTHON
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkConstants.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkConstants.java
deleted file mode 100644
index d886d0381c1e..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkConstants.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark;
-
-public class SparkConstants {
-
- private SparkConstants() {
- throw new IllegalStateException("Utility class");
- }
-
- /**
- * --class CLASS_NAME
- */
- public static final String MAIN_CLASS = "--class";
-
- /**
- * --name NAME
- */
- public static final String SPARK_NAME = "--name";
-
- /**
- * --queue QUEUE
- */
- public static final String SPARK_QUEUE = "--queue";
-
- public static final String DEPLOY_MODE = "--deploy-mode";
-
- /**
- * --driver-cores NUM
- */
- public static final String DRIVER_CORES = "--driver-cores";
-
- /**
- * --driver-memory MEM
- */
- public static final String DRIVER_MEMORY = "--driver-memory";
-
- /**
- * master
- */
- public static final String MASTER = "--master";
-
- /**
- * --num-executors NUM
- */
- public static final String NUM_EXECUTORS = "--num-executors";
-
- /**
- * --executor-cores NUM
- */
- public static final String EXECUTOR_CORES = "--executor-cores";
-
- /**
- * --executor-memory MEM
- */
- public static final String EXECUTOR_MEMORY = "--executor-memory";
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkParameters.java
deleted file mode 100644
index 1b1f2cc8eff3..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/spark/SparkParameters.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark;
-
-import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * spark parameters
- */
-public class SparkParameters extends AbstractParameters {
-
- /**
- * main jar
- */
- private ResourceInfo mainJar;
-
- /**
- * main class
- */
- private String mainClass;
-
- /**
- * deploy mode
- */
- private String deployMode;
-
- /**
- * arguments
- */
- private String mainArgs;
-
- /**
- * driver-cores Number of cores used by the driver, only in cluster mode
- */
- private int driverCores;
-
- /**
- * driver-memory Memory for driver
- */
-
- private String driverMemory;
-
- /**
- * num-executors Number of executors to launch
- */
- private int numExecutors;
-
- /**
- * executor-cores Number of cores per executor
- */
- private int executorCores;
-
- /**
- * Memory per executor
- */
- private String executorMemory;
-
- /**
- * app name
- */
- private String appName;
-
- /**
- * The YARN queue to submit to
- */
- private String yarnQueue;
-
- /**
- * other arguments
- */
- private String others;
-
- /**
- * program type
- * 0 JAVA,1 SCALA,2 PYTHON
- */
- private ProgramType programType;
-
- /**
- * resource list
- */
- private List<ResourceInfo> resourceList = new ArrayList<>();
-
- public ResourceInfo getMainJar() {
- return mainJar;
- }
-
- public void setMainJar(ResourceInfo mainJar) {
- this.mainJar = mainJar;
- }
-
- public String getMainClass() {
- return mainClass;
- }
-
- public void setMainClass(String mainClass) {
- this.mainClass = mainClass;
- }
-
- public String getDeployMode() {
- return deployMode;
- }
-
- public void setDeployMode(String deployMode) {
- this.deployMode = deployMode;
- }
-
- public String getMainArgs() {
- return mainArgs;
- }
-
- public void setMainArgs(String mainArgs) {
- this.mainArgs = mainArgs;
- }
-
- public int getDriverCores() {
- return driverCores;
- }
-
- public void setDriverCores(int driverCores) {
- this.driverCores = driverCores;
- }
-
- public String getDriverMemory() {
- return driverMemory;
- }
-
- public void setDriverMemory(String driverMemory) {
- this.driverMemory = driverMemory;
- }
-
- public int getNumExecutors() {
- return numExecutors;
- }
-
- public void setNumExecutors(int numExecutors) {
- this.numExecutors = numExecutors;
- }
-
- public int getExecutorCores() {
- return executorCores;
- }
-
- public void setExecutorCores(int executorCores) {
- this.executorCores = executorCores;
- }
-
- public String getExecutorMemory() {
- return executorMemory;
- }
-
- public void setExecutorMemory(String executorMemory) {
- this.executorMemory = executorMemory;
- }
-
- public String getAppName() {
- return appName;
- }
-
- public void setAppName(String appName) {
- this.appName = appName;
- }
-
- public String getYarnQueue() {
- return yarnQueue;
- }
-
- public void setYarnQueue(String yarnQueue) {
- this.yarnQueue = yarnQueue;
- }
-
- public String getOthers() {
- return others;
- }
-
- public void setOthers(String others) {
- this.others = others;
- }
-
- public List<ResourceInfo> getResourceList() {
- return resourceList;
- }
-
- public void setResourceList(List<ResourceInfo> resourceList) {
- this.resourceList = resourceList;
- }
-
- public ProgramType getProgramType() {
- return programType;
- }
-
- public void setProgramType(ProgramType programType) {
- this.programType = programType;
- }
-
- @Override
- public boolean checkParameters() {
- return mainJar != null && programType != null;
- }
-
- @Override
- public List<ResourceInfo> getResourceFilesList() {
- if (mainJar != null && !resourceList.contains(mainJar)) {
- resourceList.add(mainJar);
- }
- return resourceList;
- }
-
-}
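
For context: these parameters were rendered into spark-submit style arguments using the SparkConstants class deleted earlier (the flag names come from that class; the assembly below is an illustrative sketch, not the removed SparkArgsUtils implementation):

    List<String> args = new ArrayList<>();
    args.add(SparkConstants.MASTER);           // --master
    args.add("yarn");                          // hypothetical master value
    args.add(SparkConstants.DEPLOY_MODE);      // --deploy-mode
    args.add(sparkParameters.getDeployMode());
    args.add(SparkConstants.DRIVER_CORES);     // --driver-cores
    args.add(String.valueOf(sparkParameters.getDriverCores()));
    args.add(SparkConstants.EXECUTOR_MEMORY);  // --executor-memory
    args.add(sparkParameters.getExecutorMemory());
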
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/DataQualityConstants.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/DataQualityConstants.java
deleted file mode 100644
index ceda8af71b7a..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/DataQualityConstants.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.utils;
-
-/**
- * DataQualityConstants
- */
-public class DataQualityConstants {
-
- private DataQualityConstants() {
- throw new IllegalStateException("Utility class");
- }
-
- /**
- * data quality task
- */
- public static final String SRC_CONNECTOR_TYPE = "src_connector_type";
- public static final String SRC_DATASOURCE_ID = "src_datasource_id";
- public static final String SRC_DATABASE = "src_database";
- public static final String SRC_TABLE = "src_table";
- public static final String SRC_FILTER = "src_filter";
- public static final String SRC_FIELD = "src_field";
- public static final String TARGET_CONNECTOR_TYPE = "target_connector_type";
- public static final String TARGET_DATASOURCE_ID = "target_datasource_id";
- public static final String TARGET_DATABASE = "target_database";
- public static final String TARGET_TABLE = "target_table";
- public static final String TARGET_FILTER = "target_filter";
- public static final String TARGET_FIELD = "target_field";
- public static final String STATISTICS_NAME = "statistics_name";
- public static final String STATISTICS_EXECUTE_SQL = "statistics_execute_sql";
- public static final String COMPARISON_NAME = "comparison_name";
- public static final String COMPARISON_TYPE = "comparison_type";
- public static final String COMPARISON_VALUE = "comparison_value";
- public static final String COMPARISON_EXECUTE_SQL = "comparison_execute_sql";
- public static final String MAPPING_COLUMNS = "mapping_columns";
- public static final String ON_CLAUSE = "on_clause";
- public static final String WHERE_CLAUSE = "where_clause";
- public static final String CHECK_TYPE = "check_type";
- public static final String THRESHOLD = "threshold";
- public static final String OPERATOR = "operator";
- public static final String FAILURE_STRATEGY = "failure_strategy";
- public static final String STATISTICS_TABLE = "statistics_table";
- public static final String COMPARISON_TABLE = "comparison_table";
- public static final String AND = " AND ";
- public static final String WRITER_CONNECTOR_TYPE = "writer_connector_type";
- public static final String WRITER_DATASOURCE_ID = "writer_datasource_id";
- public static final String UNIQUE_CODE = "unique_code";
- public static final String DATA_TIME = "data_time";
- public static final String REGEXP_PATTERN = "regexp_pattern";
- public static final String ERROR_OUTPUT_PATH = "error_output_path";
- public static final String INDEX = "index";
- public static final String PATH = "path";
- public static final String HDFS_FILE = "hdfs_file";
- public static final String BATCH = "batch";
-
- public static final String RULE_ID = "rule_id";
- public static final String RULE_TYPE = "rule_type";
- public static final String RULE_NAME = "rule_name";
- public static final String CREATE_TIME = "create_time";
- public static final String UPDATE_TIME = "update_time";
- public static final String PROCESS_DEFINITION_ID = "process_definition_id";
- public static final String PROCESS_INSTANCE_ID = "process_instance_id";
- public static final String TASK_INSTANCE_ID = "task_instance_id";
-
- public static final String ADDRESS = "address";
- public static final String DATABASE = "database";
- public static final String JDBC_URL = "jdbcUrl";
- public static final String PRINCIPAL = "principal";
- public static final String OTHER = "other";
- public static final String ORACLE_DB_CONNECT_TYPE = "connectType";
-
- public static final String TABLE = "table";
- public static final String URL = "url";
- public static final String DRIVER = "driver";
- public static final String SQL = "sql";
- public static final String INPUT_TABLE = "input_table";
- public static final String OUTPUT_TABLE = "output_table";
- public static final String TMP_TABLE = "tmp_table";
-
- public static final String USER = "user";
- public static final String PASSWORD = "password";
-
- /**
- * database type
- */
- public static final String MYSQL = "MYSQL";
- public static final String POSTGRESQL = "POSTGRESQL";
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParser.java
deleted file mode 100644
index e8e5ec299cf6..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParser.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.utils;
-
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.COLON;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.DOUBLE_SLASH;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EQUAL_SIGN;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.QUESTION;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SEMICOLON;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_SLASH;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.MYSQL;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.POSTGRESQL;
-
-import org.apache.dolphinscheduler.plugin.task.api.model.JdbcInfo;
-import org.apache.dolphinscheduler.spi.enums.DbType;
-
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * JdbcUrlParser
- */
-public class JdbcUrlParser {
-
- private JdbcUrlParser() {
- throw new IllegalStateException("Utility class");
- }
-
- public static DbType getDbType(String datasourceType) {
- switch (datasourceType.toUpperCase()) {
- case MYSQL:
- return DbType.MYSQL;
- case POSTGRESQL:
- return DbType.POSTGRESQL;
- default:
- return null;
- }
- }
-
- public static JdbcInfo getJdbcInfo(String jdbcUrl) {
-
- JdbcInfo jdbcInfo = new JdbcInfo();
-
- int pos;
- int pos1;
- int pos2;
- String tempUri;
-
- if (jdbcUrl == null || !jdbcUrl.startsWith("jdbc:") || (pos1 = jdbcUrl.indexOf(COLON, 5)) == -1) {
- return null;
- }
-
- String driverName = jdbcUrl.substring(5, pos1);
- String params = "";
- String host = "";
- String database = "";
- String port = "";
- if (((pos2 = jdbcUrl.indexOf(SEMICOLON, pos1)) == -1) && ((pos2 = jdbcUrl.indexOf(QUESTION, pos1)) == -1)) {
- tempUri = jdbcUrl.substring(pos1 + 1);
- } else {
- tempUri = jdbcUrl.substring(pos1 + 1, pos2);
- params = jdbcUrl.substring(pos2 + 1);
- }
-
- if (tempUri.startsWith(DOUBLE_SLASH)) {
- if ((pos = tempUri.indexOf(SINGLE_SLASH, 2)) != -1) {
- host = tempUri.substring(2, pos);
- database = tempUri.substring(pos + 1);
-
- if ((pos = host.indexOf(COLON)) != -1) {
- port = host.substring(pos + 1);
- host = host.substring(0, pos);
- }
- }
- } else {
- database = tempUri;
- }
-
- if (StringUtils.isEmpty(database)) {
- return null;
- }
-
- if (database.contains(QUESTION)) {
- database = database.substring(0, database.indexOf(QUESTION));
- }
-
- if (database.contains(SEMICOLON)) {
- database = database.substring(0, database.indexOf(SEMICOLON));
- }
-
- jdbcInfo.setDriverName(driverName);
- jdbcInfo.setHost(host);
- jdbcInfo.setPort(port);
- jdbcInfo.setDatabase(database);
-
- if (StringUtils.isNotEmpty(params)) {
- Map<String, String> others = new HashMap<>();
- String[] paramList = params.split("&");
- for (String param : paramList) {
- // handle bad params
- if (StringUtils.isEmpty(param) || !param.contains(EQUAL_SIGN)) {
- continue;
- }
- String[] kv = param.split(EQUAL_SIGN);
- others.put(kv[0], kv[1]);
- }
- jdbcInfo.setParams(others);
- }
-
- String address = "jdbc:" + driverName + "://" + host + COLON + port;
- jdbcInfo.setAddress(address);
- jdbcInfo.setJdbcUrl(address + SINGLE_SLASH + database);
-
- return jdbcInfo;
- }
-}
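
For context: the removed parser split a JDBC URL into driver name, host, port, database, and query parameters; the test deleted further below exercises exactly this behavior. A minimal usage sketch against the pre-removal API:

    // returns null for URLs that do not start with "jdbc:" or that lack a database
    JdbcInfo info = JdbcUrlParser.getJdbcInfo(
            "jdbc:mysql://localhost:3306/dolphinscheduler?useUnicode=true");
    // info: host=localhost, port=3306, driverName=mysql, database=dolphinscheduler,
    //       address=jdbc:mysql://localhost:3306
    DbType type = JdbcUrlParser.getDbType("mysql"); // DbType.MYSQL; only MYSQL and POSTGRESQL were mapped
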
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategyTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategyTest.java
deleted file mode 100644
index 74ba47493d9e..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqFailureStrategyTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.enums.dp;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class DqFailureStrategyTest {
-
- @Test
- public void testGetCode() {
- Assertions.assertEquals(0, DqFailureStrategy.ALERT.getCode());
- Assertions.assertEquals(1, DqFailureStrategy.BLOCK.getCode());
- }
-
- @Test
- public void testGetDescription() {
- Assertions.assertEquals("alert", DqFailureStrategy.ALERT.getDescription());
- Assertions.assertEquals("block", DqFailureStrategy.BLOCK.getDescription());
- }
-
- @Test
- public void testOf() {
- Assertions.assertEquals(DqFailureStrategy.ALERT, DqFailureStrategy.of(0));
- Assertions.assertEquals(DqFailureStrategy.BLOCK, DqFailureStrategy.of(1));
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskStateTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskStateTest.java
deleted file mode 100644
index 2e8aaa3cb152..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/enums/dp/DqTaskStateTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.enums.dp;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class DqTaskStateTest {
-
- @Test
- public void testGetCode() {
- Assertions.assertEquals(0, DqTaskState.DEFAULT.getCode());
- Assertions.assertEquals(1, DqTaskState.SUCCESS.getCode());
- Assertions.assertEquals(2, DqTaskState.FAILURE.getCode());
- }
-
- @Test
- public void testGetDescription() {
- Assertions.assertEquals("default", DqTaskState.DEFAULT.getDescription());
- Assertions.assertEquals("success", DqTaskState.SUCCESS.getDescription());
- Assertions.assertEquals("failure", DqTaskState.FAILURE.getDescription());
- }
-
- @Test
- public void testOf() {
- Assertions.assertEquals(DqTaskState.DEFAULT, DqTaskState.of(0));
- Assertions.assertEquals(DqTaskState.SUCCESS, DqTaskState.of(1));
- Assertions.assertEquals(DqTaskState.FAILURE, DqTaskState.of(2));
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParserTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParserTest.java
deleted file mode 100644
index bad9171bacb0..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/java/org/apache/dolphinscheduler/plugin/task/api/utils/JdbcUrlParserTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.api.utils;
-
-import org.apache.dolphinscheduler.plugin.task.api.model.JdbcInfo;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-/**
- * JdbcUrlParserTest
- */
-public class JdbcUrlParserTest {
-
- @Test
- public void testGetJdbcInfo() {
- JdbcInfo jdbcInfo =
- JdbcUrlParser.getJdbcInfo("jdbc:mysql://localhost:3306/dolphinscheduler?"
- + "useUnicode=true&characterEncoding=UTF-8");
- if (jdbcInfo != null) {
- String jdbcInfoStr = jdbcInfo.toString();
- String expected =
- "JdbcInfo(host=localhost, port=3306, driverName=mysql, database=dolphinscheduler, " +
- "params={useUnicode=true, characterEncoding=UTF-8}, address=jdbc:mysql://localhost:3306, jdbcUrl=jdbc:mysql://localhost:3306/dolphinscheduler)";
- Assertions.assertEquals(expected, jdbcInfoStr);
- }
-
- // bad jdbc url case
- jdbcInfo = JdbcUrlParser.getJdbcInfo("jdbc:mysql://localhost:3306/dolphinscheduler?"
- + "useUnicode=true&&characterEncoding=UTF-8");
- if (jdbcInfo != null) {
- String jdbcInfoStr = jdbcInfo.toString();
- String expected =
- "JdbcInfo(host=localhost, port=3306, driverName=mysql, database=dolphinscheduler, " +
- "params={useUnicode=true, characterEncoding=UTF-8}, address=jdbc:mysql://localhost:3306, jdbcUrl=jdbc:mysql://localhost:3306/dolphinscheduler)";
- Assertions.assertEquals(expected, jdbcInfoStr);
- }
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties
index 40e1c5abcb74..05dc379c56b1 100644
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties
+++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/test/resources/common.properties
@@ -73,13 +73,6 @@ datasource.encryption.enable=false
# datasource encryption salt
datasource.encryption.salt=!@#$%^&*
-# data quality jar directory path, it would auto discovery data quality jar from this given dir. You should keep it empty if you do not change anything in
-# data-quality, it will auto discovery by dolphinscheduler itself. Change it only if you want to use your own data-quality jar and it is not in worker-server
-# libs directory(but may sure your jar name start with `dolphinscheduler-data-quality`).
-data-quality.jar.dir=
-
-#data-quality.error.output.path=/tmp/data-quality-error-data
-
# Network IP gets priority, default inner outer
# Whether hive SQL is executed in the same session
@@ -118,4 +111,4 @@ ml.mlflow.preset_repository_version="main"
appId.collect=log
# The default env list will be load by Shell task, e.g. /etc/profile,~/.bash_profile
-# shell.env_source_list=/etc/profile,~/.bash_profile
\ No newline at end of file
+# shell.env_source_list=/etc/profile,~/.bash_profile
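
For context: with the block above removed, common.properties no longer accepts the data-quality jar settings. Before this change a deployment could pin its own jar roughly like this (the directory shown is hypothetical):

    data-quality.jar.dir=/opt/dolphinscheduler/worker-server/libs
    #data-quality.error.output.path=/tmp/data-quality-error-data
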
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml
deleted file mode 100644
index 41a5447ea7a1..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml
+++ /dev/null
@@ -1,84 +0,0 @@
-
-
-
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.dolphinscheduler</groupId>
-        <artifactId>dolphinscheduler-task-plugin</artifactId>
-        <version>dev-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>dolphinscheduler-task-dataquality</artifactId>
-    <packaging>jar</packaging>
-
-
- task.dataquality
-
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-task-api</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-datasource-all</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-datasource-api</artifactId>
-            <version>${project.version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-reload4j</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-common</artifactId>
-            <version>${project.version}</version>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <phase>package</phase>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java
deleted file mode 100644
index b607293888a3..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq;
-
-import static org.apache.dolphinscheduler.common.constants.DateConstants.YYYY_MM_DD_HH_MM_SS;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SLASH;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.UNDERLINE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.CREATE_TIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.DATA_TIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.ERROR_OUTPUT_PATH;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PROCESS_DEFINITION_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PROCESS_INSTANCE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.REGEXP_PATTERN;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_NAME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TASK_INSTANCE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.UPDATE_TIME;
-
-import org.apache.dolphinscheduler.common.constants.Constants;
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ArgsUtils;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.SparkArgsUtils;
-
-import org.apache.commons.lang3.StringEscapeUtils;
-import org.apache.commons.lang3.StringUtils;
-
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * In DataQualityTask, the input parameters are converted into a DataQualityConfiguration,
- * which is serialized to a string and passed as the argument of DataQualityApplication,
- * a Spark application
- */
-@Slf4j
-public class DataQualityTask extends AbstractYarnTask {
-
- /**
- * spark command
- */
- private static final String SPARK_COMMAND = "${SPARK_HOME}/bin/spark-submit";
-
- private DataQualityParameters dataQualityParameters;
-
- private final TaskExecutionContext dqTaskExecutionContext;
-
- public DataQualityTask(TaskExecutionContext taskExecutionContext) {
- super(taskExecutionContext);
- this.dqTaskExecutionContext = taskExecutionContext;
- }
-
- @Override
- public void init() {
-
- dataQualityParameters =
- JSONUtils.parseObject(dqTaskExecutionContext.getTaskParams(), DataQualityParameters.class);
- log.info("Initialize data quality task params {}", JSONUtils.toPrettyJsonString(dataQualityParameters));
-
- if (null == dataQualityParameters) {
- log.error("data quality params is null");
- return;
- }
-
- if (!dataQualityParameters.checkParameters()) {
- throw new RuntimeException("data quality task params is not valid");
- }
-
- Map<String, String> inputParameter = dataQualityParameters.getRuleInputParameter();
- for (Map.Entry<String, String> entry : inputParameter.entrySet()) {
- if (entry != null && entry.getValue() != null) {
- entry.setValue(entry.getValue().trim());
- }
- }
-
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext =
- dqTaskExecutionContext.getDataQualityTaskExecutionContext();
-
- operateInputParameter(inputParameter, dataQualityTaskExecutionContext);
-
- RuleManager ruleManager = new RuleManager(
- inputParameter,
- dataQualityTaskExecutionContext);
-
- DataQualityConfiguration dataQualityConfiguration =
- ruleManager.generateDataQualityParameter();
-
- log.info("data quality configuration: {}", JSONUtils.toPrettyJsonString(dataQualityConfiguration));
- dataQualityParameters
- .getSparkParameters()
- .setMainArgs("\""
- + replaceDoubleBrackets(
- StringEscapeUtils.escapeJava(JSONUtils.toJsonString(dataQualityConfiguration)))
- + "\"");
-
- setMainJarName();
- }
-
- private void operateInputParameter(Map<String, String> inputParameter,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- DateTimeFormatter df = DateTimeFormatter.ofPattern(YYYY_MM_DD_HH_MM_SS);
- LocalDateTime time = LocalDateTime.now();
- String now = df.format(time);
-
- inputParameter.put(RULE_ID, String.valueOf(dataQualityTaskExecutionContext.getRuleId()));
- inputParameter.put(RULE_TYPE, String.valueOf(dataQualityTaskExecutionContext.getRuleType()));
- inputParameter.put(RULE_NAME, ArgsUtils.wrapperSingleQuotes(dataQualityTaskExecutionContext.getRuleName()));
- inputParameter.put(CREATE_TIME, ArgsUtils.wrapperSingleQuotes(now));
- inputParameter.put(UPDATE_TIME, ArgsUtils.wrapperSingleQuotes(now));
- inputParameter.put(PROCESS_DEFINITION_ID, String.valueOf(dqTaskExecutionContext.getWorkflowDefinitionId()));
- inputParameter.put(PROCESS_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getWorkflowInstanceId()));
- inputParameter.put(TASK_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getTaskInstanceId()));
-
- if (StringUtils.isEmpty(inputParameter.get(DATA_TIME))) {
- inputParameter.put(DATA_TIME, ArgsUtils.wrapperSingleQuotes(now));
- }
-
- if (StringUtils.isNotEmpty(inputParameter.get(REGEXP_PATTERN))) {
- inputParameter.put(REGEXP_PATTERN,
- StringEscapeUtils.escapeJava(StringEscapeUtils.escapeJava(inputParameter.get(REGEXP_PATTERN))));
- }
-
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getHdfsPath())) {
- inputParameter.put(ERROR_OUTPUT_PATH,
- dataQualityTaskExecutionContext.getHdfsPath()
- + SLASH + dqTaskExecutionContext.getWorkflowDefinitionId()
- + UNDERLINE + dqTaskExecutionContext.getWorkflowInstanceId()
- + UNDERLINE + dqTaskExecutionContext.getTaskName());
- } else {
- inputParameter.put(ERROR_OUTPUT_PATH, "");
- }
- }
-
- @Override
- protected String getScript() {
- List<String> args = new ArrayList<>();
- args.add(SPARK_COMMAND);
- args.addAll(SparkArgsUtils.buildArgs(dataQualityParameters.getSparkParameters()));
- return args.stream().collect(Collectors.joining(" "));
- }
-
- @Override
- protected Map<String, String> getProperties() {
- return ParameterUtils.convert(dqTaskExecutionContext.getPrepareParamsMap());
- }
-
- protected void setMainJarName() {
- ResourceInfo mainJar = new ResourceInfo();
- mainJar.setResourceName(CommonUtils.getDataQualityJarPath());
- dataQualityParameters.getSparkParameters().setMainJar(mainJar);
- }
-
- @Override
- public AbstractParameters getParameters() {
- return dataQualityParameters;
- }
-
- private static String replaceDoubleBrackets(String mainParameter) {
- mainParameter = mainParameter
- .replace(Constants.DOUBLE_BRACKETS_LEFT, Constants.DOUBLE_BRACKETS_LEFT_SPACE)
- .replace(Constants.DOUBLE_BRACKETS_RIGHT, Constants.DOUBLE_BRACKETS_RIGHT_SPACE);
- if (mainParameter.contains(Constants.DOUBLE_BRACKETS_LEFT)
- || mainParameter.contains(Constants.DOUBLE_BRACKETS_RIGHT)) {
- return replaceDoubleBrackets(mainParameter);
- } else {
- return mainParameter;
- }
- }
-}
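For context on the recursive `replaceDoubleBrackets` above: the serialized DataQualityConfiguration travels to `spark-submit` as a single quoted argument, and consecutive braces such as `{{` would otherwise be read as parameter-placeholder syntax. A minimal standalone sketch of the same padding idea, assuming the `Constants` values expand `{{` to `{ {` and `}}` to `} }` (the constant values themselves are not shown in this diff):

```java
public final class DoubleBracketPaddingSketch {

    private static final String LEFT = "{{";
    private static final String LEFT_SPACE = "{ {";
    private static final String RIGHT = "}}";
    private static final String RIGHT_SPACE = "} }";

    // Mirrors DataQualityTask#replaceDoubleBrackets: insert spaces until no
    // "{{" or "}}" remains anywhere in the string.
    static String pad(String s) {
        s = s.replace(LEFT, LEFT_SPACE).replace(RIGHT, RIGHT_SPACE);
        return (s.contains(LEFT) || s.contains(RIGHT)) ? pad(s) : s;
    }

    public static void main(String[] args) {
        // A single pass can create new adjacent braces ("{{{" -> "{ {{"),
        // which is why the original method recurses.
        System.out.println(pad("{\"sql\":\"select '{{{x}}}'\"}"));
    }
}
```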
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java
deleted file mode 100644
index 1608de87afd3..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.task.api.AbstractTask;
-import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
-import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters;
-
-public class DataQualityTaskChannel implements TaskChannel {
-
- @Override
- public AbstractTask createTask(TaskExecutionContext taskRequest) {
- return new DataQualityTask(taskRequest);
- }
-
- @Override
- public AbstractParameters parseParameters(String taskParams) {
- return JSONUtils.parseObject(taskParams, DataQualityParameters.class);
- }
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java
deleted file mode 100644
index 7ad56daf136c..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq;
-
-import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
-import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory;
-
-import com.google.auto.service.AutoService;
-
-@AutoService(TaskChannelFactory.class)
-public class DataQualityTaskChannelFactory implements TaskChannelFactory {
-
- @Override
- public String getName() {
- return "DATA_QUALITY";
- }
-
- @Override
- public TaskChannel create() {
- return new DataQualityTaskChannel();
- }
-}
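The `@AutoService(TaskChannelFactory.class)` annotation above generates the `META-INF/services` registration that makes the factory discoverable through Java SPI. The worker-side lookup is not part of this diff; the sketch below shows the standard `ServiceLoader` pattern such a registration enables:

```java
import java.util.ServiceLoader;

import org.apache.dolphinscheduler.plugin.task.api.TaskChannel;
import org.apache.dolphinscheduler.plugin.task.api.TaskChannelFactory;

public class TaskChannelLookupSketch {

    public static void main(String[] args) {
        // Iterate every factory registered under META-INF/services and pick
        // the one whose name matches the stored task type.
        for (TaskChannelFactory factory : ServiceLoader.load(TaskChannelFactory.class)) {
            if ("DATA_QUALITY".equals(factory.getName())) {
                TaskChannel channel = factory.create();
                System.out.println("resolved channel: " + channel.getClass().getName());
            }
        }
    }
}
```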
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java
deleted file mode 100644
index 8bc820f34c25..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.exception;
-
-/**
- * data quality runtime exception
- */
-public class DataQualityException extends RuntimeException {
-
- public DataQualityException() {
- super();
- }
-
- public DataQualityException(String message) {
- super(message);
- }
-
- public DataQualityException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public DataQualityException(Throwable cause) {
- super(cause);
- }
-
- protected DataQualityException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java
deleted file mode 100644
index b71b9e911cfe..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule;
-
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.SINGLE_QUOTES;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.UNIQUE_CODE;
-
-import org.apache.dolphinscheduler.common.enums.CommandType;
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.IRuleParser;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MultiTableAccuracyRuleParser;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MultiTableComparisonRuleParser;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.SingleTableCustomSqlRuleParser;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.SingleTableRuleParser;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
-
-import java.util.Date;
-import java.util.Map;
-
-/**
- * RuleManager is responsible for parsing the input parameters into a DataQualityConfiguration,
- * which is then used by DataQualityApplication
- */
-public class RuleManager {
-
- private final Map<String, String> inputParameterValue;
- private final DataQualityTaskExecutionContext dataQualityTaskExecutionContext;
-
- private static final String NONE_COMPARISON_TYPE = "0";
- private static final String BASE_SQL =
- "select ${rule_type} as rule_type,"
- + "${rule_name} as rule_name,"
- + "${process_definition_id} as process_definition_id,"
- + "${process_instance_id} as process_instance_id,"
- + "${task_instance_id} as task_instance_id,"
- + "${statistics_name} AS statistics_value,"
- + "${comparison_name} AS comparison_value,"
- + "${comparison_type} AS comparison_type,"
- + "${check_type} as check_type,"
- + "${threshold} as threshold,"
- + "${operator} as operator,"
- + "${failure_strategy} as failure_strategy,"
- + "'${error_output_path}' as error_output_path,"
- + "${create_time} as create_time,"
- + "${update_time} as update_time ";
-
- public static final String DEFAULT_COMPARISON_WRITER_SQL =
- BASE_SQL + "from ${statistics_table} full join ${comparison_table}";
-
- public static final String MULTI_TABLE_COMPARISON_WRITER_SQL =
- BASE_SQL
- + "from ( ${statistics_execute_sql} ) tmp1 "
- + "join ( ${comparison_execute_sql} ) tmp2";
-
- public static final String SINGLE_TABLE_CUSTOM_SQL_WRITER_SQL =
- BASE_SQL
- + "from ( ${statistics_table} ) tmp1 "
- + "join ${comparison_table}";
- public static final String TASK_STATISTICS_VALUE_WRITER_SQL =
- "select "
- + "${process_definition_id} as process_definition_id,"
- + "${task_instance_id} as task_instance_id,"
- + "${rule_id} as rule_id,"
- + "${unique_code} as unique_code,"
- + "'${statistics_name}' AS statistics_name,"
- + "${statistics_name} AS statistics_value,"
- + "${data_time} as data_time,"
- + "${create_time} as create_time,"
- + "${update_time} as update_time "
- + "from ${statistics_table}";
-
- public RuleManager(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- this.inputParameterValue = inputParameterValue;
- this.dataQualityTaskExecutionContext = dataQualityTaskExecutionContext;
- }
-
- /**
- * @return DataQualityConfiguration
- * @throws RuntimeException RuntimeException
- */
- public DataQualityConfiguration generateDataQualityParameter() throws RuntimeException {
-
- Map<String, String> inputParameterValueResult =
- RuleParserUtils.getInputParameterMapFromEntryList(
- JSONUtils.toList(dataQualityTaskExecutionContext.getRuleInputEntryList(),
- DqRuleInputEntry.class));
- inputParameterValueResult.putAll(inputParameterValue);
- inputParameterValueResult
- .putAll(BusinessTimeUtils.getBusinessTime(CommandType.START_PROCESS, new Date(), null));
- inputParameterValueResult.putIfAbsent(COMPARISON_TYPE, NONE_COMPARISON_TYPE);
- inputParameterValueResult.put(UNIQUE_CODE,
- SINGLE_QUOTES + RuleParserUtils.generateUniqueCode(inputParameterValueResult) + SINGLE_QUOTES);
-
- IRuleParser ruleParser = null;
- switch (RuleType.of(dataQualityTaskExecutionContext.getRuleType())) {
- case SINGLE_TABLE:
- ruleParser = new SingleTableRuleParser();
- break;
- case SINGLE_TABLE_CUSTOM_SQL:
- ruleParser = new SingleTableCustomSqlRuleParser();
- break;
- case MULTI_TABLE_ACCURACY:
- ruleParser = new MultiTableAccuracyRuleParser();
- break;
- case MULTI_TABLE_COMPARISON:
- ruleParser = new MultiTableComparisonRuleParser();
- break;
- default:
- throw new DataQualityException("rule type is not supported");
- }
-
- return ruleParser.parse(inputParameterValueResult, dataQualityTaskExecutionContext);
- }
-}
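The writer SQL constants above are plain `${placeholder}` templates filled from the merged input-parameter map; the real substitution goes through `ParameterUtils.convertParameterPlaceholders` (used by the parsers further down). A simplified stand-in for the mechanics:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class WriterSqlSketch {

    private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{(\\w+)}");

    // Replace each ${name} with its value from the parameter map, leaving
    // unknown placeholders untouched.
    static String fill(String template, Map<String, String> params) {
        Matcher m = PLACEHOLDER.matcher(template);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
            String value = params.getOrDefault(m.group(1), m.group(0));
            m.appendReplacement(sb, Matcher.quoteReplacement(value));
        }
        m.appendTail(sb);
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("rule_type", "0");
        params.put("rule_name", "'null check'");
        params.put("statistics_table", "null_items");
        System.out.println(fill(
                "select ${rule_type} as rule_type, ${rule_name} as rule_name from ${statistics_table}",
                params));
    }
}
```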
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java
deleted file mode 100644
index ad45c1a9a556..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.entity;
-
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-@Data
-public class DqRuleExecuteSql implements Serializable {
-
- /**
- * primary key
- */
- private Integer id;
- /**
- * index, ensures the execution order of the SQL statements
- */
- private int index;
- /**
- * SQL Statement
- */
- private String sql;
- /**
- * table alias name
- */
- private String tableAlias;
- /**
- * execute sql type: default, statistics, comparison, check
- */
- private int type = ExecuteSqlType.MIDDLE.getCode();
- /**
- * is error output sql
- */
- private boolean isErrorOutputSql;
- /**
- * create_time
- */
- private Date createTime;
- /**
- * update_time
- */
- private Date updateTime;
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java
deleted file mode 100644
index bce50133732a..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.entity;
-
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import lombok.Data;
-
-@Data
-public class DqRuleInputEntry implements Serializable {
-
- /**
- * primary key
- */
- private int id;
- /**
- * form field name
- */
- private String field;
- /**
- * form type
- */
- private String type;
- /**
- * form title
- */
- private String title;
- /**
- * default data,can be null
- */
- private String data;
- /**
- * default options,can be null
- * [{label:"",value:""}]
- */
- private String options;
- /**
- * ${field}
- */
- private String placeholder;
- /**
- * the source type of options,use default options or other
- */
- private int optionSourceType = OptionSourceType.DEFAULT.getCode();
- /**
- * input entry data type: string, array, number, etc.
- */
- private int dataType = DataType.NUMBER.getCode();
- /**
- * input entry type: default,statistics,comparison
- */
- private int inputType = InputType.DEFAULT.getCode();
- /**
- * whether to display on the front end
- */
- private Boolean isShow;
- /**
- * whether to edit on the front end
- */
- private Boolean canEdit;
- /**
- * is emit event
- */
- private Boolean isEmit;
- /**
- * is validate
- */
- private Boolean isValidate;
- /**
- * values map
- */
- private String valuesMap;
- /**
- * index
- */
- private Integer index;
- /**
- * create_time
- */
- private Date createTime;
- /**
- * update_time
- */
- private Date updateTime;
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java
deleted file mode 100644
index cbcf90846412..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;
-
-import java.util.Map;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * BaseConfig
- */
-public class BaseConfig {
-
- @JsonProperty("type")
- private String type;
-
- @JsonProperty("config")
- private Map<String, Object> config;
-
- public BaseConfig() {
- }
-
- public BaseConfig(String type, Map<String, Object> config) {
- this.type = type;
- this.config = config;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public Map<String, Object> getConfig() {
- return config;
- }
-
- public void setConfig(Map<String, Object> config) {
- this.config = config;
- }
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/DataQualityConfiguration.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/DataQualityConfiguration.java
deleted file mode 100644
index 3773932bac27..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/DataQualityConfiguration.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;
-
-import java.util.List;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * DataQualityConfiguration
- * The reader is responsible for connecting to the data source,
- * and the transformer is responsible for transforming the data from the reader,
- * and the writer is responsible for writing the data to the target datasource
- */
-public class DataQualityConfiguration {
-
- @JsonProperty("name")
- private String name;
-
- @JsonProperty("env")
- private EnvConfig envConfig;
-
- @JsonProperty("readers")
- private List<BaseConfig> readerConfigs;
-
- @JsonProperty("transformers")
- private List<BaseConfig> transformerConfigs;
-
- @JsonProperty("writers")
- private List<BaseConfig> writerConfigs;
-
- public DataQualityConfiguration() {
- }
-
- public DataQualityConfiguration(String name,
- List<BaseConfig> readerConfigs,
- List<BaseConfig> writerConfigs,
- List<BaseConfig> transformerConfigs) {
- this.name = name;
- this.readerConfigs = readerConfigs;
- this.writerConfigs = writerConfigs;
- this.transformerConfigs = transformerConfigs;
- }
-
- public DataQualityConfiguration(String name,
- EnvConfig envConfig,
- List<BaseConfig> readerConfigs,
- List<BaseConfig> writerConfigs,
- List<BaseConfig> transformerConfigs) {
- this.name = name;
- this.envConfig = envConfig;
- this.readerConfigs = readerConfigs;
- this.writerConfigs = writerConfigs;
- this.transformerConfigs = transformerConfigs;
- }
-
- public String getName() {
- return name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public EnvConfig getEnvConfig() {
- return envConfig;
- }
-
- public void setEnvConfig(EnvConfig envConfig) {
- this.envConfig = envConfig;
- }
-
- public List<BaseConfig> getReaderConfigs() {
- return readerConfigs;
- }
-
- public void setReaderConfigs(List<BaseConfig> readerConfigs) {
- this.readerConfigs = readerConfigs;
- }
-
- public List<BaseConfig> getTransformerConfigs() {
- return transformerConfigs;
- }
-
- public void setTransformerConfigs(List<BaseConfig> transformerConfigs) {
- this.transformerConfigs = transformerConfigs;
- }
-
- public List<BaseConfig> getWriterConfigs() {
- return writerConfigs;
- }
-
- public void setWriterConfigs(List<BaseConfig> writerConfigs) {
- this.writerConfigs = writerConfigs;
- }
-
- @Override
- public String toString() {
- return "DataQualityConfiguration{"
- + "name='" + name + '\''
- + ", envConfig=" + envConfig
- + ", readerConfigs=" + readerConfigs
- + ", transformerConfigs=" + transformerConfigs
- + ", writerConfigs=" + writerConfigs
- + '}';
- }
-}
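For a concrete picture of what this class serializes to: DolphinScheduler's `JSONUtils` is backed by Jackson, so the `@JsonProperty` names above become the JSON keys that `DataQualityApplication` receives. A sketch, where the `"JDBC"` reader type and `"batch"` env type are illustrative values rather than values taken from this diff:

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.EnvConfig;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ConfigJsonSketch {

    public static void main(String[] args) throws Exception {
        Map<String, Object> readerConfig = new HashMap<>();
        readerConfig.put("database", "dolphinscheduler");
        readerConfig.put("table", "t_ds_user");

        DataQualityConfiguration configuration = new DataQualityConfiguration(
                "null_check",                              // rule name
                new EnvConfig("batch", new HashMap<>()),   // assumed env type
                Arrays.asList(new BaseConfig("JDBC", readerConfig)),
                Collections.emptyList(),                   // writers omitted for brevity
                Collections.emptyList());                  // transformers omitted for brevity

        // Prints JSON along the lines of:
        // {"name":"null_check","env":{"type":"batch","config":{}},
        //  "readers":[{"type":"JDBC","config":{...}}],"transformers":[],"writers":[]}
        System.out.println(new ObjectMapper().writeValueAsString(configuration));
    }
}
```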
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/EnvConfig.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/EnvConfig.java
deleted file mode 100644
index 5da079e69c6f..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/EnvConfig.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;
-
-import java.util.Map;
-
-/**
- * EnvConfig
- */
-public class EnvConfig extends BaseConfig {
-
- public EnvConfig() {
- }
-
- public EnvConfig(String type, Map<String, Object> config) {
- super(type, config);
- }
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/IRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/IRuleParser.java
deleted file mode 100644
index eb3801efc785..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/IRuleParser.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-
-import java.util.Map;
-
-/**
- * IRuleParser is a component that actually converts input parameters to DataQualityConfiguration
- */
-public interface IRuleParser {
-
- DataQualityConfiguration parse(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context) throws DataQualityException;
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MappingColumn.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MappingColumn.java
deleted file mode 100644
index 05e09efe2762..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MappingColumn.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-/**
- * MappingColumn
- */
-public class MappingColumn {
-
- private String srcField;
- private String operator;
- private String targetField;
-
- public MappingColumn() {
- }
-
- public MappingColumn(String srcField, String operator, String targetField) {
- this.srcField = srcField;
- this.operator = operator;
- this.targetField = targetField;
- }
-
- public String getSrcField() {
- return srcField;
- }
-
- public void setSrcField(String srcField) {
- this.srcField = srcField;
- }
-
- public String getOperator() {
- return operator;
- }
-
- public void setOperator(String operator) {
- this.operator = operator;
- }
-
- public String getTargetField() {
- return targetField;
- }
-
- public void setTargetField(String targetField) {
- this.targetField = targetField;
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java
deleted file mode 100644
index ed94b873b2b6..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.MAPPING_COLUMNS;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.ON_CLAUSE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.WHERE_CLAUSE;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * MultiTableAccuracyRuleParser
- */
-public class MultiTableAccuracyRuleParser implements IRuleParser {
-
- @Override
- public DataQualityConfiguration parse(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context) throws DataQualityException {
- List<DqRuleExecuteSql> dqRuleExecuteSqlList =
- JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);
-
- DqRuleExecuteSql statisticsSql =
- RuleParserUtils.getExecuteSqlListByType(
- dqRuleExecuteSqlList, ExecuteSqlType.STATISTICS).get(0);
- inputParameterValue.put(STATISTICS_TABLE, statisticsSql.getTableAlias());
-
- int index = 1;
-
- List<BaseConfig> readerConfigList =
- RuleParserUtils.getReaderConfigList(inputParameterValue, context);
-
- RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);
-
- List<BaseConfig> transformerConfigList = new ArrayList<>();
-
- List<MappingColumn> mappingColumnList =
- RuleParserUtils.getMappingColumnList(inputParameterValue.get(MAPPING_COLUMNS));
-
- // get on clause
- inputParameterValue.put(ON_CLAUSE, RuleParserUtils.getOnClause(mappingColumnList, inputParameterValue));
- // get where clause
- inputParameterValue.put(WHERE_CLAUSE, RuleParserUtils.getWhereClause(mappingColumnList, inputParameterValue));
-
- index = RuleParserUtils.replaceExecuteSqlPlaceholder(
- dqRuleExecuteSqlList,
- index,
- inputParameterValue,
- transformerConfigList);
-
- String writerSql = RuleManager.DEFAULT_COMPARISON_WRITER_SQL;
- if (context.isCompareWithFixedValue()) {
- writerSql = writerSql.replaceAll("full join \\$\\{comparison_table}", "");
- }
-
- List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
- context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);
-
- return new DataQualityConfiguration(
- context.getRuleName(),
- RuleParserUtils.getEnvConfig(),
- readerConfigList,
- writerConfigList,
- transformerConfigList);
- }
-}
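The `${on_clause}` and `${where_clause}` values injected above come from `RuleParserUtils.getOnClause`/`getWhereClause`, whose source is truncated in this diff. A hedged sketch of the obvious construction, assuming `t1`/`t2` stand for the source and target table aliases:

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MappingColumn;

public class OnClauseSketch {

    // Join each mapped column pair with its comparison operator, ANDed together.
    static String onClause(List<MappingColumn> columns) {
        return columns.stream()
                .map(c -> "t1." + c.getSrcField() + " " + c.getOperator() + " t2." + c.getTargetField())
                .collect(Collectors.joining(" AND "));
    }

    public static void main(String[] args) {
        List<MappingColumn> columns = Arrays.asList(
                new MappingColumn("id", "=", "id"),
                new MappingColumn("name", "=", "user_name"));
        // Prints: t1.id = t2.id AND t1.name = t2.user_name
        System.out.println(onClause(columns));
    }
}
```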
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableComparisonRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableComparisonRuleParser.java
deleted file mode 100644
index 27a1f83838fb..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableComparisonRuleParser.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * MultiTableComparisonRuleParser
- */
-public class MultiTableComparisonRuleParser implements IRuleParser {
-
- @Override
- public DataQualityConfiguration parse(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context) throws DataQualityException {
-
- List<BaseConfig> readerConfigList =
- RuleParserUtils.getReaderConfigList(inputParameterValue, context);
- RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);
-
- List<BaseConfig> transformerConfigList = new ArrayList<>();
-
- List<BaseConfig> writerConfigList = RuleParserUtils.getWriterConfigList(
- ParameterUtils.convertParameterPlaceholders(RuleManager.MULTI_TABLE_COMPARISON_WRITER_SQL,
- inputParameterValue),
- context);
-
- return new DataQualityConfiguration(
- context.getRuleName(),
- RuleParserUtils.getEnvConfig(),
- readerConfigList,
- writerConfigList,
- transformerConfigList);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableCustomSqlRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableCustomSqlRuleParser.java
deleted file mode 100644
index aaf0316c9246..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableCustomSqlRuleParser.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * SingleTableCustomSqlRuleParser
- */
-public class SingleTableCustomSqlRuleParser implements IRuleParser {
-
- @Override
- public DataQualityConfiguration parse(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context) throws DataQualityException {
- List<DqRuleExecuteSql> dqRuleExecuteSqlList =
- JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);
-
- int index = 1;
-
- List<BaseConfig> readerConfigList =
- RuleParserUtils.getReaderConfigList(inputParameterValue, context);
- RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);
-
- List<BaseConfig> transformerConfigList = RuleParserUtils
- .getSingleTableCustomSqlTransformerConfigList(index, inputParameterValue);
-
- // replace the placeholder in execute sql list
- index = RuleParserUtils.replaceExecuteSqlPlaceholder(
- dqRuleExecuteSqlList,
- index,
- inputParameterValue,
- transformerConfigList);
-
- String writerSql = RuleManager.SINGLE_TABLE_CUSTOM_SQL_WRITER_SQL;
- if (context.isCompareWithFixedValue()) {
- writerSql = writerSql.replaceAll("join \\$\\{comparison_table}", "");
- }
-
- List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
- context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);
-
- return new DataQualityConfiguration(
- context.getRuleName(),
- RuleParserUtils.getEnvConfig(),
- readerConfigList,
- writerConfigList,
- transformerConfigList);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableRuleParser.java
deleted file mode 100644
index fd0cc1b398e7..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableRuleParser.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;
-
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_TABLE;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
-import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * SingleTableRuleParser
- */
-public class SingleTableRuleParser implements IRuleParser {
-
- @Override
- public DataQualityConfiguration parse(Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context) throws DataQualityException {
- List<DqRuleExecuteSql> dqRuleExecuteSqlList =
- JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);
-
- DqRuleExecuteSql statisticsSql =
- RuleParserUtils.getExecuteSqlListByType(dqRuleExecuteSqlList, ExecuteSqlType.STATISTICS).get(0);
- inputParameterValue.put(STATISTICS_TABLE, statisticsSql.getTableAlias());
-
- int index = 1;
-
- List<BaseConfig> readerConfigList =
- RuleParserUtils.getReaderConfigList(inputParameterValue, context);
- RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);
-
- List<BaseConfig> transformerConfigList = new ArrayList<>();
-
- // replace the placeholder in execute sql list
- index = RuleParserUtils.replaceExecuteSqlPlaceholder(
- dqRuleExecuteSqlList,
- index,
- inputParameterValue,
- transformerConfigList);
-
- String writerSql = RuleManager.DEFAULT_COMPARISON_WRITER_SQL;
-
- if (context.isCompareWithFixedValue()) {
- writerSql = writerSql.replaceAll("full join \\$\\{comparison_table}", "");
- }
-
- List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
- context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);
-
- return new DataQualityConfiguration(
- context.getRuleName(),
- RuleParserUtils.getEnvConfig(),
- readerConfigList,
- writerConfigList,
- transformerConfigList);
- }
-}
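All of the single/multi-table parsers above share the fixed-value branch: when the comparison side is a constant there is no comparison table to join, so the join fragment is stripped from the writer SQL with `replaceAll`. A tiny demonstration of that regex:

```java
public class JoinStripSketch {

    public static void main(String[] args) {
        String writerSql = "select ... from ${statistics_table} full join ${comparison_table}";
        // "$" and "{" are regex metacharacters, hence the escaping in the parsers.
        System.out.println(writerSql.replaceAll("full join \\$\\{comparison_table}", ""));
        // Prints: select ... from ${statistics_table}
    }
}
```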
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java
deleted file mode 100644
index ce0c8766c475..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.utils;
-
-import java.nio.charset.StandardCharsets;
-import java.security.MessageDigest;
-import java.util.Base64;
-
-import lombok.experimental.UtilityClass;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-@UtilityClass
-public final class Md5Utils {
-
- public static String getMd5(String src, boolean isUpper) {
- String md5 = "";
- try {
- MessageDigest md = MessageDigest.getInstance("SHA-256");
- Base64.Encoder encoder = Base64.getEncoder();
- md5 = encoder.encodeToString(md.digest(src.getBytes(StandardCharsets.UTF_8)));
- } catch (Exception e) {
- log.error("get md5 error: {}", e.getMessage());
- }
-
- if (isUpper) {
- md5 = md5.toUpperCase();
- }
-
- return md5;
- }
-}
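A note on the utility above: despite its name, `getMd5` computes a Base64-encoded SHA-256 digest, which (given the `generateUniqueCode` call in `RuleManager`) presumably feeds the rule's unique code — an assumption, since `RuleParserUtils`'s body is truncated in this diff. The same computation with the JDK directly:

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;

public class DigestSketch {

    public static void main(String[] args) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        byte[] digest = md.digest(
                "rule_type=0,rule_name='null check'".getBytes(StandardCharsets.UTF_8));
        // Equivalent to Md5Utils.getMd5(src, false). Note that the isUpper
        // branch uppercases a Base64 string, which is lossy because Base64
        // is case-sensitive.
        System.out.println(Base64.getEncoder().encodeToString(digest));
    }
}
```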
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java
deleted file mode 100644
index 185573f66e3d..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java
+++ /dev/null
@@ -1,592 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.utils;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_BUSINESS_DATE;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_CURRENT_DATE;
-import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.PARAMETER_DATETIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.AND;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.BATCH;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.CHECK_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_NAME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.CREATE_TIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.DATABASE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.DATA_TIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.DRIVER;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.ERROR_OUTPUT_PATH;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.FAILURE_STRATEGY;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.HDFS_FILE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.INDEX;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.INPUT_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.OPERATOR;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.OUTPUT_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PASSWORD;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PATH;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PROCESS_DEFINITION_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.PROCESS_INSTANCE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_NAME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_TYPE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SQL;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_DATABASE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_FIELD;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_FILTER;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_EXECUTE_SQL;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_DATABASE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_FIELD;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_FILTER;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TASK_INSTANCE_ID;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.THRESHOLD;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.UPDATE_TIME;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.URL;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.USER;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.api.utils.MapUtils;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
-import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.EnvConfig;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MappingColumn;
-import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam;
-import org.apache.dolphinscheduler.spi.enums.DbType;
-
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-
-import java.net.URLEncoder;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import lombok.SneakyThrows;
-
-import com.fasterxml.jackson.databind.node.ArrayNode;
-
-/**
- * Utility methods for assembling data quality rule reader, transformer and writer configurations.
- */
-public class RuleParserUtils {
-
- private RuleParserUtils() {
- throw new IllegalStateException("Utility class");
- }
-
- private static final String AND_SRC_FILTER = "AND (${src_filter})";
- private static final String WHERE_SRC_FILTER = "WHERE (${src_filter})";
- private static final String AND_TARGET_FILTER = "AND (${target_filter})";
- private static final String WHERE_TARGET_FILTER = "WHERE (${target_filter})";
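- // These filter fragments are stripped from the rule SQL by checkAndReplace when no filter value is supplied.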
-
- @SneakyThrows
- public static List<BaseConfig> getReaderConfigList(
- Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
-
- List<BaseConfig> readerConfigList = new ArrayList<>();
-
- // all rules need the source config
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getSourceConnectorType())) {
- BaseConnectionParam sourceDataSource =
- (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
- DbType.of(dataQualityTaskExecutionContext.getSourceType()),
- dataQualityTaskExecutionContext.getSourceConnectionParams());
- BaseConfig sourceBaseConfig = new BaseConfig();
- sourceBaseConfig.setType(dataQualityTaskExecutionContext.getSourceConnectorType());
- Map<String, Object> config = new HashMap<>();
- if (sourceDataSource != null) {
- config.put(DATABASE, inputParameterValue.get(SRC_DATABASE));
- config.put(TABLE, inputParameterValue.get(SRC_TABLE));
- config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getSourceType()),
- sourceDataSource));
- config.put(USER, sourceDataSource.getUser());
- config.put(PASSWORD, URLEncoder.encode(sourceDataSource.getPassword(), UTF_8.name()));
- config.put(DRIVER, DataSourceUtils
- .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getSourceType())));
- String outputTable = inputParameterValue.get(SRC_DATABASE) + "_" + inputParameterValue.get(SRC_TABLE);
- config.put(OUTPUT_TABLE, outputTable);
- inputParameterValue.put(SRC_TABLE, outputTable);
- }
- sourceBaseConfig.setConfig(config);
-
- readerConfigList.add(sourceBaseConfig);
- }
-
- // MultiTableAccuracyRule needs the target config
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getTargetConnectorType())) {
- BaseConnectionParam targetDataSource =
- (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
- DbType.of(dataQualityTaskExecutionContext.getTargetType()),
- dataQualityTaskExecutionContext.getTargetConnectionParams());
- BaseConfig targetBaseConfig = new BaseConfig();
- targetBaseConfig.setType(dataQualityTaskExecutionContext.getTargetConnectorType());
- Map<String, Object> config = new HashMap<>();
- if (targetDataSource != null) {
- config.put(DATABASE, inputParameterValue.get(TARGET_DATABASE));
- config.put(TABLE, inputParameterValue.get(TARGET_TABLE));
- config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getTargetType()),
- targetDataSource));
- config.put(USER, targetDataSource.getUser());
- config.put(PASSWORD, URLEncoder.encode(targetDataSource.getPassword(), UTF_8.name()));
- config.put(DRIVER, DataSourceUtils
- .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getTargetType())));
- String outputTable =
- inputParameterValue.get(TARGET_DATABASE) + "_" + inputParameterValue.get(TARGET_TABLE);
- config.put(OUTPUT_TABLE, outputTable);
- inputParameterValue.put(TARGET_TABLE, outputTable);
- }
- targetBaseConfig.setConfig(config);
-
- readerConfigList.add(targetBaseConfig);
- }
-
- return readerConfigList;
- }
-
- public static int replaceExecuteSqlPlaceholder(List<DqRuleExecuteSql> executeSqlList,
- int index, Map<String, String> inputParameterValueResult,
- List<BaseConfig> transformerConfigList) {
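- // Strip unused filter placeholders from the middle/statistics SQL, then register each statement as a SQL transformer.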
- List<DqRuleExecuteSql> midExecuteSqlDefinitionList =
- getExecuteSqlListByType(executeSqlList, ExecuteSqlType.MIDDLE);
-
- List<DqRuleExecuteSql> statisticsExecuteSqlDefinitionList =
- getExecuteSqlListByType(executeSqlList, ExecuteSqlType.STATISTICS);
-
- checkAndReplace(midExecuteSqlDefinitionList, inputParameterValueResult.get(SRC_FILTER), AND_SRC_FILTER);
- checkAndReplace(midExecuteSqlDefinitionList, inputParameterValueResult.get(SRC_FILTER), WHERE_SRC_FILTER);
- checkAndReplace(statisticsExecuteSqlDefinitionList, inputParameterValueResult.get(SRC_FILTER), AND_SRC_FILTER);
- checkAndReplace(statisticsExecuteSqlDefinitionList, inputParameterValueResult.get(SRC_FILTER),
- WHERE_SRC_FILTER);
-
- checkAndReplace(midExecuteSqlDefinitionList, inputParameterValueResult.get(TARGET_FILTER), AND_TARGET_FILTER);
- checkAndReplace(midExecuteSqlDefinitionList, inputParameterValueResult.get(TARGET_FILTER), WHERE_TARGET_FILTER);
- checkAndReplace(statisticsExecuteSqlDefinitionList, inputParameterValueResult.get(TARGET_FILTER),
- AND_TARGET_FILTER);
- checkAndReplace(statisticsExecuteSqlDefinitionList, inputParameterValueResult.get(TARGET_FILTER),
- WHERE_TARGET_FILTER);
-
- if (CollectionUtils.isNotEmpty(midExecuteSqlDefinitionList)) {
- for (DqRuleExecuteSql executeSqlDefinition : midExecuteSqlDefinitionList) {
- index = setTransformerConfig(
- index,
- inputParameterValueResult,
- transformerConfigList,
- executeSqlDefinition);
- }
- }
-
- if (CollectionUtils.isNotEmpty(statisticsExecuteSqlDefinitionList)) {
- for (DqRuleExecuteSql executeSqlDefinition : statisticsExecuteSqlDefinitionList) {
- index = setTransformerConfig(
- index,
- inputParameterValueResult,
- transformerConfigList,
- executeSqlDefinition);
- }
- }
-
- return index;
- }
-
- private static int setTransformerConfig(int index,
- Map<String, String> inputParameterValueResult,
- List<BaseConfig> transformerConfigList,
- DqRuleExecuteSql executeSqlDefinition) {
- Map<String, Object> config = new HashMap<>();
- config.put(INDEX, index++);
- config.put(SQL,
- ParameterUtils.convertParameterPlaceholders(executeSqlDefinition.getSql(), inputParameterValueResult));
- config.put(OUTPUT_TABLE, executeSqlDefinition.getTableAlias());
-
- BaseConfig transformerConfig = new BaseConfig(SQL, config);
- transformerConfigList.add(transformerConfig);
- return index;
- }
-
- public static List<BaseConfig> getSingleTableCustomSqlTransformerConfigList(int index,
- Map<String, String> inputParameterValueResult) {
- List<BaseConfig> list = new ArrayList<>();
-
- Map<String, Object> config = new HashMap<>();
- config.put(INDEX, index + 1);
- config.put(SQL, ParameterUtils.convertParameterPlaceholders(
- inputParameterValueResult.get(STATISTICS_EXECUTE_SQL), inputParameterValueResult));
- config.put(OUTPUT_TABLE, inputParameterValueResult.get(SRC_TABLE));
- inputParameterValueResult.put(STATISTICS_TABLE, inputParameterValueResult.get(SRC_TABLE));
- BaseConfig transformerConfig = new BaseConfig(SQL, config);
- list.add(transformerConfig);
- return list;
- }
-
- private static String getCoalesceString(String table, String column) {
- return "coalesce(" + table + "." + column + ", '')";
- }
-
- private static String getSrcColumnIsNullStr(String table, List<String> columns) {
- String[] columnList = new String[columns.size()];
- for (int i = 0; i < columns.size(); i++) {
- String column = columns.get(i);
- columnList[i] = table + "." + column + " IS NULL";
- }
- return String.join(AND, columnList);
- }
-
- public static Map<String, String> getInputParameterMapFromEntryList(List<DqRuleInputEntry> defaultInputEntryList) {
-
- Map<String, String> defaultInputParameterValue = new HashMap<>();
- for (DqRuleInputEntry inputEntry : defaultInputEntryList) {
- defaultInputParameterValue.put(inputEntry.getField(), inputEntry.getData());
- }
-
- return defaultInputParameterValue;
- }
-
- @SneakyThrows
- public static List<BaseConfig> getWriterConfigList(
- String sql,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
-
- List<BaseConfig> writerConfigList = new ArrayList<>();
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getWriterConnectorType())) {
- BaseConnectionParam writerDataSource =
- (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
- DbType.of(dataQualityTaskExecutionContext.getWriterType()),
- dataQualityTaskExecutionContext.getWriterConnectionParams());
- BaseConfig writerConfig = new BaseConfig();
- writerConfig.setType(dataQualityTaskExecutionContext.getWriterConnectorType());
-
- Map<String, Object> config = new HashMap<>();
- if (writerDataSource != null) {
- config.put(DATABASE, writerDataSource.getDatabase());
- config.put(TABLE, dataQualityTaskExecutionContext.getWriterTable());
- config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getWriterType()),
- writerDataSource));
- config.put(USER, writerDataSource.getUser());
- config.put(PASSWORD, URLEncoder.encode(writerDataSource.getPassword(), UTF_8.name()));
- config.put(DRIVER, DataSourceUtils
- .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType())));
- config.put(SQL, sql);
- }
- writerConfig.setConfig(config);
- writerConfigList.add(writerConfig);
- }
-
- return writerConfigList;
- }
-
- public static void addStatisticsValueTableReaderConfig(List<BaseConfig> readerConfigList,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- if (dataQualityTaskExecutionContext.isComparisonNeedStatisticsValueTable()) {
- List<BaseConfig> statisticsBaseConfigList =
- RuleParserUtils.getStatisticsValueConfigReaderList(dataQualityTaskExecutionContext);
- readerConfigList.addAll(statisticsBaseConfigList);
- }
- }
-
- public static List<BaseConfig> getStatisticsValueConfigWriterList(
- String sql,
- Map<String, String> inputParameterValueResult,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException {
-
- List<BaseConfig> writerConfigList = new ArrayList<>();
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) {
- BaseConfig writerConfig = getStatisticsValueConfig(dataQualityTaskExecutionContext);
- if (writerConfig != null) {
- writerConfig.getConfig().put(SQL,
- ParameterUtils.convertParameterPlaceholders(sql, inputParameterValueResult));
- }
- writerConfigList.add(writerConfig);
- }
- return writerConfigList;
- }
-
- public static List<BaseConfig> getStatisticsValueConfigReaderList(
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException {
-
- List<BaseConfig> readerConfigList = new ArrayList<>();
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) {
- BaseConfig readerConfig = getStatisticsValueConfig(dataQualityTaskExecutionContext);
- if (readerConfig != null) {
- readerConfig.getConfig().put(OUTPUT_TABLE, dataQualityTaskExecutionContext.getStatisticsValueTable());
- }
- readerConfigList.add(readerConfig);
- }
- return readerConfigList;
- }
-
- @SneakyThrows
- public static BaseConfig getStatisticsValueConfig(
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- BaseConfig baseConfig = null;
- if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) {
- BaseConnectionParam writerDataSource =
- (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
- DbType.of(dataQualityTaskExecutionContext.getStatisticsValueType()),
- dataQualityTaskExecutionContext.getStatisticsValueWriterConnectionParams());
- baseConfig = new BaseConfig();
- baseConfig.setType(dataQualityTaskExecutionContext.getStatisticsValueConnectorType());
-
- Map<String, Object> config = new HashMap<>();
- if (writerDataSource != null) {
- config.put(DATABASE, writerDataSource.getDatabase());
- config.put(TABLE, dataQualityTaskExecutionContext.getStatisticsValueTable());
- config.put(URL, DataSourceUtils.getJdbcUrl(
- DbType.of(dataQualityTaskExecutionContext.getStatisticsValueType()), writerDataSource));
- config.put(USER, writerDataSource.getUser());
- config.put(PASSWORD, URLEncoder.encode(writerDataSource.getPassword(), UTF_8.name()));
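- // Note: the driver class below is resolved from the writer type rather than the statistics value type.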
- config.put(DRIVER, DataSourceUtils
- .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType())));
- }
-
- baseConfig.setConfig(config);
- }
-
- return baseConfig;
- }
-
- public static String getOnClause(List<MappingColumn> mappingColumnList,
- Map<String, String> inputParameterValueResult) {
- // build the join condition: one coalesce-equality term per mapped column pair
- String[] columnList = new String[mappingColumnList.size()];
- for (int i = 0; i < mappingColumnList.size(); i++) {
- MappingColumn column = mappingColumnList.get(i);
- columnList[i] = getCoalesceString(inputParameterValueResult.get(SRC_TABLE), column.getSrcField())
- + column.getOperator()
- + getCoalesceString(inputParameterValueResult.get(TARGET_TABLE), column.getTargetField());
- }
-
- return String.join(AND, columnList);
- }
-
- public static String getWhereClause(List<MappingColumn> mappingColumnList,
- Map<String, String> inputParameterValueResult) {
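- // Keep rows whose source-side columns are present while all mapped target-side columns are NULL,
- // i.e. rows left unmatched after the join.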
- String srcColumnNotNull = "( NOT ("
- + getSrcColumnIsNullStr(inputParameterValueResult.get(SRC_TABLE), getSrcColumnList(mappingColumnList))
- + " ))";
- String targetColumnIsNull = "( " + getSrcColumnIsNullStr(inputParameterValueResult.get(TARGET_TABLE),
- getTargetColumnList(mappingColumnList)) + " )";
-
- return srcColumnNotNull + AND + targetColumnIsNull;
- }
-
- public static List<BaseConfig> getWriterConfigList(
- int index,
- Map<String, String> inputParameterValueResult,
- List<BaseConfig> transformerConfigList,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext,
- String writerSql) throws DataQualityException {
- List<DqRuleExecuteSql> comparisonExecuteSqlList =
- getExecuteSqlListByType(
- JSONUtils.toList(dataQualityTaskExecutionContext.getExecuteSqlList(), DqRuleExecuteSql.class),
- ExecuteSqlType.COMPARISON);
-
- if (CollectionUtils.isNotEmpty(comparisonExecuteSqlList)) {
- DqRuleExecuteSql comparisonSql = comparisonExecuteSqlList.get(0);
- inputParameterValueResult.put(COMPARISON_TABLE, comparisonSql.getTableAlias());
-
- checkAndReplace(comparisonExecuteSqlList, inputParameterValueResult.get(SRC_FILTER), AND_SRC_FILTER);
- checkAndReplace(comparisonExecuteSqlList, inputParameterValueResult.get(SRC_FILTER), WHERE_SRC_FILTER);
- checkAndReplace(comparisonExecuteSqlList, inputParameterValueResult.get(TARGET_FILTER), AND_TARGET_FILTER);
- checkAndReplace(comparisonExecuteSqlList, inputParameterValueResult.get(TARGET_FILTER),
- WHERE_TARGET_FILTER);
-
- for (DqRuleExecuteSql executeSqlDefinition : comparisonExecuteSqlList) {
- index = setTransformerConfig(
- index,
- inputParameterValueResult,
- transformerConfigList,
- executeSqlDefinition);
- }
- }
-
- return getWriterConfigList(
- ParameterUtils.convertParameterPlaceholders(writerSql, inputParameterValueResult),
- dataQualityTaskExecutionContext);
- }
-
- public static List<BaseConfig> getAllWriterConfigList(
- Map<String, String> inputParameterValue,
- DataQualityTaskExecutionContext context,
- int index,
- List<BaseConfig> transformerConfigList,
- String writerSql,
- String statisticsValueWriterSql) {
-
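- // Combine the result writer, the statistics value writer and, when configured, the error output writer.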
- List<BaseConfig> writerConfigList = RuleParserUtils.getWriterConfigList(
- index,
- inputParameterValue,
- transformerConfigList,
- context,
- writerSql);
-
- writerConfigList.addAll(
- RuleParserUtils.getStatisticsValueConfigWriterList(
- statisticsValueWriterSql,
- inputParameterValue,
- context));
-
- BaseConfig errorOutputWriter = RuleParserUtils.getErrorOutputWriter(inputParameterValue, context);
- if (errorOutputWriter != null) {
- writerConfigList.add(errorOutputWriter);
- }
-
- return writerConfigList;
- }
-
- public static List<DqRuleExecuteSql> getExecuteSqlListByType(
- List<DqRuleExecuteSql> allExecuteSqlList,
- ExecuteSqlType executeSqlType) {
- if (CollectionUtils.isEmpty(allExecuteSqlList)) {
- return allExecuteSqlList;
- }
-
- return allExecuteSqlList
- .stream()
- .filter(x -> ExecuteSqlType.of(x.getType()) == executeSqlType)
- .collect(Collectors.toList());
- }
-
- private static void checkAndReplace(List<DqRuleExecuteSql> list, String checkValue, String replaceSrc) {
- if (StringUtils.isEmpty(checkValue) && CollectionUtils.isNotEmpty(list)) {
- for (DqRuleExecuteSql executeSqlDefinition : list) {
- String sql = executeSqlDefinition.getSql();
- sql = sql.replace(replaceSrc, "");
- executeSqlDefinition.setSql(sql);
- }
- }
- }
-
- public static List<MappingColumn> getMappingColumnList(String mappingColumns) {
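- // mappingColumns is a JSON array, e.g. [{"src_field":"id","operator":"=","target_field":"id"}]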
- ArrayNode mappingColumnList = JSONUtils.parseArray(mappingColumns);
- List<MappingColumn> list = new ArrayList<>();
- mappingColumnList.forEach(item -> {
- MappingColumn column = new MappingColumn(
- String.valueOf(item.get(SRC_FIELD)).replace("\"", ""),
- String.valueOf(item.get(OPERATOR)).replace("\"", " "),
- String.valueOf(item.get(TARGET_FIELD)).replace("\"", ""));
- list.add(column);
- });
-
- return list;
- }
-
- public static List<String> getSrcColumnList(List<MappingColumn> mappingColumns) {
- List<String> list = new ArrayList<>();
- mappingColumns.forEach(item -> list.add(item.getSrcField()));
-
- return list;
- }
-
- public static List<String> getTargetColumnList(List<MappingColumn> mappingColumns) {
- List<String> list = new ArrayList<>();
- mappingColumns.forEach(item -> list.add(item.getTargetField()));
-
- return list;
- }
-
- public static EnvConfig getEnvConfig() {
- EnvConfig envConfig = new EnvConfig();
- envConfig.setType(BATCH);
- return envConfig;
- }
-
- public static BaseConfig getErrorOutputWriter(Map<String, String> inputParameterValueResult,
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext) {
- List<DqRuleExecuteSql> dqRuleExecuteSqlList =
- JSONUtils.toList(dataQualityTaskExecutionContext.getExecuteSqlList(), DqRuleExecuteSql.class);
-
- DqRuleExecuteSql errorOutputSql = null;
- if (CollectionUtils.isEmpty(dqRuleExecuteSqlList)) {
- return null;
- }
-
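- // Use the first execute SQL flagged as error output; its table alias feeds the HDFS writer below.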
- for (DqRuleExecuteSql executeSql : dqRuleExecuteSqlList) {
- if (executeSql.isErrorOutputSql()) {
- errorOutputSql = executeSql;
- break;
- }
- }
-
- BaseConfig baseConfig = null;
- if (StringUtils.isNotEmpty(inputParameterValueResult.get(ERROR_OUTPUT_PATH))
- && errorOutputSql != null) {
- baseConfig = new BaseConfig();
- Map<String, Object> config = new HashMap<>();
- config.put(PATH, inputParameterValueResult.get(ERROR_OUTPUT_PATH));
- config.put(INPUT_TABLE, errorOutputSql.getTableAlias());
- baseConfig.setConfig(config);
- baseConfig.setType(HDFS_FILE);
- }
-
- return baseConfig;
- }
-
- /**
- * Generates the unique code used to look up the statistics value of tasks with the same type and conditions.
- *
- * @param inputParameterValue the rule input parameter map
- * @return the unique code, or "-1" when the input map is empty
- */
- public static String generateUniqueCode(Map<String, String> inputParameterValue) {
-
- if (MapUtils.isEmpty(inputParameterValue)) {
- return "-1";
- }
-
- Map<String, String> newInputParameterValue = new HashMap<>(inputParameterValue);
-
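- // Remove run-time and bookkeeping fields so the code depends only on the rule's data-related inputs.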
- newInputParameterValue.remove(RULE_TYPE);
- newInputParameterValue.remove(RULE_NAME);
- newInputParameterValue.remove(CREATE_TIME);
- newInputParameterValue.remove(UPDATE_TIME);
- newInputParameterValue.remove(PROCESS_DEFINITION_ID);
- newInputParameterValue.remove(PROCESS_INSTANCE_ID);
- newInputParameterValue.remove(TASK_INSTANCE_ID);
- newInputParameterValue.remove(CHECK_TYPE);
- newInputParameterValue.remove(OPERATOR);
- newInputParameterValue.remove(THRESHOLD);
- newInputParameterValue.remove(FAILURE_STRATEGY);
- newInputParameterValue.remove(DATA_TIME);
- newInputParameterValue.remove(ERROR_OUTPUT_PATH);
- newInputParameterValue.remove(COMPARISON_TYPE);
- newInputParameterValue.remove(COMPARISON_NAME);
- newInputParameterValue.remove(COMPARISON_TABLE);
- newInputParameterValue.remove(PARAMETER_CURRENT_DATE);
- newInputParameterValue.remove(PARAMETER_BUSINESS_DATE);
- newInputParameterValue.remove(PARAMETER_DATETIME);
-
- StringBuilder sb = new StringBuilder();
- for (String value : newInputParameterValue.values()) {
- sb.append(value);
- }
-
- return Md5Utils.getMd5(sb.toString(), true);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/SparkArgsUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/SparkArgsUtils.java
deleted file mode 100644
index 2e384ef48e1e..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/SparkArgsUtils.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.utils;
-
-import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark.ProgramType;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark.SparkConstants;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark.SparkParameters;
-import org.apache.dolphinscheduler.plugin.task.api.utils.ArgsUtils;
-
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Utility for building the spark-submit argument list for the data quality application.
- */
-public class SparkArgsUtils {
-
- private static final String SPARK_CLUSTER = "cluster";
-
- private static final String SPARK_LOCAL = "local";
-
- private static final String SPARK_ON_YARN = "yarn";
-
- private static final String DEFAULT_QUALITY_CLASS =
- "org.apache.dolphinscheduler.data.quality.DataQualityApplication";
-
- private SparkArgsUtils() {
- throw new IllegalStateException("Utility class");
- }
-
- /**
- * build args
- *
- * @param param the spark parameters
- * @return argument list
- */
- public static List<String> buildArgs(SparkParameters param) {
- List<String> args = new ArrayList<>();
- args.add(SparkConstants.MASTER);
-
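- // Default to cluster deploy mode; any mode other than local is submitted to YARN.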
- String deployMode = StringUtils.isNotEmpty(param.getDeployMode()) ? param.getDeployMode() : SPARK_CLUSTER;
- if (!SPARK_LOCAL.equals(deployMode)) {
- args.add(SPARK_ON_YARN);
- args.add(SparkConstants.DEPLOY_MODE);
- }
- args.add(deployMode);
-
- ProgramType programType = param.getProgramType();
- String mainClass = param.getMainClass();
- if (programType != null && programType != ProgramType.PYTHON) {
- args.add(SparkConstants.MAIN_CLASS);
- args.add(StringUtils.isNotEmpty(mainClass) ? mainClass : DEFAULT_QUALITY_CLASS);
- }
-
- int driverCores = param.getDriverCores();
- if (driverCores > 0) {
- args.add(SparkConstants.DRIVER_CORES);
- args.add(String.format("%d", driverCores));
- }
-
- String driverMemory = param.getDriverMemory();
- if (StringUtils.isNotEmpty(driverMemory)) {
- args.add(SparkConstants.DRIVER_MEMORY);
- args.add(driverMemory);
- }
-
- int numExecutors = param.getNumExecutors();
- if (numExecutors > 0) {
- args.add(SparkConstants.NUM_EXECUTORS);
- args.add(String.format("%d", numExecutors));
- }
-
- int executorCores = param.getExecutorCores();
- if (executorCores > 0) {
- args.add(SparkConstants.EXECUTOR_CORES);
- args.add(String.format("%d", executorCores));
- }
-
- String executorMemory = param.getExecutorMemory();
- if (StringUtils.isNotEmpty(executorMemory)) {
- args.add(SparkConstants.EXECUTOR_MEMORY);
- args.add(executorMemory);
- }
-
- String appName = param.getAppName();
- if (StringUtils.isNotEmpty(appName)) {
- args.add(SparkConstants.SPARK_NAME);
- args.add(ArgsUtils.escape(appName));
- }
-
- String others = param.getOthers();
- if (!SPARK_LOCAL.equals(deployMode)
- && (StringUtils.isEmpty(others) || !others.contains(SparkConstants.SPARK_QUEUE))) {
- String queue = param.getYarnQueue();
- if (StringUtils.isNotEmpty(queue)) {
- args.add(SparkConstants.SPARK_QUEUE);
- args.add(queue);
- }
- }
-
- // --conf --files --jars --packages
- if (StringUtils.isNotEmpty(others)) {
- args.add(others);
- }
-
- ResourceInfo mainJar = param.getMainJar();
- if (mainJar != null) {
- args.add(mainJar.getResourceName());
- }
-
- String mainArgs = param.getMainArgs();
- if (StringUtils.isNotEmpty(mainArgs)) {
- args.add(mainArgs);
- }
-
- return args;
- }
-
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java
deleted file mode 100644
index 2c61a000d5c0..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq;
-
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.DataQualityParameters;
-import org.apache.dolphinscheduler.plugin.task.api.parameters.dataquality.spark.SparkParameters;
-import org.apache.dolphinscheduler.spi.params.base.ParamsOptions;
-import org.apache.dolphinscheduler.spi.params.base.PluginParams;
-import org.apache.dolphinscheduler.spi.params.base.TriggerType;
-import org.apache.dolphinscheduler.spi.params.base.Validate;
-import org.apache.dolphinscheduler.spi.params.input.InputParam;
-import org.apache.dolphinscheduler.spi.params.input.InputParamProps;
-import org.apache.dolphinscheduler.spi.params.select.SelectParam;
-import org.apache.dolphinscheduler.spi.params.select.SelectParamProps;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-public class DataQualityParameterTest {
-
- private DataQualityParameters dataQualityParameters = null;
-
- @BeforeEach
- public void before() {
- dataQualityParameters = new DataQualityParameters();
- dataQualityParameters.setRuleId(1);
- dataQualityParameters.setSparkParameters(new SparkParameters());
- }
-
- @Test
- public void testCheckParameterNormal() {
-
- Map<String, String> inputParameterValue = new HashMap<>();
- inputParameterValue.put("src_connector_type", "JDBC");
- inputParameterValue.put("src_datasource_id", "1");
- inputParameterValue.put("src_database", "test");
- inputParameterValue.put("src_table", "test1");
- inputParameterValue.put("src_filter", "date=2012-10-05");
- inputParameterValue.put("src_field", "id");
-
- inputParameterValue.put("rule_type", "1");
- inputParameterValue.put("process_definition_id", "1");
- inputParameterValue.put("task_instance_id", "1");
- inputParameterValue.put("check_type", "1");
- inputParameterValue.put("threshold", "1000");
- inputParameterValue.put("create_time", "2012-10-05");
- inputParameterValue.put("update_time", "2012-10-05");
-
- dataQualityParameters.setRuleInputParameter(inputParameterValue);
-
- Assertions.assertTrue(dataQualityParameters.checkParameters());
- }
-
- @Test
- public void testRuleInputParameter() {
- String formCreateJson = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\","
- + "\"props\":{\"disabled\":false,\"multiple\":false,\"size\":\"small\"},"
- + "\"type\":\"select\",\"title\":\"源数据类型\",\"value\":\"JDBC\","
- + "\"options\":[{\"label\":\"HIVE\",\"value\":\"HIVE\",\"disabled\":false},"
- + "{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]},"
- + "{\"props\":{\"disabled\":false,\"rows\":0,\"placeholder\":\"Please enter source table name\","
- + "\"size\":\"small\"},\"field\":\"src_table\",\"name\":\"源数据表\","
- + "\"type\":\"input\",\"title\":\"源数据表\",\"validate\":[{\"required\":true,\"type\":\"string\","
- + "\"trigger\":\"blur\"}]}]";
-
- List<PluginParams> pluginParamsList = new ArrayList<>();
- SelectParamProps selectParamProps = new SelectParamProps();
- selectParamProps.setMultiple(false);
- selectParamProps.setDisabled(false);
- selectParamProps.setSize("small");
-
- SelectParam srcConnectorType = SelectParam.newBuilder("src_connector_type", "源数据类型")
- .setProps(selectParamProps)
- .addOptions(new ParamsOptions("HIVE", "HIVE", false))
- .addOptions(new ParamsOptions("JDBC", "JDBC", false))
- .setValue("JDBC")
- .build();
-
- InputParamProps inputParamProps = new InputParamProps();
- inputParamProps.setPlaceholder("Please enter source table name");
- inputParamProps.setDisabled(false);
- inputParamProps.setSize("small");
- inputParamProps.setRows(0);
-
- InputParam srcTable = InputParam.newBuilder("src_table", "源数据表")
- .setProps(inputParamProps)
- .addValidate(Validate.newBuilder().setType("string").setRequired(true)
- .setTrigger(TriggerType.BLUR.getTriggerType()).build())
- .build();
-
- pluginParamsList.add(srcConnectorType);
- pluginParamsList.add(srcTable);
-
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
- String result = null;
-
- try {
- result = mapper.writeValueAsString(pluginParamsList);
- } catch (JsonProcessingException e) {
- Assertions.fail();
- }
-
- Assertions.assertEquals(formCreateJson, result);
- }
-}
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java
deleted file mode 100644
index da01e32e9b25..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java
+++ /dev/null
@@ -1,1241 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq;
-
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.COMPARISON_TABLE;
-import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_FIELD;
-
-import org.apache.dolphinscheduler.common.utils.JSONUtils;
-import org.apache.dolphinscheduler.plugin.task.api.DataQualityTaskExecutionContext;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.DataType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.ExecuteSqlType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.InputType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.OptionSourceType;
-import org.apache.dolphinscheduler.plugin.task.api.enums.dp.RuleType;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
-import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry;
-import org.apache.dolphinscheduler.spi.params.base.FormType;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.junit.jupiter.api.Assertions;
-
-/**
- * DataQualityTaskTest
- */
-
-public class DataQualityTaskTest {
-
- // @Test
- public void testSingleTable() throws Exception {
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = getSingleTableContext();
-
- Map<String, String> inputParameterValue = new HashMap<>();
- inputParameterValue.put("src_connector_type", "0");
- inputParameterValue.put("src_datasource_id", "2");
- inputParameterValue.put("src_table", "src_result");
- inputParameterValue.put("check_type", "0");
- inputParameterValue.put("src_database", "test");
- inputParameterValue.put("operator", "3");
- inputParameterValue.put("threshold", "1");
- inputParameterValue.put("failure_strategy", "0");
- inputParameterValue.put("comparison_type", "1");
- inputParameterValue.put("comparison_name", "10");
- inputParameterValue.put("rule_id", "10");
- inputParameterValue.put("rule_type", "0");
- inputParameterValue.put("rule_name", "'表行数校验'");
- inputParameterValue.put("create_time", "'2021-08-12 10:15:48'");
- inputParameterValue.put("update_time", "'2021-08-12 10:15:48'");
- inputParameterValue.put("process_definition_id", "21");
- inputParameterValue.put("process_instance_id", "284");
- inputParameterValue.put("task_instance_id", "287");
- inputParameterValue.put("data_time", "'2021-08-12 10:15:48'");
- inputParameterValue.put("error_output_path",
- "hdfs://192.168.0.1:8022/user/ods/data_quality_error_data/21_284_287");
-
- RuleManager ruleManager = new RuleManager(inputParameterValue, dataQualityTaskExecutionContext);
- String expect = "{\"name\":\"表行数校验\",\"env\":{\"type\":\"batch\",\"config\":null},"
- + "\"readers\":[{\"type\":\"JDBC\",\"config\":"
- + "{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\","
- + "\"user\":\"test\",\"output_table\":\"test_src_result\",\"table\":\"src_result\","
- + "\"url\":\"jdbc:mysql://localhost:3306/test\"}}],"
- + "\"transformers\":[{\"type\":\"sql\",\"config\":{\"index\":1,"
- + "\"output_table\":\"table_count\",\"sql\":\"SELECT COUNT(*) AS total FROM test_src_result \"}}],"
- + "\"writers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\","
- + "\"driver\":\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"table\":\"dqc_result\","
- + "\"url\":\"jdbc:mysql://localhost:3306/test\","
- + "\"sql\":\"select 0 as rule_type,'表行数校验' as rule_name,21 as process_definition_id,284 as process_instance_id,"
- + "287 as task_instance_id,table_count.total AS statistics_value,10 AS comparison_value,1 AS comparison_type,"
- + "0 as check_type,1 as threshold,3 as operator,0 as failure_strategy,"
- + "'hdfs://192.168.0.1:8022/user/ods/data_quality_error_data/21_284_287' as error_output_path,"
- + "'2021-08-12 10:15:48' as create_time,'2021-08-12 10:15:48' as update_time from table_count \"}},"
- + "{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\","
- + "\"user\":\"test\",\"table\":\"dqc_statistics_value\",\"url\":"
- + "\"jdbc:mysql://localhost:3306/test\","
- + "\"sql\":\"select 21 as process_definition_id,287 as task_instance_id,10 as rule_id,'SA8QJTSZZNEXNIXHUL5LTGRTYPWKJ4XY85VPS/NCKES=' "
- + "as unique_code,'table_count.total'AS statistics_name,"
- + "table_count.total AS statistics_value,'2021-08-12 10:15:48' as data_time,'2021-08-12 10:15:48' as create_time,"
- + "'2021-08-12 10:15:48' as update_time from table_count\"}}]}";
- Assertions.assertEquals(expect, JSONUtils.toJsonString(ruleManager.generateDataQualityParameter()));
- }
-
- private DataQualityTaskExecutionContext getSingleTableContext() {
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext();
-
- dataQualityTaskExecutionContext.setRuleName("表行数校验");
- dataQualityTaskExecutionContext.setRuleType(RuleType.SINGLE_TABLE.getCode());
-
- List<DqRuleInputEntry> defaultInputEntryList = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData(null);
- srcConnectorType.setPlaceholder("${src_connector_type}");
- srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- srcConnectorType.setOptions(null);
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
- srcConnectorType.setCreateTime(new Date());
- srcConnectorType.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry();
- srcDatasourceId.setTitle("源数据源");
- srcDatasourceId.setField("src_datasource_id");
- srcDatasourceId.setType(FormType.CASCADER.getFormType());
- srcDatasourceId.setCanEdit(true);
- srcDatasourceId.setIsShow(true);
- srcDatasourceId.setData(null);
- srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
- srcDatasourceId.setInputType(InputType.DEFAULT.getCode());
- srcDatasourceId.setDataType(DataType.NUMBER.getCode());
- srcDatasourceId.setCreateTime(new Date());
- srcDatasourceId.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
- srcDatabase.setTitle("源数据库");
- srcDatabase.setField("src_database");
- srcDatabase.setType(FormType.CASCADER.getFormType());
- srcDatabase.setCanEdit(true);
- srcDatabase.setIsShow(true);
- srcDatabase.setData(null);
- srcDatabase.setPlaceholder("$t(src_database)");
- srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcDatabase.setInputType(InputType.DEFAULT.getCode());
- srcDatabase.setDataType(DataType.NUMBER.getCode());
- srcDatabase.setCreateTime(new Date());
- srcDatabase.setUpdateTime(new Date());
-
- DqRuleInputEntry srcTable = new DqRuleInputEntry();
- srcTable.setTitle("源数据表");
- srcTable.setField("src_table");
- srcTable.setType(FormType.INPUT.getFormType());
- srcTable.setCanEdit(true);
- srcTable.setIsShow(true);
- srcTable.setPlaceholder("Please enter source table name");
- srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcTable.setInputType(InputType.DEFAULT.getCode());
- srcTable.setDataType(DataType.STRING.getCode());
- srcTable.setCreateTime(new Date());
- srcTable.setUpdateTime(new Date());
-
- DqRuleInputEntry srcFilter = new DqRuleInputEntry();
- srcFilter.setTitle("源表过滤条件");
- srcFilter.setField("src_filter");
- srcFilter.setType(FormType.INPUT.getFormType());
- srcFilter.setCanEdit(true);
- srcFilter.setIsShow(true);
- srcFilter.setPlaceholder("Please enter filter expression");
- srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcFilter.setInputType(InputType.DEFAULT.getCode());
- srcFilter.setDataType(DataType.LIKE_SQL.getCode());
- srcFilter.setCreateTime(new Date());
- srcFilter.setUpdateTime(new Date());
-
- DqRuleInputEntry srcField = new DqRuleInputEntry();
- srcField.setTitle("检测列");
- srcField.setField(SRC_FIELD);
- srcField.setType(FormType.INPUT.getFormType());
- srcField.setCanEdit(true);
- srcField.setIsShow(true);
- srcField.setData("");
- srcField.setPlaceholder("Please enter column, only single column is supported");
- srcField.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcField.setInputType(InputType.DEFAULT.getCode());
- srcField.setDataType(DataType.STRING.getCode());
- srcField.setCreateTime(new Date());
- srcField.setUpdateTime(new Date());
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(false);
- statisticsName.setIsShow(false);
- statisticsName.setData("table_count.total");
- statisticsName.setPlaceholder("${statistics_name}");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setInputType(InputType.STATISTICS.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
- statisticsName.setCreateTime(new Date());
- statisticsName.setUpdateTime(new Date());
-
- DqRuleInputEntry checkType = new DqRuleInputEntry();
- checkType.setTitle("检测方式");
- checkType.setField("check_type");
- checkType.setType(FormType.SELECT.getFormType());
- checkType.setCanEdit(true);
- checkType.setIsShow(true);
- checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- checkType.setOptions(
- "[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},{\"label\":\"统计值 / 比对值\","
- + "\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]");
- checkType.setData("0");
- checkType.setInputType(InputType.CHECK.getCode());
- checkType.setDataType(DataType.STRING.getCode());
- checkType.setPlaceholder("检测类型");
- checkType.setCreateTime(new Date());
- checkType.setUpdateTime(new Date());
-
- DqRuleInputEntry operator = new DqRuleInputEntry();
- operator.setTitle("操作符");
- operator.setField("operator");
- operator.setType(FormType.SELECT.getFormType());
- operator.setCanEdit(true);
- operator.setIsShow(true);
- operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"},"
- + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"},"
- + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"},"
- + "{\"label\":\"!=\",\"value\":\"5\"}]");
- operator.setData("0");
- operator.setInputType(InputType.CHECK.getCode());
- operator.setDataType(DataType.STRING.getCode());
- operator.setPlaceholder("操作符");
- operator.setCreateTime(new Date());
- operator.setUpdateTime(new Date());
-
- DqRuleInputEntry threshold = new DqRuleInputEntry();
- threshold.setTitle("阈值");
- threshold.setField("threshold");
- threshold.setType(FormType.INPUT.getFormType());
- threshold.setCanEdit(true);
- threshold.setIsShow(true);
- threshold.setPlaceholder("Please enter threshold, number is needed");
- threshold.setInputType(InputType.CHECK.getCode());
- threshold.setDataType(DataType.NUMBER.getCode());
- threshold.setCreateTime(new Date());
- threshold.setUpdateTime(new Date());
-
- DqRuleInputEntry afterFailure = new DqRuleInputEntry();
- afterFailure.setTitle("失败策略");
- afterFailure.setField("failure_strategy");
- afterFailure.setType(FormType.SELECT.getFormType());
- afterFailure.setCanEdit(true);
- afterFailure.setIsShow(true);
- afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]");
- afterFailure.setData("0");
- afterFailure.setInputType(InputType.CHECK.getCode());
- afterFailure.setDataType(DataType.STRING.getCode());
- afterFailure.setPlaceholder("失败策略");
- afterFailure.setCreateTime(new Date());
- afterFailure.setUpdateTime(new Date());
-
- defaultInputEntryList.add(checkType);
- defaultInputEntryList.add(operator);
- defaultInputEntryList.add(threshold);
- defaultInputEntryList.add(afterFailure);
-
- defaultInputEntryList.add(srcConnectorType);
- defaultInputEntryList.add(srcDatasourceId);
- defaultInputEntryList.add(srcDatabase);
- defaultInputEntryList.add(srcTable);
- defaultInputEntryList.add(srcFilter);
- defaultInputEntryList.add(srcField);
- defaultInputEntryList.add(statisticsName);
-
- DqRuleExecuteSql executeSqlDefinition3 = new DqRuleExecuteSql();
- executeSqlDefinition3.setIndex(0);
- executeSqlDefinition3.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
- executeSqlDefinition3.setTableAlias("table_count");
- executeSqlDefinition3.setType(ExecuteSqlType.STATISTICS.getCode());
-
- List<DqRuleExecuteSql> executeSqlList = new ArrayList<>();
- executeSqlList.add(executeSqlDefinition3);
- dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList));
- dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList));
- dataQualityTaskExecutionContext.setSourceConnectorType("JDBC");
- dataQualityTaskExecutionContext.setSourceType(0);
- dataQualityTaskExecutionContext.setSourceConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setWriterType(0);
- dataQualityTaskExecutionContext.setWriterConnectorType("JDBC");
- dataQualityTaskExecutionContext.setWriterTable("dqc_result");
- dataQualityTaskExecutionContext.setWriterConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC");
- dataQualityTaskExecutionContext.setStatisticsValueType(0);
- dataQualityTaskExecutionContext.setStatisticsValueTable("dqc_statistics_value");
- dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setCompareWithFixedValue(true);
- return dataQualityTaskExecutionContext;
- }
-
- // @Test
- public void testSingleTableCustomSql() throws Exception {
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext();
-
- dataQualityTaskExecutionContext.setRuleName("自定义SQL");
- dataQualityTaskExecutionContext.setRuleType(RuleType.SINGLE_TABLE_CUSTOM_SQL.getCode());
-
- List<DqRuleInputEntry> defaultInputEntryList = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData(null);
- srcConnectorType.setPlaceholder("${src_connector_type}");
- srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- srcConnectorType.setOptions(null);
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
- srcConnectorType.setCreateTime(new Date());
- srcConnectorType.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry();
- srcDatasourceId.setTitle("源数据源");
- srcDatasourceId.setField("src_datasource_id");
- srcDatasourceId.setType(FormType.CASCADER.getFormType());
- srcDatasourceId.setCanEdit(true);
- srcDatasourceId.setIsShow(true);
- srcDatasourceId.setData(null);
- srcDatasourceId.setPlaceholder("${comparison_value}");
- srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
- srcDatasourceId.setInputType(InputType.DEFAULT.getCode());
- srcDatasourceId.setDataType(DataType.NUMBER.getCode());
- srcDatasourceId.setCreateTime(new Date());
- srcDatasourceId.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
- srcDatabase.setTitle("源数据库");
- srcDatabase.setField("src_database");
- srcDatabase.setType(FormType.CASCADER.getFormType());
- srcDatabase.setCanEdit(true);
- srcDatabase.setIsShow(true);
- srcDatabase.setData(null);
- srcDatabase.setPlaceholder("$t(src_database)");
- srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcDatabase.setInputType(InputType.DEFAULT.getCode());
- srcDatabase.setDataType(DataType.NUMBER.getCode());
- srcDatabase.setCreateTime(new Date());
- srcDatabase.setUpdateTime(new Date());
-
- DqRuleInputEntry srcTable = new DqRuleInputEntry();
- srcTable.setTitle("源数据表");
- srcTable.setField("src_table");
- srcTable.setType(FormType.INPUT.getFormType());
- srcTable.setCanEdit(true);
- srcTable.setIsShow(true);
- srcTable.setPlaceholder("Please enter source table name");
- srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcTable.setInputType(InputType.DEFAULT.getCode());
- srcTable.setDataType(DataType.STRING.getCode());
- srcTable.setCreateTime(new Date());
- srcTable.setUpdateTime(new Date());
-
- DqRuleInputEntry srcFilter = new DqRuleInputEntry();
- srcFilter.setTitle("源表过滤条件");
- srcFilter.setField("src_filter");
- srcFilter.setType(FormType.INPUT.getFormType());
- srcFilter.setCanEdit(true);
- srcFilter.setIsShow(true);
- srcFilter.setPlaceholder("Please enter source filter expression");
- srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcFilter.setInputType(InputType.DEFAULT.getCode());
- srcFilter.setDataType(DataType.LIKE_SQL.getCode());
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值名");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(true);
- statisticsName.setIsShow(true);
- statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setInputType(InputType.DEFAULT.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
-
- DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
- statisticsExecuteSql.setTitle("统计值计算SQL");
- statisticsExecuteSql.setField("statistics_execute_sql");
- statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
- statisticsExecuteSql.setCanEdit(true);
- statisticsExecuteSql.setIsShow(true);
- statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql");
- statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
-
- DqRuleInputEntry checkType = new DqRuleInputEntry();
- checkType.setTitle("检测方式");
- checkType.setField("check_type");
- checkType.setType(FormType.SELECT.getFormType());
- checkType.setCanEdit(true);
- checkType.setIsShow(true);
- checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},"
- + "{\"label\":\"统计值 / 比对值\",\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]");
- checkType.setData("0");
- checkType.setInputType(InputType.CHECK.getCode());
- checkType.setDataType(DataType.STRING.getCode());
- checkType.setPlaceholder("检测类型");
-
- DqRuleInputEntry operator = new DqRuleInputEntry();
- operator.setTitle("操作符");
- operator.setField("operator");
- operator.setType(FormType.SELECT.getFormType());
- operator.setCanEdit(true);
- operator.setIsShow(true);
- operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"},"
- + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"},"
- + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"},"
- + "{\"label\":\"!=\",\"value\":\"5\"}]");
- operator.setData("0");
- operator.setInputType(InputType.CHECK.getCode());
- operator.setDataType(DataType.STRING.getCode());
- operator.setPlaceholder("操作符");
-
- DqRuleInputEntry threshold = new DqRuleInputEntry();
- threshold.setTitle("阈值");
- threshold.setField("threshold");
- threshold.setType(FormType.INPUT.getFormType());
- threshold.setCanEdit(true);
- threshold.setIsShow(true);
- threshold.setPlaceholder("Please enter threshold value, number is needed");
- threshold.setInputType(InputType.CHECK.getCode());
- threshold.setDataType(DataType.NUMBER.getCode());
-
- DqRuleInputEntry afterFailure = new DqRuleInputEntry();
- afterFailure.setTitle("失败策略");
- afterFailure.setField("failure_strategy");
- afterFailure.setType(FormType.SELECT.getFormType());
- afterFailure.setCanEdit(true);
- afterFailure.setIsShow(true);
- afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]");
- afterFailure.setData("0");
- afterFailure.setInputType(InputType.CHECK.getCode());
- afterFailure.setDataType(DataType.STRING.getCode());
- afterFailure.setPlaceholder("失败策略");
-
- defaultInputEntryList.add(checkType);
- defaultInputEntryList.add(operator);
- defaultInputEntryList.add(threshold);
- defaultInputEntryList.add(afterFailure);
- defaultInputEntryList.add(srcConnectorType);
- defaultInputEntryList.add(srcDatasourceId);
- defaultInputEntryList.add(srcDatabase);
- defaultInputEntryList.add(srcTable);
- defaultInputEntryList.add(statisticsName);
- defaultInputEntryList.add(statisticsExecuteSql);
- defaultInputEntryList.add(srcFilter);
-
- Map<String, String> inputParameterValue = new HashMap<>();
- inputParameterValue.put("src_connector_type", "0");
- inputParameterValue.put("src_datasource_id", "2");
- inputParameterValue.put("src_table", "person");
- inputParameterValue.put("src_database", "test");
- inputParameterValue.put("statistics_name", "miss");
- inputParameterValue.put("statistics_execute_sql",
- "select count(*) as miss from ${src_table} where (sex = null or sex='') and age=1");
- inputParameterValue.put("src_filter", "age=1");
- inputParameterValue.put("check_type", "2");
- inputParameterValue.put("operator", "3");
- inputParameterValue.put("threshold", "50");
- inputParameterValue.put("failure_strategy", "1");
- inputParameterValue.put("comparison_type", "1");
- inputParameterValue.put("comparison_name", "3");
- inputParameterValue.put("rule_id", "1");
- inputParameterValue.put("rule_type", "1");
- inputParameterValue.put("rule_name", "'自定义SQL'");
- inputParameterValue.put("create_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("update_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("process_definition_id", "1");
- inputParameterValue.put("process_instance_id", "1");
- inputParameterValue.put("task_instance_id", "1");
- inputParameterValue.put("data_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("error_output_path",
- "hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test2");
-
- dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList));
- dataQualityTaskExecutionContext.setSourceConnectorType("JDBC");
- dataQualityTaskExecutionContext.setSourceType(0);
- dataQualityTaskExecutionContext.setSourceConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setWriterType(1);
- dataQualityTaskExecutionContext.setWriterConnectorType("JDBC");
- dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result");
- dataQualityTaskExecutionContext.setWriterConnectionParams(
- "{\"address\":\"jdbc:postgresql://localhost:5432\","
- + "\"database\":\"dolphinscheduler\","
- + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\","
- + "\"user\":\"test\","
- + "\"password\":\"test\","
- + "\"other\":{\"stringtype\": \"unspecified\", \"characterEncoding\" : \"UTF-8\", \"allowMultiQueries\": true}}");
-
- dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC");
- dataQualityTaskExecutionContext.setStatisticsValueType(1);
- dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value");
- dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams(
- "{\"address\":\"jdbc:postgresql://localhost:5432\","
- + "\"database\":\"dolphinscheduler\","
- + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\","
- + "\"user\":\"test\","
- + "\"password\":\"test\","
- + "\"other\":{\"stringtype\": \"unspecified\", \"characterEncoding\" : \"UTF-8\", \"allowMultiQueries\": true}}");
-
- dataQualityTaskExecutionContext.setCompareWithFixedValue(true);
-
- RuleManager ruleManager = new RuleManager(inputParameterValue, dataQualityTaskExecutionContext);
- String expect =
- "{\"name\":\"自定义SQL\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\":[{\"type\":\"JDBC\","
- + "\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\",\"user\":"
- + "\"test\",\"output_table\":\"test_person\",\"table\":\"person\",\"url\":"
- + "\"jdbc:mysql://localhost:3306/test\"}}],"
- + "\"transformers\":[{\"type\":\"sql\",\"config\":"
- + "{\"index\":2,\"output_table\":\"test_person\",\"sql\":\"select count(*) as "
- + "miss from test_person where (sex = null or sex='') and age=1\"}}],\"writers\":"
- + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":"
- + "\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":"
- + "\"t_ds_dq_execute_result\",\"url\":"
- + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding"
- + "=UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as rule_type,'自定义SQL' as rule_name,1 "
- + "as process_definition_id,1 as process_instance_id,1 as task_instance_id,miss AS "
- + "statistics_value,3 AS comparison_value,1 AS comparison_type,2 as check_type,50 as "
- + "threshold,3 as operator,1 as failure_strategy,'hdfs://localhost:8022/user/ods/"
- + "data_quality_error_data/1_1_test2' as error_output_path,'2021-08-30 00:00:00' as "
- + "create_time,'2021-08-30 00:00:00' as update_time from ( test_person ) tmp1 \"}},"
- + "{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":\"test\",\"driver\":"
- + "\"org.postgresql.Driver\",\"user\":\"test\",\"table\":\"t_ds_dq_task_statistics_value\",\"url\":"
- + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding="
- + "UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as "
- + "task_instance_id,1 as rule_id,'IGTZ9I6KWVEPXFFJKDVMO6QB6URHHXK0NINS9GAOUEA=' as unique_code,'miss'AS statistics_name,miss AS statistics_value,"
- + "'2021-08-30 00:00:00' as data_time,'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' "
- + "as update_time from test_person\"}}]}";
-
- Assertions.assertEquals(expect, JSONUtils.toJsonString(ruleManager.generateDataQualityParameter()));
- }
-
- // @Test
- public void testMultiTableComparison() throws Exception {
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext();
- dataQualityTaskExecutionContext.setRuleName("跨表值比对");
- dataQualityTaskExecutionContext.setRuleType(RuleType.MULTI_TABLE_COMPARISON.getCode());
-
- List<DqRuleInputEntry> defaultInputEntryList = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData(null);
- srcConnectorType.setPlaceholder("${src_connector_type}");
- srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- srcConnectorType.setOptions(null);
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
- srcConnectorType.setCreateTime(new Date());
- srcConnectorType.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry();
- srcDatasourceId.setTitle("源数据源");
- srcDatasourceId.setField("src_datasource_id");
- srcDatasourceId.setType(FormType.CASCADER.getFormType());
- srcDatasourceId.setCanEdit(true);
- srcDatasourceId.setIsShow(true);
- srcDatasourceId.setData(null);
- srcDatasourceId.setPlaceholder("${comparison_value}");
- srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
- srcDatasourceId.setInputType(InputType.DEFAULT.getCode());
- srcDatasourceId.setDataType(DataType.NUMBER.getCode());
- srcDatasourceId.setCreateTime(new Date());
- srcDatasourceId.setUpdateTime(new Date());
-
- DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
- srcDatabase.setTitle("源数据库");
- srcDatabase.setField("src_database");
- srcDatabase.setType(FormType.CASCADER.getFormType());
- srcDatabase.setCanEdit(true);
- srcDatabase.setIsShow(true);
- srcDatabase.setData(null);
- srcDatabase.setPlaceholder("$t(src_database)");
- srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcDatabase.setInputType(InputType.DEFAULT.getCode());
- srcDatabase.setDataType(DataType.NUMBER.getCode());
- srcDatabase.setCreateTime(new Date());
- srcDatabase.setUpdateTime(new Date());
-
- DqRuleInputEntry srcTable = new DqRuleInputEntry();
- srcTable.setTitle("源数据表");
- srcTable.setField("src_table");
- srcTable.setType(FormType.INPUT.getFormType());
- srcTable.setCanEdit(true);
- srcTable.setIsShow(true);
- srcTable.setPlaceholder("Please enter source table name");
- srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcTable.setInputType(InputType.DEFAULT.getCode());
- srcTable.setDataType(DataType.STRING.getCode());
- srcTable.setCreateTime(new Date());
- srcTable.setUpdateTime(new Date());
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值名");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(true);
- statisticsName.setIsShow(true);
- statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
- statisticsName.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
- statisticsExecuteSql.setTitle("统计值计算SQL");
- statisticsExecuteSql.setField("statistics_execute_sql");
- statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
- statisticsExecuteSql.setCanEdit(true);
- statisticsExecuteSql.setIsShow(true);
- statisticsExecuteSql.setPlaceholder("Please enter statistics execute sql");
- statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
- statisticsExecuteSql.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry targetConnectorType = new DqRuleInputEntry();
- targetConnectorType.setTitle("目标数据类型");
- targetConnectorType.setField("target_connector_type");
- targetConnectorType.setType(FormType.SELECT.getFormType());
- targetConnectorType.setCanEdit(true);
- targetConnectorType.setIsShow(true);
- targetConnectorType.setData("JDBC");
- targetConnectorType.setPlaceholder("Please select target connector type");
- targetConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- targetConnectorType.setOptions(null);
- targetConnectorType.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry targetDatasourceId = new DqRuleInputEntry();
- targetDatasourceId.setTitle("目标数据源");
- targetDatasourceId.setField("target_datasource_id");
- targetDatasourceId.setType(FormType.SELECT.getFormType());
- targetDatasourceId.setCanEdit(true);
- targetDatasourceId.setIsShow(true);
- targetDatasourceId.setData("1");
- targetDatasourceId.setPlaceholder("Please select target datasource");
- targetDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
-
- DqRuleInputEntry targetDatabase = new DqRuleInputEntry();
- targetDatabase.setTitle("目标数据库");
- targetDatabase.setField("src_database");
- targetDatabase.setType(FormType.CASCADER.getFormType());
- targetDatabase.setCanEdit(true);
- targetDatabase.setIsShow(true);
- targetDatabase.setData(null);
- targetDatabase.setPlaceholder("$t(src_database)");
- targetDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- targetDatabase.setInputType(InputType.DEFAULT.getCode());
- targetDatabase.setDataType(DataType.NUMBER.getCode());
- targetDatabase.setCreateTime(new Date());
- targetDatabase.setUpdateTime(new Date());
- targetDatasourceId.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry targetTable = new DqRuleInputEntry();
- targetTable.setTitle("目标数据表");
- targetTable.setField("target_table");
- targetTable.setType(FormType.INPUT.getFormType());
- targetTable.setCanEdit(true);
- targetTable.setIsShow(true);
- targetTable.setPlaceholder("Please enter target table");
- targetTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- targetTable.setDataType(DataType.STRING.getCode());
- targetTable.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry comparisonName = new DqRuleInputEntry();
- comparisonName.setTitle("比对值名");
- comparisonName.setField("comparison_name");
- comparisonName.setType(FormType.INPUT.getFormType());
- comparisonName.setCanEdit(true);
- comparisonName.setIsShow(true);
- comparisonName.setPlaceholder("Please enter comparison name, the alias in comparison execute sql");
- comparisonName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- comparisonName.setDataType(DataType.STRING.getCode());
- comparisonName.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry comparisonExecuteSql = new DqRuleInputEntry();
- comparisonExecuteSql.setTitle("比对值计算SQL");
- comparisonExecuteSql.setField("comparison_execute_sql");
- comparisonExecuteSql.setType(FormType.TEXTAREA.getFormType());
- comparisonExecuteSql.setCanEdit(true);
- comparisonExecuteSql.setIsShow(true);
- comparisonExecuteSql.setPlaceholder("Please enter comparison execute sql");
- comparisonExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- comparisonExecuteSql.setDataType(DataType.LIKE_SQL.getCode());
- comparisonExecuteSql.setInputType(InputType.DEFAULT.getCode());
-
- DqRuleInputEntry checkType = new DqRuleInputEntry();
- checkType.setTitle("检测方式");
- checkType.setField("check_type");
- checkType.setType(FormType.SELECT.getFormType());
- checkType.setCanEdit(true);
- checkType.setIsShow(true);
- checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},"
- + "{\"label\":\"统计值 / 比对值\",\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]");
- checkType.setData("0");
- checkType.setInputType(InputType.CHECK.getCode());
- checkType.setDataType(DataType.STRING.getCode());
- checkType.setPlaceholder("检测类型");
-
- DqRuleInputEntry operator = new DqRuleInputEntry();
- operator.setTitle("操作符");
- operator.setField("operator");
- operator.setType(FormType.SELECT.getFormType());
- operator.setCanEdit(true);
- operator.setIsShow(true);
- operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"},"
- + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"},"
- + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"},"
- + "{\"label\":\"!=\",\"value\":\"5\"}]");
- operator.setData("0");
- operator.setInputType(InputType.CHECK.getCode());
- operator.setDataType(DataType.STRING.getCode());
- operator.setPlaceholder("操作符");
-
- DqRuleInputEntry threshold = new DqRuleInputEntry();
- threshold.setTitle("阈值");
- threshold.setField("threshold");
- threshold.setType(FormType.INPUT.getFormType());
- threshold.setCanEdit(true);
- threshold.setIsShow(true);
- threshold.setInputType(InputType.CHECK.getCode());
- threshold.setDataType(DataType.NUMBER.getCode());
- threshold.setPlaceholder("Please enter threshold, number is needed");
-
- DqRuleInputEntry afterFailure = new DqRuleInputEntry();
- afterFailure.setTitle("失败策略");
- afterFailure.setField("failure_strategy");
- afterFailure.setType(FormType.SELECT.getFormType());
- afterFailure.setCanEdit(true);
- afterFailure.setIsShow(true);
- afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]");
- afterFailure.setData("0");
- afterFailure.setInputType(InputType.CHECK.getCode());
- afterFailure.setDataType(DataType.STRING.getCode());
- afterFailure.setPlaceholder("失败策略");
-
- defaultInputEntryList.add(checkType);
- defaultInputEntryList.add(operator);
- defaultInputEntryList.add(threshold);
- defaultInputEntryList.add(afterFailure);
-
- defaultInputEntryList.add(srcConnectorType);
- defaultInputEntryList.add(srcDatasourceId);
- defaultInputEntryList.add(srcDatabase);
- defaultInputEntryList.add(srcTable);
- defaultInputEntryList.add(statisticsName);
- defaultInputEntryList.add(statisticsExecuteSql);
-
- defaultInputEntryList.add(targetConnectorType);
- defaultInputEntryList.add(targetDatasourceId);
- defaultInputEntryList.add(targetDatabase);
- defaultInputEntryList.add(targetTable);
- defaultInputEntryList.add(comparisonName);
- defaultInputEntryList.add(comparisonExecuteSql);
-
- dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList));
-
- Map<String, String> inputParameterValue = new HashMap<>();
- inputParameterValue.put("src_connector_type", "0");
- inputParameterValue.put("src_datasource_id", "2");
- inputParameterValue.put("src_database", "test");
- inputParameterValue.put("src_table", "test1");
- inputParameterValue.put("statistics_name", "src");
- inputParameterValue.put("statistics_execute_sql", "select count(*) as src from ${src_table} where c1>20");
- inputParameterValue.put("target_connector_type", "2");
- inputParameterValue.put("target_datasource_id", "3");
- inputParameterValue.put("target_database", "default");
- inputParameterValue.put("target_table", "test1_1");
- inputParameterValue.put("comparison_name", "target");
- inputParameterValue.put("comparison_execute_sql", "select count(*) as target from ${target_table} where c1>20");
- inputParameterValue.put("check_type", "1");
- inputParameterValue.put("operator", "3");
- inputParameterValue.put("threshold", "2");
- inputParameterValue.put("failure_strategy", "0");
- inputParameterValue.put("rule_id", "4");
- inputParameterValue.put("rule_type", "3");
- inputParameterValue.put("rule_name", "'跨表值比对'");
- inputParameterValue.put("create_time", "'2021-08-25 00:00:00'");
- inputParameterValue.put("update_time", "'2021-08-25 00:00:00'");
- inputParameterValue.put("process_definition_id", "1");
- inputParameterValue.put("process_instance_id", "1");
- inputParameterValue.put("task_instance_id", "1");
- inputParameterValue.put("data_time", "'2021-08-25 00:00:00'");
- inputParameterValue.put("error_output_path", "hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_1");
-
- dataQualityTaskExecutionContext.setSourceConnectorType("JDBC");
- dataQualityTaskExecutionContext.setSourceType(0);
- dataQualityTaskExecutionContext.setSourceConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setTargetConnectorType("HIVE");
- dataQualityTaskExecutionContext.setTargetType(2);
- dataQualityTaskExecutionContext.setTargetConnectionParams(
- "{\"address\":\"jdbc:hive2://localhost:10000\","
- + "\"database\":\"default\","
- + "\"jdbcUrl\":\"jdbc:hive2://localhost:10000/default\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setWriterType(1);
- dataQualityTaskExecutionContext.setWriterConnectorType("JDBC");
- dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result");
- dataQualityTaskExecutionContext.setWriterConnectionParams(
- "{\"address\":\"jdbc:postgresql://localhost:5432\","
- + "\"database\":\"dolphinscheduler\","
- + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\","
- + "\"user\":\"test\","
- + "\"password\":\"test\","
- + "\"other\":{\"stringtype\": \"unspecified\", \"characterEncoding\": \"UTF-8\", \"allowMultiQueries\": true}}");
-
- String expect = "{\"name\":\"跨表值比对\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\""
- + ":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":"
- + "\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test_test1\",\"table\":"
- + "\"test1\",\"url\":\"jdbc:mysql://localhost:3306/test\"}},"
- + "{\"type\":\"HIVE\",\"config\":"
- + "{\"database\":\"default\",\"password\":\"test\",\"driver\":\"org.apache.hive.jdbc.HiveDriver\",\"user\":"
- + "\"test\",\"output_table\":\"default_test1_1\",\"table\":\"test1_1\",\"url\":"
- + "\"jdbc:hive2://localhost:10000/default\"}}],\"transformers\":[],\"writers\":"
- + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":"
- + "\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":"
- + "\"t_ds_dq_execute_result\",\"url\":"
- + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\","
- + "\"sql\":\"select 3 as rule_type,'跨表值比对' as rule_name,"
- + "1 as process_definition_id,1 as process_instance_id,1 as task_instance_id,src AS statistics_value,"
- + "target AS comparison_value,0 AS comparison_type,1 as check_type,2 as threshold,3 as operator,"
- + "0 as failure_strategy,'hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_1' "
- + "as error_output_path,'2021-08-25 00:00:00' as create_time,'2021-08-25 00:00:00' as update_time "
- + "from ( select count(*) as src from test_test1 where c1>20 ) tmp1 join ( select count(*) as target from default_test1_1 "
- + "where c1>20 ) tmp2\"}}]}";
-
- RuleManager ruleManager = new RuleManager(inputParameterValue, dataQualityTaskExecutionContext);
- Assertions.assertEquals(expect, JSONUtils.toJsonString(ruleManager.generateDataQualityParameter()));
- }
-
- // @Test
- public void testMultiTableAccuracy() throws Exception {
-
- DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext();
-
- List<DqRuleInputEntry> defaultInputEntryList = new ArrayList<>();
-
- DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
- srcConnectorType.setTitle("源数据类型");
- srcConnectorType.setField("src_connector_type");
- srcConnectorType.setType(FormType.SELECT.getFormType());
- srcConnectorType.setCanEdit(true);
- srcConnectorType.setIsShow(true);
- srcConnectorType.setData("JDBC");
- srcConnectorType.setPlaceholder("Please select source connector type");
- srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- srcConnectorType.setOptions(null);
- srcConnectorType.setInputType(InputType.DEFAULT.getCode());
- srcConnectorType.setDataType(DataType.NUMBER.getCode());
-
- DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry();
- srcDatasourceId.setTitle("源数据源");
- srcDatasourceId.setField("src_datasource_id");
- srcDatasourceId.setType(FormType.SELECT.getFormType());
- srcDatasourceId.setCanEdit(true);
- srcDatasourceId.setIsShow(true);
- srcDatasourceId.setData("1");
- srcDatasourceId.setPlaceholder("Please select source datasource");
- srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
- srcDatasourceId.setInputType(InputType.DEFAULT.getCode());
- srcDatasourceId.setDataType(DataType.NUMBER.getCode());
-
- DqRuleInputEntry srcTable = new DqRuleInputEntry();
- srcTable.setTitle("源数据表");
- srcTable.setField("src_table");
- srcTable.setType(FormType.INPUT.getFormType());
- srcTable.setCanEdit(true);
- srcTable.setIsShow(true);
- srcTable.setPlaceholder("Please enter source table");
- srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcTable.setInputType(InputType.DEFAULT.getCode());
- srcTable.setDataType(DataType.STRING.getCode());
-
- DqRuleInputEntry srcFilter = new DqRuleInputEntry();
- srcFilter.setTitle("源表过滤条件");
- srcFilter.setField("src_filter");
- srcFilter.setType(FormType.INPUT.getFormType());
- srcFilter.setCanEdit(true);
- srcFilter.setIsShow(true);
- srcFilter.setPlaceholder("Please enter source filter expression");
- srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- srcFilter.setInputType(InputType.DEFAULT.getCode());
- srcFilter.setDataType(DataType.LIKE_SQL.getCode());
-
- DqRuleInputEntry targetConnectorType = new DqRuleInputEntry();
- targetConnectorType.setTitle("目标数据类型");
- targetConnectorType.setField("target_connector_type");
- targetConnectorType.setType(FormType.SELECT.getFormType());
- targetConnectorType.setCanEdit(true);
- targetConnectorType.setIsShow(true);
- targetConnectorType.setData("JDBC");
- targetConnectorType.setPlaceholder("Please select target connector type");
- targetConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode());
- targetConnectorType.setOptions(null);
- targetConnectorType.setInputType(InputType.DEFAULT.getCode());
- targetConnectorType.setDataType(DataType.STRING.getCode());
-
- DqRuleInputEntry targetDatasourceId = new DqRuleInputEntry();
- targetDatasourceId.setTitle("目标数据源");
- targetDatasourceId.setField("target_datasource_id");
- targetDatasourceId.setType(FormType.CASCADER.getFormType());
- targetDatasourceId.setCanEdit(true);
- targetDatasourceId.setIsShow(true);
- targetDatasourceId.setData("1");
- targetDatasourceId.setPlaceholder("Please select target datasource");
- targetDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
- targetDatasourceId.setInputType(InputType.DEFAULT.getCode());
- targetDatasourceId.setDataType(DataType.NUMBER.getCode());
-
- DqRuleInputEntry targetTable = new DqRuleInputEntry();
- targetTable.setTitle("目标数据表");
- targetTable.setField("target_table");
- targetTable.setType(FormType.INPUT.getFormType());
- targetTable.setCanEdit(true);
- targetTable.setIsShow(true);
- targetTable.setPlaceholder("Please enter target table");
- targetTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- targetTable.setInputType(InputType.DEFAULT.getCode());
- targetTable.setDataType(DataType.STRING.getCode());
-
- DqRuleInputEntry targetFilter = new DqRuleInputEntry();
- targetFilter.setTitle("目标表过滤条件");
- targetFilter.setField("target_filter");
- targetFilter.setType(FormType.INPUT.getFormType());
- targetFilter.setCanEdit(true);
- targetFilter.setIsShow(true);
- targetFilter.setPlaceholder("Please enter target filter expression");
- targetFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- targetFilter.setInputType(InputType.DEFAULT.getCode());
- targetFilter.setDataType(DataType.LIKE_SQL.getCode());
-
- DqRuleInputEntry mappingColumns = new DqRuleInputEntry();
- mappingColumns.setTitle("检查列");
- mappingColumns.setField("mapping_columns");
- mappingColumns.setType(FormType.INPUT.getFormType());
- mappingColumns.setCanEdit(true);
- mappingColumns.setIsShow(true);
- mappingColumns.setPlaceholder("${mapping_columns}");
- mappingColumns.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- mappingColumns.setInputType(InputType.DEFAULT.getCode());
- mappingColumns.setDataType(DataType.LIST.getCode());
-
- DqRuleInputEntry statisticsName = new DqRuleInputEntry();
- statisticsName.setTitle("统计值");
- statisticsName.setField("statistics_name");
- statisticsName.setType(FormType.INPUT.getFormType());
- statisticsName.setCanEdit(false);
- statisticsName.setIsShow(false);
- statisticsName.setData("miss_count.miss");
- statisticsName.setPlaceholder("${statistics_name}");
- statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- statisticsName.setInputType(InputType.DEFAULT.getCode());
- statisticsName.setDataType(DataType.STRING.getCode());
-
- defaultInputEntryList.add(srcConnectorType);
- defaultInputEntryList.add(srcDatasourceId);
- defaultInputEntryList.add(srcTable);
- defaultInputEntryList.add(srcFilter);
- defaultInputEntryList.add(targetConnectorType);
- defaultInputEntryList.add(targetDatasourceId);
- defaultInputEntryList.add(targetTable);
- defaultInputEntryList.add(targetFilter);
- defaultInputEntryList.add(mappingColumns);
- defaultInputEntryList.add(statisticsName);
-
- DqRuleExecuteSql executeSqlDefinition3 = new DqRuleExecuteSql();
- executeSqlDefinition3.setIndex(0);
- executeSqlDefinition3.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
- executeSqlDefinition3.setTableAlias("total_count");
- executeSqlDefinition3.setType(ExecuteSqlType.MIDDLE.getCode());
-
- DqRuleExecuteSql executeSqlDefinition1 = new DqRuleExecuteSql();
- executeSqlDefinition1.setIndex(0);
- executeSqlDefinition1.setSql("SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) "
- + "${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) "
- + "${target_table} ON ${on_clause} WHERE ${where_clause}");
- executeSqlDefinition1.setTableAlias("miss_items");
- executeSqlDefinition1.setType(ExecuteSqlType.MIDDLE.getCode());
- executeSqlDefinition1.setErrorOutputSql(true);
-
- DqRuleExecuteSql executeSqlDefinition2 = new DqRuleExecuteSql();
- executeSqlDefinition2.setIndex(0);
- executeSqlDefinition2.setSql("SELECT COUNT(*) AS miss FROM miss_items");
- executeSqlDefinition2.setTableAlias("miss_count");
- executeSqlDefinition2.setType(ExecuteSqlType.STATISTICS.getCode());
-
- DqRuleInputEntry comparisonTitle = new DqRuleInputEntry();
- comparisonTitle.setTitle("比对值");
- comparisonTitle.setField("comparison_title");
- comparisonTitle.setType(FormType.INPUT.getFormType());
- comparisonTitle.setCanEdit(false);
- comparisonTitle.setIsShow(true);
- comparisonTitle.setPlaceholder("${comparison_title}");
- comparisonTitle.setData("目标表总行数");
-
- DqRuleInputEntry comparisonName = new DqRuleInputEntry();
- comparisonName.setTitle("比对值名");
- comparisonName.setField("comparison_name");
- comparisonName.setType(FormType.INPUT.getFormType());
- comparisonName.setCanEdit(false);
- comparisonName.setIsShow(false);
- comparisonName.setData("total_count.total");
- comparisonName.setPlaceholder("${comparison_name}");
-
- DqRuleInputEntry comparisonTable = new DqRuleInputEntry();
- comparisonTable.setField(COMPARISON_TABLE);
- comparisonTable.setData("total_count");
-
- DqRuleInputEntry checkType = new DqRuleInputEntry();
- checkType.setTitle("检测方式");
- checkType.setField("check_type");
- checkType.setType(FormType.SELECT.getFormType());
- checkType.setCanEdit(true);
- checkType.setIsShow(true);
- checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- checkType.setOptions(
- "[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},{\"label\":\"统计值 / 比对值\","
- + "\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]");
- checkType.setData("0");
- checkType.setInputType(InputType.CHECK.getCode());
- checkType.setDataType(DataType.STRING.getCode());
- checkType.setPlaceholder("检测类型");
-
- DqRuleInputEntry operator = new DqRuleInputEntry();
- operator.setTitle("操作符");
- operator.setField("operator");
- operator.setType(FormType.SELECT.getFormType());
- operator.setCanEdit(true);
- operator.setIsShow(true);
- operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"},"
- + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"},"
- + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"},{\"label\":\"!=\",\"value\":\"5\"}]");
- operator.setData("0");
- operator.setInputType(InputType.CHECK.getCode());
- operator.setDataType(DataType.STRING.getCode());
- operator.setPlaceholder("操作符");
-
- DqRuleInputEntry threshold = new DqRuleInputEntry();
- threshold.setTitle("阈值");
- threshold.setField("threshold");
- threshold.setType(FormType.INPUT.getFormType());
- threshold.setCanEdit(true);
- threshold.setIsShow(true);
- threshold.setInputType(InputType.CHECK.getCode());
- threshold.setDataType(DataType.NUMBER.getCode());
- threshold.setPlaceholder("Please enter threshold, number is needed");
-
- DqRuleInputEntry afterFailure = new DqRuleInputEntry();
- afterFailure.setTitle("失败策略");
- afterFailure.setField("failure_strategy");
- afterFailure.setType(FormType.SELECT.getFormType());
- afterFailure.setCanEdit(true);
- afterFailure.setIsShow(true);
- afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
- afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]");
- afterFailure.setData("0");
- afterFailure.setInputType(InputType.CHECK.getCode());
- afterFailure.setDataType(DataType.STRING.getCode());
- afterFailure.setPlaceholder("失败策略");
-
- defaultInputEntryList.add(checkType);
- defaultInputEntryList.add(operator);
- defaultInputEntryList.add(threshold);
- defaultInputEntryList.add(afterFailure);
- defaultInputEntryList.add(comparisonTitle);
- defaultInputEntryList.add(comparisonName);
- defaultInputEntryList.add(comparisonTable);
-
- List<DqRuleExecuteSql> executeSqlList = new ArrayList<>();
- executeSqlList.add(executeSqlDefinition3);
- executeSqlList.add(executeSqlDefinition1);
- executeSqlList.add(executeSqlDefinition2);
- dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList));
- dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList));
-
- Map<String, String> inputParameterValue = new HashMap<>();
- inputParameterValue.put("src_connector_type", "0");
- inputParameterValue.put("src_datasource_id", "2");
- inputParameterValue.put("src_database", "test");
- inputParameterValue.put("src_table", "demo_src");
- inputParameterValue.put("src_filter", "age<100");
- inputParameterValue.put("target_connector_type", "2");
- inputParameterValue.put("target_datasource_id", "3");
- inputParameterValue.put("target_database", "default");
- inputParameterValue.put("target_table", "demo_src");
- inputParameterValue.put("target_filter", "age<100");
- inputParameterValue.put("mapping_columns",
- "[{\"src_field\":\"hour\",\"operator\":\"=\",\"target_field\":\"hour\"}]");
- inputParameterValue.put("check_type", "2");
- inputParameterValue.put("operator", "3");
- inputParameterValue.put("threshold", "3");
- inputParameterValue.put("failure_strategy", "0");
- inputParameterValue.put("comparison_type", "7");
- inputParameterValue.put("rule_id", "3");
- inputParameterValue.put("rule_type", "2");
- inputParameterValue.put("rule_name", "'跨表准确性'");
- inputParameterValue.put("create_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("update_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("process_definition_id", "1");
- inputParameterValue.put("process_instance_id", "1");
- inputParameterValue.put("task_instance_id", "1");
- inputParameterValue.put("data_time", "'2021-08-30 00:00:00'");
- inputParameterValue.put("error_output_path", "hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test");
-
- dataQualityTaskExecutionContext.setSourceConnectorType("JDBC");
- dataQualityTaskExecutionContext.setSourceType(0);
- dataQualityTaskExecutionContext.setSourceConnectionParams(
- "{\"address\":\"jdbc:mysql://localhost:3306\","
- + "\"database\":\"test\","
- + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setTargetConnectorType("HIVE");
- dataQualityTaskExecutionContext.setTargetType(2);
- dataQualityTaskExecutionContext.setTargetConnectionParams(
- "{\"address\":\"jdbc:hive2://localhost:10000\","
- + "\"database\":\"default\","
- + "\"jdbcUrl\":\"jdbc:hive2://localhost:10000/default\","
- + "\"user\":\"test\","
- + "\"password\":\"test\"}");
-
- dataQualityTaskExecutionContext.setWriterType(1);
- dataQualityTaskExecutionContext.setWriterConnectorType("JDBC");
- dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result");
- dataQualityTaskExecutionContext.setWriterConnectionParams(
- "{\"address\":\"jdbc:postgresql://localhost:5432\","
- + "\"database\":\"dolphinscheduler\","
- + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\","
- + "\"user\":\"test\","
- + "\"password\":\"test\","
- + "\"other\":{\"stringtype\": \"unspecified\", \"characterEncoding\": \"UTF-8\", \"allowMultiQueries\": true}}");
-
- dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC");
- dataQualityTaskExecutionContext.setStatisticsValueType(1);
- dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value");
- dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams(
- "{\"address\":\"jdbc:postgresql://localhost:5432\","
- + "\"database\":\"dolphinscheduler\","
- + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\","
- + "\"user\":\"test\","
- + "\"password\":\"test\","
- + "\"other\":{\"stringtype\": \"unspecified\", \"characterEncoding\": \"UTF-8\", \"allowMultiQueries\": true }}");
-
- dataQualityTaskExecutionContext.setRuleName("跨表准确性");
- dataQualityTaskExecutionContext.setRuleType(RuleType.MULTI_TABLE_ACCURACY.getCode());
-
- String expect = "{\"name\":\"跨表准确性\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\":"
- + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":"
- + "\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test_demo_src\",\"table\":"
- + "\"demo_src\",\"url\":\"jdbc:mysql://localhost:3306/test\"}},"
- + "{\"type\":\"HIVE\",\"config\":"
- + "{\"database\":\"default\",\"password\":\"test\",\"driver\":"
- + "\"org.apache.hive.jdbc.HiveDriver\",\"user\":\"test\",\"output_table\":\"default_demo_src\",\"table\":"
- + "\"demo_src\",\"url\":\"jdbc:hive2://localhost:10000/default\"}}],\"transformers\":"
- + "[{\"type\":\"sql\",\"config\":{\"index\":1,\"output_table\":\"total_count\","
- + "\"sql\":\"SELECT COUNT(*) AS total FROM test_demo_src WHERE (age<100)\"}},"
- + "{\"type\":\"sql\",\"config\":{\"index\":2,\"output_table\":\"miss_items\",\"sql\":"
- + "\"SELECT test_demo_src.* FROM (SELECT * FROM test_demo_src WHERE (age<100)) "
- + "test_demo_src LEFT JOIN (SELECT * FROM default_demo_src WHERE (age<100)) default_demo_src ON coalesce(test_demo_src.hour, '') ="
- + " coalesce(default_demo_src.hour, '') WHERE ( NOT (test_demo_src.hour IS NULL )) AND "
- + "( default_demo_src.hour IS NULL )\"}},{\"type\":\"sql\",\"config\":{\"index\":3,\"output_table\":\"miss_count\","
- + "\"sql\":\"SELECT COUNT(*) AS miss FROM miss_items\"}}],\"writers\":[{\"type\":\"JDBC\",\"config\":"
- + "{\"database\":\"dolphinscheduler\",\"password\":\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":"
- + "\"t_ds_dq_execute_result\",\"url\":\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified"
- + "&characterEncoding=UTF-8&allowMultiQueries=true\",\"sql\":\"select 2 as rule_type,'跨表准确性' as rule_name,1 as process_definition_id,"
- + "1 as process_instance_id,1 as task_instance_id,miss_count.miss AS statistics_value,total_count.total AS comparison_value,"
- + "7 AS comparison_type,2 as check_type,3 as threshold,3 as operator,0 as failure_strategy,"
- + "'hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test' as error_output_path,"
- + "'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' as update_time from miss_count"
- + " full join total_count\"}},{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\","
- + "\"password\":\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":"
- + "\"t_ds_dq_task_statistics_value\",\"url\":\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified"
- + "&characterEncoding=UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as task_instance_id,"
- + "3 as rule_id,'NGRU3S2KPG0GQ4BIHSW9C/LKX3NHN+CEUNU7AMNSPJK=' as unique_code,'miss_count.miss'AS statistics_name,miss_count.miss "
- + "AS statistics_value,'2021-08-30 00:00:00' as data_time,"
- + "'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' as update_time from miss_count\"}},{\"type\":\"hdfs_file\","
- + "\"config\":{\"path\":\"hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test\",\"input_table\":\"miss_items\"}}]}";
-
- RuleManager ruleManager = new RuleManager(inputParameterValue, dataQualityTaskExecutionContext);
- Assertions.assertEquals(expect, JSONUtils.toJsonString(ruleManager.generateDataQualityParameter()));
- }
-}
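
The three deleted tests above each pin the exact JSON that RuleManager.generateDataQualityParameter() emits for one rule type (single-table custom SQL, multi-table comparison, multi-table accuracy). The escaped `expect` strings are hard to read in diff form, but they all serialize the same overall shape; a minimal TypeScript sketch of that shape, reconstructed from the expected strings (the interface names are editorial, not part of the codebase):

```typescript
// Shape of the application parameter pinned by the deleted assertions.
// Field names are taken from the `expect` strings above; the types are
// an editorial reconstruction, not project code.
interface SqlTransformer {
  type: 'sql'
  config: { index: number; output_table: string; sql: string }
}

interface ConnectorConfig {
  type: string // 'JDBC', 'HIVE', 'hdfs_file', ...
  config: Record<string, string | number | boolean | null>
}

interface DataQualityAppParameter {
  name: string // rule name, e.g. '自定义SQL'
  env: { type: 'batch'; config: null } // Spark batch environment
  readers: ConnectorConfig[] // one reader per source/target table
  transformers: SqlTransformer[] // intermediate statistics SQL steps
  writers: ConnectorConfig[] // result, statistics-value and error-data sinks
}
```

Because the assertions compared this structure after string serialization, any change to key order or to inferred connector defaults (for example the JDBC driver class) would have failed them, which is worth keeping in mind if the module is ever restored.
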
diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5UtilsTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5UtilsTest.java
deleted file mode 100644
index 140a08094792..000000000000
--- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5UtilsTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.dolphinscheduler.plugin.task.dq.utils;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class Md5UtilsTest {
-
- @Test
- public void testGetMd5() {
- Assertions.assertEquals("jZae727K08KaOmKSgOaGzww/XVqGr/PKEgIMkjrcbJI=", Md5Utils.getMd5("123456", false));
- }
-}
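
One detail worth recording before this test disappears: the expected value is not an MD5 digest. "jZae727K08KaOmKSgOaGzww/XVqGr/PKEgIMkjrcbJI=" is the Base64 encoding of SHA-256("123456"), so despite its name Md5Utils evidently produced Base64-encoded SHA-256 output — consistent with the 44-character `unique_code` literals in the RuleManagerTest expectations above. A quick Node sketch to check the vector:

```typescript
import { createHash } from 'crypto'

// Base64-encoded SHA-256 of "123456". This reproduces the deleted
// test's expected value, confirming the digest is SHA-256, not MD5.
const digest = createHash('sha256').update('123456').digest('base64')
console.log(digest) // jZae727K08KaOmKSgOaGzww/XVqGr/PKEgIMkjrcbJI=
```
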
diff --git a/dolphinscheduler-task-plugin/pom.xml b/dolphinscheduler-task-plugin/pom.xml
index e1446ec42652..543f42f6644a 100644
--- a/dolphinscheduler-task-plugin/pom.xml
+++ b/dolphinscheduler-task-plugin/pom.xml
@@ -40,7 +40,6 @@
dolphinscheduler-task-sql
dolphinscheduler-task-sqoop
dolphinscheduler-task-procedure
- dolphinscheduler-task-dataquality
dolphinscheduler-task-seatunnel
dolphinscheduler-task-emr
dolphinscheduler-task-k8s
diff --git a/dolphinscheduler-ui/public/images/task-icons/data_quality.png b/dolphinscheduler-ui/public/images/task-icons/data_quality.png
deleted file mode 100644
index bf2f83e20600..000000000000
Binary files a/dolphinscheduler-ui/public/images/task-icons/data_quality.png and /dev/null differ
diff --git a/dolphinscheduler-ui/public/images/task-icons/data_quality_hover.png b/dolphinscheduler-ui/public/images/task-icons/data_quality_hover.png
deleted file mode 100644
index e6affd6ff1db..000000000000
Binary files a/dolphinscheduler-ui/public/images/task-icons/data_quality_hover.png and /dev/null differ
diff --git a/dolphinscheduler-ui/src/layouts/content/use-dataList.ts b/dolphinscheduler-ui/src/layouts/content/use-dataList.ts
index 1eb85cbd59a7..67a99b025915 100644
--- a/dolphinscheduler-ui/src/layouts/content/use-dataList.ts
+++ b/dolphinscheduler-ui/src/layouts/content/use-dataList.ts
@@ -43,9 +43,6 @@ import {
KeyOutlined,
SafetyOutlined,
GroupOutlined,
- ContainerOutlined,
- ApartmentOutlined,
- BarsOutlined,
CloudServerOutlined,
ClusterOutlined
} from '@vicons/antd'
@@ -192,24 +189,6 @@ export function useDataList() {
}
]
},
- {
- label: () =>
- h(NEllipsis, null, { default: () => t('menu.data_quality') }),
- key: 'data-quality',
- icon: renderIcon(ContainerOutlined),
- children: [
- {
- label: t('menu.task_result'),
- key: '/data-quality/task-result',
- icon: renderIcon(ApartmentOutlined)
- },
- {
- label: t('menu.rule'),
- key: '/data-quality/rule',
- icon: renderIcon(BarsOutlined)
- }
- ]
- },
{
label: () =>
h(NEllipsis, null, { default: () => t('menu.datasource') }),
diff --git a/dolphinscheduler-ui/src/locales/en_US/data-quality.ts b/dolphinscheduler-ui/src/locales/en_US/data-quality.ts
deleted file mode 100644
index e5d8c0f91274..000000000000
--- a/dolphinscheduler-ui/src/locales/en_US/data-quality.ts
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-export default {
- task_result: {
- task_name: 'Task Name',
- workflow_instance: 'Workflow Instance',
- rule_type: 'Rule Type',
- rule_name: 'Rule Name',
- state: 'State',
- actual_value: 'Actual Value',
- excepted_value: 'Expected Value',
- check_type: 'Check Type',
- operator: 'Operator',
- threshold: 'Threshold',
- failure_strategy: 'Failure Strategy',
- excepted_value_type: 'Expected Value Type',
- error_output_path: 'Error Output Path',
- username: 'Username',
- create_time: 'Create Time',
- update_time: 'Update Time',
- undone: 'Undone',
- success: 'Success',
- failure: 'Failure',
- single_table: 'Single Table',
- single_table_custom_sql: 'Single Table Custom Sql',
- multi_table_accuracy: 'Multi Table Accuracy',
- multi_table_comparison: 'Multi Table Comparison',
- expected_and_actual_or_expected: '(Expected - Actual) / Expected x 100%',
- expected_and_actual: 'Expected - Actual',
- actual_and_expected: 'Actual - Expected',
- actual_or_expected: 'Actual / Expected x 100%'
- },
- rule: {
- actions: 'Actions',
- name: 'Rule Name',
- type: 'Rule Type',
- username: 'User Name',
- create_time: 'Create Time',
- update_time: 'Update Time',
- input_item: 'Rule input item',
- view_input_item: 'View input items',
- input_item_title: 'Input item title',
- input_item_placeholder: 'Input item placeholder',
- input_item_type: 'Input item type',
- src_connector_type: 'SrcConnType',
- src_datasource_id: 'SrcSource',
- src_database: 'SrcDatabase',
- src_table: 'SrcTable',
- src_filter: 'SrcFilter',
- src_field: 'SrcField',
- statistics_name: 'ActualValName',
- check_type: 'CheckType',
- operator: 'Operator',
- threshold: 'Threshold',
- failure_strategy: 'FailureStrategy',
- target_connector_type: 'TargetConnType',
- target_datasource_id: 'TargetSourceId',
- target_database: 'TargetDatabase',
- target_table: 'TargetTable',
- target_filter: 'TargetFilter',
- mapping_columns: 'OnClause',
- statistics_execute_sql: 'ActualValExecSql',
- comparison_name: 'ExpectedValName',
- comparison_execute_sql: 'ExpectedValExecSql',
- comparison_type: 'ExpectedValType',
- writer_connector_type: 'WriterConnType',
- writer_datasource_id: 'WriterSourceId',
- target_field: 'TargetField',
- field_length: 'FieldLength',
- logic_operator: 'LogicOperator',
- regexp_pattern: 'RegexpPattern',
- deadline: 'Deadline',
- datetime_format: 'DatetimeFormat',
- enum_list: 'EnumList',
- begin_time: 'BeginTime',
- fix_value: 'FixValue',
- null_check: 'NullCheck',
- custom_sql: 'Custom Sql',
- single_table: 'Single Table',
- single_table_custom_sql: 'Single Table Custom Sql',
- multi_table_accuracy: 'Multi Table Accuracy',
- multi_table_value_comparison: 'Multi Table Compare',
- field_length_check: 'FieldLengthCheck',
- uniqueness_check: 'UniquenessCheck',
- regexp_check: 'RegexpCheck',
- timeliness_check: 'TimelinessCheck',
- enumeration_check: 'EnumerationCheck',
- table_count_check: 'TableCountCheck',
- all: 'All',
- FixValue: 'FixValue',
- DailyAvg: 'DailyAvg',
- WeeklyAvg: 'WeeklyAvg',
- MonthlyAvg: 'MonthlyAvg',
- Last7DayAvg: 'Last7DayAvg',
- Last30DayAvg: 'Last30DayAvg',
- SrcTableTotalRows: 'SrcTableTotalRows',
- TargetTableTotalRows: 'TargetTableTotalRows'
- }
-}
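
Locale modules like the one deleted above are plain nested string maps that index.ts merges into a single dictionary (see the following hunks), so i18n keys resolve by dotted path; that is why removing the file must be paired with removing its import and key from the aggregate. A self-contained sketch of the lookup, using an illustrative one-entry stand-in for the deleted module:

```typescript
// Illustrative only: a one-entry stand-in for the deleted en_US module,
// showing how dotted i18n keys resolve through the merged dictionary.
const en_US = { data_quality: { rule: { name: 'Rule Name' } } }

const lookup = (key: string): unknown =>
  key.split('.').reduce((node: any, part) => node?.[part], en_US)

console.log(lookup('data_quality.rule.name')) // 'Rule Name'
```
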
diff --git a/dolphinscheduler-ui/src/locales/en_US/index.ts b/dolphinscheduler-ui/src/locales/en_US/index.ts
index ec661005af12..0e289642de67 100644
--- a/dolphinscheduler-ui/src/locales/en_US/index.ts
+++ b/dolphinscheduler-ui/src/locales/en_US/index.ts
@@ -16,7 +16,6 @@
*/
import crontab from '@/locales/en_US/crontab'
-import data_quality from '@/locales/en_US/data-quality'
import datasource from '@/locales/en_US/datasource'
import home from '@/locales/en_US/home'
import input_search from '@/locales/en_US/input-search'
@@ -49,7 +48,6 @@ export default {
project,
security,
datasource,
- data_quality,
crontab,
ui_setting,
input_search
diff --git a/dolphinscheduler-ui/src/locales/en_US/menu.ts b/dolphinscheduler-ui/src/locales/en_US/menu.ts
index 9414e73d3ddd..df7a75010d80 100644
--- a/dolphinscheduler-ui/src/locales/en_US/menu.ts
+++ b/dolphinscheduler-ui/src/locales/en_US/menu.ts
@@ -56,7 +56,6 @@ export default {
task_group_manage: 'Task Group Manage',
task_group_option: 'Task Group Option',
task_group_queue: 'Task Group Queue',
- data_quality: 'Data Quality',
task_result: 'Task Result',
rule: 'Rule management',
ui_setting: 'UI Setting'
diff --git a/dolphinscheduler-ui/src/locales/en_US/project.ts b/dolphinscheduler-ui/src/locales/en_US/project.ts
index eb44116f6db4..910b364c01d5 100644
--- a/dolphinscheduler-ui/src/locales/en_US/project.ts
+++ b/dolphinscheduler-ui/src/locales/en_US/project.ts
@@ -937,7 +937,6 @@ export default {
cloud: 'Cloud',
logic: 'Logic',
di: 'Data Integration',
- dq: 'Data Quality',
ml: 'Machine Learning',
other: 'Other'
},
diff --git a/dolphinscheduler-ui/src/locales/zh_CN/data-quality.ts b/dolphinscheduler-ui/src/locales/zh_CN/data-quality.ts
deleted file mode 100644
index 55c66dcba792..000000000000
--- a/dolphinscheduler-ui/src/locales/zh_CN/data-quality.ts
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-export default {
- task_result: {
- task_name: '任务名称',
- workflow_instance: '工作流实例',
- rule_type: '规则类型',
- rule_name: '规则名称',
- state: '状态',
- actual_value: '实际值',
- excepted_value: '期望值',
- check_type: '检测类型',
- operator: '操作符',
- threshold: '阈值',
- failure_strategy: '失败策略',
- excepted_value_type: '期望值类型',
- error_output_path: '错误数据路径',
- username: '用户名',
- create_time: '创建时间',
- update_time: '更新时间',
- undone: '未完成',
- success: '成功',
- failure: '失败',
- single_table: '单表检测',
- single_table_custom_sql: '自定义SQL',
- multi_table_accuracy: '多表准确性',
- multi_table_comparison: '两表值对比',
- expected_and_actual_or_expected: '(期望值-实际值)/实际值 x 100%',
- expected_and_actual: '期望值-实际值',
- actual_and_expected: '实际值-期望值',
- actual_or_expected: '实际值/期望值 x 100%'
- },
- rule: {
- actions: '操作',
- name: '规则名称',
- type: '规则类型',
- username: '用户名',
- create_time: '创建时间',
- update_time: '更新时间',
- input_item: '规则输入项',
- view_input_item: '查看规则输入项信息',
- input_item_title: '输入项标题',
- input_item_placeholder: '输入项占位符',
- input_item_type: '输入项类型',
- src_connector_type: '源数据类型',
- src_datasource_id: '源数据源',
- src_database: '源数据库',
- src_table: '源数据表',
- src_filter: '源表过滤条件',
- src_field: '源表检测列',
- statistics_name: '实际值名',
- check_type: '校验方式',
- operator: '校验操作符',
- threshold: '阈值',
- failure_strategy: '失败策略',
- target_connector_type: '目标数据类型',
- target_datasource_id: '目标数据源',
- target_database: '目标数据库',
- target_table: '目标数据表',
- target_filter: '目标表过滤条件',
- mapping_columns: 'ON语句',
- statistics_execute_sql: '实际值计算SQL',
- comparison_name: '期望值名',
- comparison_execute_sql: '期望值计算SQL',
- comparison_type: '期望值类型',
- writer_connector_type: '输出数据类型',
- writer_datasource_id: '输出数据源',
- target_field: '目标表检测列',
- field_length: '字段长度限制',
- logic_operator: '逻辑操作符',
- regexp_pattern: '正则表达式',
- deadline: '截止时间',
- datetime_format: '时间格式',
- enum_list: '枚举值列表',
- begin_time: '起始时间',
- fix_value: '固定值',
- null_check: '空值检测',
- custom_sql: '自定义SQL',
- single_table: '单表检测',
- multi_table_accuracy: '多表准确性',
- multi_table_value_comparison: '两表值比对',
- field_length_check: '字段长度校验',
- uniqueness_check: '唯一性校验',
- regexp_check: '正则表达式',
- timeliness_check: '及时性校验',
- enumeration_check: '枚举值校验',
- table_count_check: '表行数校验',
- all: '全部',
- FixValue: '固定值',
- DailyAvg: '日均值',
- WeeklyAvg: '周均值',
- MonthlyAvg: '月均值',
- Last7DayAvg: '最近7天均值',
- Last30DayAvg: '最近30天均值',
- SrcTableTotalRows: '源表总行数',
- TargetTableTotalRows: '目标表总行数'
- }
-}
diff --git a/dolphinscheduler-ui/src/locales/zh_CN/index.ts b/dolphinscheduler-ui/src/locales/zh_CN/index.ts
index 6dfce3788b57..fdd36e1a5cb0 100644
--- a/dolphinscheduler-ui/src/locales/zh_CN/index.ts
+++ b/dolphinscheduler-ui/src/locales/zh_CN/index.ts
@@ -16,7 +16,6 @@
*/
import crontab from '@/locales/zh_CN/crontab'
-import data_quality from '@/locales/zh_CN/data-quality'
import datasource from '@/locales/zh_CN/datasource'
import home from '@/locales/zh_CN/home'
import input_search from '@/locales/zh_CN/input-search'
@@ -49,7 +48,6 @@ export default {
project,
security,
datasource,
- data_quality,
crontab,
ui_setting,
input_search
diff --git a/dolphinscheduler-ui/src/locales/zh_CN/menu.ts b/dolphinscheduler-ui/src/locales/zh_CN/menu.ts
index 5111b80d116b..52bf57de9607 100644
--- a/dolphinscheduler-ui/src/locales/zh_CN/menu.ts
+++ b/dolphinscheduler-ui/src/locales/zh_CN/menu.ts
@@ -57,7 +57,6 @@ export default {
task_group_manage: '任务组管理',
task_group_option: '任务组配置',
task_group_queue: '任务组队列',
- data_quality: '数据质量',
task_result: '任务结果',
rule: '规则管理'
}
diff --git a/dolphinscheduler-ui/src/locales/zh_CN/project.ts b/dolphinscheduler-ui/src/locales/zh_CN/project.ts
index e1cfd541ad9b..58d48b796bd3 100644
--- a/dolphinscheduler-ui/src/locales/zh_CN/project.ts
+++ b/dolphinscheduler-ui/src/locales/zh_CN/project.ts
@@ -907,7 +907,6 @@ export default {
cloud: '云',
logic: '逻辑节点',
di: '数据集成',
- dq: '数据质量',
ml: '机器学习',
other: '其他'
},
diff --git a/dolphinscheduler-ui/src/router/modules/data-quality.ts b/dolphinscheduler-ui/src/router/modules/data-quality.ts
deleted file mode 100644
index 71c0f193125f..000000000000
--- a/dolphinscheduler-ui/src/router/modules/data-quality.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import type { Component } from 'vue'
-import utils from '@/utils'
-
-// All TSX files under the views folder are automatically mapped to route components
-const modules = import.meta.glob('/src/views/**/**.tsx')
-const components: { [key: string]: Component } = utils.mapping(modules)
-
-export default {
- path: '/data-quality',
- name: 'data-quality',
- meta: { title: 'data-quality' },
- redirect: { name: 'task-result' },
- component: () => import('@/layouts/content'),
- children: [
- {
- path: '/data-quality/task-result',
- name: 'task-result',
- component: components['data-quality-task-result'],
- meta: {
- title: '数据质量-task-result',
- activeMenu: 'data-quality',
- showSide: true,
- auth: []
- }
- },
- {
- path: '/data-quality/rule',
- name: 'data-quality-rule',
- component: components['data-quality-rule'],
- meta: {
- title: '数据质量-rule',
- activeMenu: 'data-quality',
- showSide: true,
- auth: []
- }
- }
- ]
-}
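
Aside: the deleted route module relies on a repo-wide convention — every view under `src/views` is collected with a Vite glob import and keyed by a path-derived name, which is why the routes above reference components such as `data-quality-rule` as plain strings. A minimal sketch of that mapping, with a simplified stand-in for the project's `utils.mapping` helper (the key-derivation details below are illustrative assumptions, not the exact implementation):

```ts
import type { Component } from 'vue'

// Vite's import.meta.glob returns lazy loaders keyed by absolute file path,
// e.g. '/src/views/data-quality/rule/index.tsx' -> () => import(...).
const modules = import.meta.glob('/src/views/**/**.tsx')

// Illustrative stand-in for utils.mapping: turn each path into a component
// key such as 'data-quality-rule', so routes can look components up by name.
function mapping(mods: Record<string, () => Promise<unknown>>) {
  const components: { [key: string]: Component } = {}
  for (const path of Object.keys(mods)) {
    const key = path
      .replace('/src/views/', '')
      .replace(/\/index\.tsx$/, '')
      .split('/')
      .join('-')
    components[key] = mods[path] as unknown as Component
  }
  return components
}

const components = mapping(modules)
// components['data-quality-rule'] is what the removed route resolved to.
```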
diff --git a/dolphinscheduler-ui/src/router/routes.ts b/dolphinscheduler-ui/src/router/routes.ts
index e17c7e1b9322..1a06ac035dd5 100644
--- a/dolphinscheduler-ui/src/router/routes.ts
+++ b/dolphinscheduler-ui/src/router/routes.ts
@@ -23,7 +23,6 @@ import resourcesPage from './modules/resources'
import datasourcePage from './modules/datasource'
import monitorPage from './modules/monitor'
import securityPage from './modules/security'
-import dataQualityPage from './modules/data-quality'
// todo: why is it throwing cannot find module and its corresponding type, but the render is working?
import uiSettingPage from './modules/ui-setting'
@@ -85,7 +84,6 @@ const basePage: RouteRecordRaw[] = [
datasourcePage,
monitorPage,
securityPage,
- dataQualityPage,
uiSettingPage
]
diff --git a/dolphinscheduler-ui/src/service/modules/data-quality/index.ts b/dolphinscheduler-ui/src/service/modules/data-quality/index.ts
deleted file mode 100644
index c58a62e9108e..000000000000
--- a/dolphinscheduler-ui/src/service/modules/data-quality/index.ts
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { axios } from '@/service/service'
-import type { RuleListReq, ResultListReq } from './types'
-
-export function queryRuleListPaging(params: RuleListReq): any {
- return axios({
- url: '/data-quality/rule/page',
- method: 'get',
- params
- })
-}
-
-export function queryExecuteResultListPaging(params: ResultListReq): any {
- return axios({
- url: '/data-quality/result/page',
- method: 'get',
- params
- })
-}
-
-export function queryRuleList(): any {
- return axios({
- url: '/data-quality/ruleList',
- method: 'get'
- })
-}
-
-export function getRuleFormCreateJson(ruleId: number): any {
- return axios({
- url: '/data-quality/getRuleFormCreateJson',
- method: 'get',
- params: {
- ruleId
- }
- })
-}
-
-export function getDatasourceOptionsById(datasourceId: number): any {
- return axios({
- url: '/data-quality/getDatasourceOptionsById',
- method: 'get',
- params: {
- datasourceId
- }
- })
-}
diff --git a/dolphinscheduler-ui/src/service/modules/data-quality/types.ts b/dolphinscheduler-ui/src/service/modules/data-quality/types.ts
deleted file mode 100644
index a8065560be8f..000000000000
--- a/dolphinscheduler-ui/src/service/modules/data-quality/types.ts
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-interface ListReq {
- pageNo: number
- pageSize: number
- searchVal?: string
-}
-
-interface RuleListReq extends ListReq {
- endDate?: string
- startDate?: string
- ruleType?: string
-}
-
-interface ResultListReq extends ListReq {
- endDate?: string
- startDate?: string
- ruleType?: string
- state?: string
-}
-
-interface ResultItem {
- id: number
- workflowDefinitionId: number
- workflowDefinitionName: string
- workflowDefinitionCode: number
- workflowInstanceId: number
- workflowInstanceName: string
- projectCode: number
- taskInstanceId: number
- taskName: string
- ruleType: number
- ruleName: string
- statisticsValue: number
- comparisonValue: number
- comparisonType: number
- comparisonTypeName: string
- checkType: number
- threshold: number
- operator: number
- failureStrategy: number
- userId: number
- userName: string
- state: number
- errorOutputPath: string
- createTime: string
- updateTime: string
-}
-
-interface ResultListRes {
- totalList: ResultItem[]
- total: number
- totalPage: number
- pageSize: number
- currentPage: number
- start: number
-}
-
-interface Rule {
- id: number
- name: string
- ruleJson: string
- type: number
- userId: number
- userName: string
- createTime: string
- updateTime: string
-}
-
-interface RuleRes {
- totalList: Rule[]
- total: number
- totalPage: number
- pageSize: number
- currentPage: number
- start: number
-}
-
-export { RuleListReq, ResultListReq, ResultItem, ResultListRes, Rule, RuleRes }
diff --git a/dolphinscheduler-ui/src/store/project/task-type.ts b/dolphinscheduler-ui/src/store/project/task-type.ts
index e1c56da4d1e3..b655a0814934 100644
--- a/dolphinscheduler-ui/src/store/project/task-type.ts
+++ b/dolphinscheduler-ui/src/store/project/task-type.ts
@@ -70,10 +70,6 @@ export const TASK_TYPES_MAP = {
CONDITIONS: {
alias: 'CONDITIONS'
},
- DATA_QUALITY: {
- alias: 'DATA_QUALITY',
- helperLinkDisable: true
- },
SWITCH: {
alias: 'SWITCH'
},
diff --git a/dolphinscheduler-ui/src/store/project/types.ts b/dolphinscheduler-ui/src/store/project/types.ts
index b3a077e6dcf9..bf2d4df3cee8 100644
--- a/dolphinscheduler-ui/src/store/project/types.ts
+++ b/dolphinscheduler-ui/src/store/project/types.ts
@@ -35,7 +35,6 @@ type TaskType =
| 'DATAX'
| 'SQOOP'
| 'CONDITIONS'
- | 'DATA_QUALITY'
| 'SWITCH'
| 'SEATUNNEL'
| 'EMR'
diff --git a/dolphinscheduler-ui/src/views/data-quality/rule/components/rule-modal.tsx b/dolphinscheduler-ui/src/views/data-quality/rule/components/rule-modal.tsx
deleted file mode 100644
index 3cc5504e2a63..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/rule/components/rule-modal.tsx
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { defineComponent, PropType } from 'vue'
-import { useI18n } from 'vue-i18n'
-import { NDataTable } from 'naive-ui'
-import Modal from '@/components/modal'
-import { TableColumns } from 'naive-ui/es/data-table/src/interface'
-
-const props = {
- show: {
- type: Boolean as PropType<boolean>,
- default: false
- },
- data: {
- type: String as PropType<string>,
- default: ''
- }
-}
-
-export default defineComponent({
- name: 'ruleInputEntry',
- props,
- emits: ['cancel', 'confirm'],
- setup(props, ctx) {
- const { t } = useI18n()
-
- const ruleInputEntryList = JSON.parse(props.data).ruleInputEntryList
-
- ruleInputEntryList.forEach((item: any) => {
- item.title = t(
- 'data_quality.rule.' + item.title.substring(3, item.title.length - 1)
- )
- })
-
- const columns: TableColumns = [
- {
- title: t('data_quality.rule.input_item_title'),
- key: 'title'
- },
- {
- title: t('data_quality.rule.input_item_placeholder'),
- key: 'field'
- },
- {
- title: t('data_quality.rule.input_item_type'),
- key: 'type'
- }
- ]
-
- const onCancel = () => {
- ctx.emit('cancel')
- }
-
- const onConfirm = () => {
- ctx.emit('confirm')
- }
-
- return {
- onCancel,
- onConfirm,
- columns,
- ruleInputEntryList
- }
- },
-
- render() {
- const { t } = useI18n()
-
- return (
-   <Modal
-     show={this.show}
-     onCancel={this.onCancel}
-     onConfirm={this.onConfirm}
-   >
-     <NDataTable columns={this.columns} data={this.ruleInputEntryList} />
-   </Modal>
- )
- }
-})
diff --git a/dolphinscheduler-ui/src/views/data-quality/rule/components/table-action.tsx b/dolphinscheduler-ui/src/views/data-quality/rule/components/table-action.tsx
deleted file mode 100644
index bc8e2a2dd5ee..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/rule/components/table-action.tsx
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { defineComponent, PropType } from 'vue'
-import { useI18n } from 'vue-i18n'
-import { NSpace, NTooltip, NButton, NIcon } from 'naive-ui'
-import { InfoCircleFilled } from '@vicons/antd'
-import type { Rule } from '@/service/modules/data-quality/types'
-
-interface ItemRow extends Rule {}
-
-const props = {
- row: {
- type: Object as PropType<ItemRow>,
- default: {}
- }
-}
-
-const TableAction = defineComponent({
- name: 'TableAction',
- props,
- emits: ['viewRuleEntry'],
- setup(props, { emit }) {
- const { t } = useI18n()
-
- const viewRuleEntryDetails = (detail: string) => {
- emit('viewRuleEntry', detail)
- }
-
- return { t, viewRuleEntryDetails }
- },
- render() {
- const { t, viewRuleEntryDetails } = this
-
- return (
-   <NSpace>
-     <NTooltip trigger={'hover'}>
-       {{
-         default: () => t('data_quality.rule.view_input_item'),
-         trigger: () => (
-           <NButton
-             onClick={() => viewRuleEntryDetails(this.row.ruleJson)}
-             circle
-           >
-             <NIcon>
-               <InfoCircleFilled />
-             </NIcon>
-           </NButton>
-         )
-       }}
-     </NTooltip>
-   </NSpace>
- )
- }
-})
-
-export default TableAction
diff --git a/dolphinscheduler-ui/src/views/data-quality/rule/index.tsx b/dolphinscheduler-ui/src/views/data-quality/rule/index.tsx
deleted file mode 100644
index 99f164018f8e..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/rule/index.tsx
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import {
- defineComponent,
- getCurrentInstance,
- onMounted,
- ref,
- toRefs
-} from 'vue'
-import { NSpace, NButton, NIcon, NDataTable, NPagination } from 'naive-ui'
-import { SearchOutlined } from '@vicons/antd'
-import { useTable } from './use-table'
-import Card from '@/components/card'
-import Search from '@/components/input-search'
-import RuleModal from './components/rule-modal'
-
-const TaskResult = defineComponent({
- name: 'rule',
- setup() {
- const { t, variables, getTableData } = useTable()
- const showModalRef = ref(false)
- const ruleEntryData = ref('')
-
- const requestTableData = () => {
- getTableData({
- pageSize: variables.pageSize,
- pageNo: variables.page,
- startDate: '',
- endDate: '',
- searchVal: variables.searchVal
- })
- }
-
- const onUpdatePageSize = () => {
- variables.page = 1
- requestTableData()
- }
-
- const onSearch = () => {
- requestTableData()
- }
-
- const onCancel = () => {
- showModalRef.value = false
- }
-
- const onConfirm = () => {
- showModalRef.value = false
- }
-
- const viewRuleEntry = (ruleJson: string) => {
- showModalRef.value = true
- ruleEntryData.value = ruleJson
- }
-
- const trim = getCurrentInstance()?.appContext.config.globalProperties.trim
-
- onMounted(() => {
- requestTableData()
- })
-
- return {
- t,
- ...toRefs(variables),
- requestTableData,
- onUpdatePageSize,
- showModalRef,
- onCancel,
- onConfirm,
- onSearch,
- ruleEntryData,
- viewRuleEntry,
- trim
- }
- },
- render() {
- const {
- t,
- showModalRef,
- requestTableData,
- onUpdatePageSize,
- onSearch,
- onCancel,
- onConfirm,
- viewRuleEntry,
- ruleEntryData,
- loadingRef
- } = this
-
- const { columns } = useTable(viewRuleEntry)
-
- return (
-   {/* JSX lost in extraction: a Card containing the search toolbar (Search
-       input and an NButton with a SearchOutlined NIcon), the rules NDataTable,
-       and an NPagination wired to requestTableData / onUpdatePageSize. */}
-   {showModalRef && (
-     <RuleModal
-       show={showModalRef}
-       data={ruleEntryData}
-       onCancel={onCancel}
-       onConfirm={onConfirm}
-     />
-   )}
- )
- }
-})
-
-export default TaskResult
diff --git a/dolphinscheduler-ui/src/views/data-quality/rule/use-table.ts b/dolphinscheduler-ui/src/views/data-quality/rule/use-table.ts
deleted file mode 100644
index 952d0bb84395..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/rule/use-table.ts
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { useI18n } from 'vue-i18n'
-import { h, reactive, ref } from 'vue'
-import { useAsyncState } from '@vueuse/core'
-import { queryRuleListPaging } from '@/service/modules/data-quality'
-import type { RuleRes } from '@/service/modules/data-quality/types'
-import TableAction from './components/table-action'
-import _ from 'lodash'
-import { format } from 'date-fns'
-import { TableColumns } from 'naive-ui/es/data-table/src/interface'
-import { parseTime } from '@/common/common'
-
-export function useTable(viewRuleEntry = (unusedRuleJson: string): void => {}) {
- const { t } = useI18n()
-
- const variables = reactive({
- tableData: [],
- page: ref(1),
- pageSize: ref(10),
- state: ref(null),
- searchVal: '',
- totalPage: ref(1),
- loadingRef: ref(false)
- })
-
- const columns: TableColumns = [
- {
- title: t('data_quality.rule.name'),
- key: 'ruleName'
- },
- {
- title: t('data_quality.rule.type'),
- key: 'ruleTypeName'
- },
- {
- title: t('data_quality.rule.username'),
- key: 'userName'
- },
- {
- title: t('data_quality.rule.create_time'),
- key: 'createTime'
- },
- {
- title: t('data_quality.rule.update_time'),
- key: 'updateTime'
- },
- {
- title: t('data_quality.rule.actions'),
- key: 'actions',
- width: 150,
- render: (row: any) =>
- h(TableAction, {
- row,
- onViewRuleEntry: (ruleJson: string) => {
- viewRuleEntry(ruleJson)
- }
- })
- }
- ]
-
- const ruleTypeMapping = [
- {
- code: -1,
- label: t('data_quality.rule.all')
- },
- {
- code: 0,
- label: t('data_quality.rule.single_table')
- },
- {
- code: 1,
- label: t('data_quality.rule.custom_sql')
- },
- {
- code: 2,
- label: t('data_quality.rule.multi_table_accuracy')
- },
- {
- code: 3,
- label: t('data_quality.rule.multi_table_value_comparison')
- }
- ]
-
- const getTableData = (params: any) => {
- if (variables.loadingRef) return
- variables.loadingRef = true
- const data = {
- pageSize: params.pageSize,
- pageNo: params.pageNo,
- searchVal: params.searchVal,
- startDate: params.startDate,
- endDate: params.endDate
- }
-
- const { state } = useAsyncState(
- queryRuleListPaging(data).then((res: RuleRes) => {
- variables.totalPage = res.totalPage
- variables.tableData = res.totalList.map((item, unused) => {
- const ruleName =
- 'data_quality.rule.' + item.name.substring(3, item.name.length - 1)
- const ruleNameLocale = t(ruleName)
-
- const ruleType = _.find(ruleTypeMapping, { code: item.type })
-
- let ruleTypeName = ''
-
- if (ruleType) {
- ruleTypeName = ruleType.label
- }
-
- item.createTime = format(
- parseTime(item.createTime),
- 'yyyy-MM-dd HH:mm:ss'
- )
- item.updateTime = format(
- parseTime(item.updateTime),
- 'yyyy-MM-dd HH:mm:ss'
- )
-
- return {
- ...item,
- ruleName: ruleNameLocale,
- ruleTypeName: ruleTypeName
- }
- }) as any
-
- variables.loadingRef = false
- }),
- {}
- )
-
- return state
- }
-
- return { t, variables, getTableData, columns }
-}
diff --git a/dolphinscheduler-ui/src/views/data-quality/task-result/index.tsx b/dolphinscheduler-ui/src/views/data-quality/task-result/index.tsx
deleted file mode 100644
index f3bdc0f8f3bb..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/task-result/index.tsx
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import {
- defineComponent,
- getCurrentInstance,
- onMounted,
- toRefs,
- watch
-} from 'vue'
-import {
- NSpace,
- NSelect,
- NDatePicker,
- NButton,
- NIcon,
- NDataTable,
- NPagination
-} from 'naive-ui'
-import { SearchOutlined } from '@vicons/antd'
-import { useTable } from './use-table'
-import { useI18n } from 'vue-i18n'
-import Card from '@/components/card'
-import Search from '@/components/input-search'
-
-const TaskResult = defineComponent({
- name: 'task-result',
- setup() {
- const { t, variables, getTableData, createColumns } = useTable()
-
- const requestTableData = () => {
- getTableData({
- pageSize: variables.pageSize,
- pageNo: variables.page,
- ruleType: variables.ruleType,
- state: variables.state,
- searchVal: variables.searchVal,
- datePickerRange: variables.datePickerRange
- })
- }
-
- const onUpdatePageSize = () => {
- variables.page = 1
- requestTableData()
- }
-
- const onSearch = () => {
- variables.page = 1
- requestTableData()
- }
-
- const trim = getCurrentInstance()?.appContext.config.globalProperties.trim
-
- onMounted(() => {
- createColumns(variables)
- requestTableData()
- })
-
- watch(useI18n().locale, () => {
- createColumns(variables)
- })
-
- return {
- t,
- ...toRefs(variables),
- requestTableData,
- onUpdatePageSize,
- onSearch,
- trim
- }
- },
- render() {
- const { t, requestTableData, onUpdatePageSize, onSearch, loadingRef } = this
-
- return (
-   {/* JSX lost in extraction: the filter toolbar (rule-type NSelect, state
-       NSelect, NDatePicker range, Search input, and an NButton with a
-       SearchOutlined NIcon) above the results NDataTable and NPagination,
-       wired to onSearch / requestTableData / onUpdatePageSize. */}
- )
- }
-})
-
-export default TaskResult
diff --git a/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts b/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts
deleted file mode 100644
index 1882d5142aa8..000000000000
--- a/dolphinscheduler-ui/src/views/data-quality/task-result/use-table.ts
+++ /dev/null
@@ -1,290 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { useI18n } from 'vue-i18n'
-import { h, reactive, ref } from 'vue'
-import { useAsyncState } from '@vueuse/core'
-import { queryExecuteResultListPaging } from '@/service/modules/data-quality'
-import { format } from 'date-fns'
-import {
- COLUMN_WIDTH_CONFIG,
- calculateTableWidth,
- DefaultTableWidth
-} from '@/common/column-width-config'
-import type {
- ResultItem,
- ResultListRes
-} from '@/service/modules/data-quality/types'
-import { parseTime } from '@/common/common'
-import ButtonLink from '@/components/button-link'
-import { NEllipsis, NTag } from 'naive-ui'
-import { useRouter } from 'vue-router'
-
-export function useTable() {
- const { t } = useI18n()
- const router = useRouter()
-
- const variables = reactive({
- columns: [],
- tableWidth: DefaultTableWidth,
- tableData: [],
- page: ref(1),
- pageSize: ref(10),
- ruleType: ref(null),
- state: ref(null),
- searchVal: ref(null),
- datePickerRange: ref(null),
- totalPage: ref(1),
- loadingRef: ref(false)
- })
-
- const createColumns = (variables: any) => {
- variables.columns = [
- {
- title: '#',
- key: 'index',
- render: (row: any, index: number) => index + 1,
- ...COLUMN_WIDTH_CONFIG['index']
- },
- {
- title: t('data_quality.task_result.task_name'),
- key: 'taskName',
- ...COLUMN_WIDTH_CONFIG['userName']
- },
- {
- title: t('data_quality.task_result.workflow_instance'),
- key: 'workflowInstanceName',
- ...COLUMN_WIDTH_CONFIG['name'],
- render: (row: ResultItem) =>
- h(
- ButtonLink,
- {
- onClick: () =>
- void router.push({
- name: 'workflow-instance-detail',
- params: {
- projectCode: row.projectCode,
- id: row.workflowInstanceId
- },
- query: { code: row.workflowDefinitionCode }
- })
- },
- {
- default: () =>
- h(
- NEllipsis,
- COLUMN_WIDTH_CONFIG['linkEllipsis'],
- () => row.workflowInstanceName
- )
- }
- )
- },
- {
- title: t('data_quality.task_result.rule_type'),
- key: 'ruleType',
- render: (row: ResultItem) => {
- if (row.ruleType === 0) {
- return t('data_quality.task_result.single_table')
- } else if (row.ruleType === 1) {
- return t('data_quality.task_result.single_table_custom_sql')
- } else if (row.ruleType === 2) {
- return t('data_quality.task_result.multi_table_accuracy')
- } else if (row.ruleType === 3) {
- return t('data_quality.task_result.multi_table_comparison')
- }
- },
- ...COLUMN_WIDTH_CONFIG['ruleType']
- },
- {
- title: t('data_quality.task_result.rule_name'),
- key: 'ruleName',
- ...COLUMN_WIDTH_CONFIG['name']
- },
- {
- title: t('data_quality.task_result.state'),
- key: 'state',
- render: (row: ResultItem) => {
- if (row.state === 0) {
- return h(
- NTag,
- { type: 'info', size: 'small' },
- {
- default: () => t('data_quality.task_result.undone')
- }
- )
- } else if (row.state === 1) {
- return h(
- NTag,
- { type: 'success', size: 'small' },
- {
- default: () => t('data_quality.task_result.success')
- }
- )
- } else if (row.state === 2) {
- return h(
- NTag,
- { type: 'error', size: 'small' },
- {
- default: () => t('data_quality.task_result.failure')
- }
- )
- } else {
- return '-'
- }
- },
- ...COLUMN_WIDTH_CONFIG['state']
- },
- {
- title: t('data_quality.task_result.actual_value'),
- key: 'statisticsValue',
- width: 140
- },
- {
- title: t('data_quality.task_result.excepted_value'),
- key: 'comparisonValue',
- width: 140
- },
- {
- title: t('data_quality.task_result.check_type'),
- key: 'checkType',
- render: (row: ResultItem) => {
- if (row.checkType === 0) {
- return t('data_quality.task_result.expected_and_actual')
- } else if (row.checkType === 1) {
- return t('data_quality.task_result.actual_and_expected')
- } else if (row.checkType === 2) {
- return t('data_quality.task_result.actual_or_expected')
- } else if (row.checkType === 3) {
- return t('data_quality.task_result.expected_and_actual_or_expected')
- }
- },
- ...COLUMN_WIDTH_CONFIG['type']
- },
- {
- title: t('data_quality.task_result.operator'),
- key: 'operator',
- render: (row: ResultItem) => {
- if (row.operator === 0) {
- return '='
- } else if (row.operator === 1) {
- return '<'
- } else if (row.operator === 2) {
- return '<='
- } else if (row.operator === 3) {
- return '>'
- } else if (row.operator === 4) {
- return '>='
- } else if (row.operator === 5) {
- return '!='
- }
- },
- ...COLUMN_WIDTH_CONFIG['userName']
- },
- {
- title: t('data_quality.task_result.threshold'),
- key: 'threshold',
- width: 120
- },
- {
- title: t('data_quality.task_result.failure_strategy'),
- key: 'failureStrategy',
- width: 150,
- render: (row: ResultItem) => {
- if (row.failureStrategy === 0) {
- return 'Alert'
- }
- if (row.failureStrategy === 1) {
- return 'Block'
- }
- return ''
- }
- },
- {
- title: t('data_quality.task_result.excepted_value_type'),
- key: 'comparisonTypeName',
- width: 200
- },
- {
- title: t('data_quality.task_result.error_output_path'),
- key: 'errorOutputPath',
- render: (row: ResultItem) => {
- return row.errorOutputPath ? row.errorOutputPath : '-'
- },
- width: 200
- },
- {
- title: t('data_quality.task_result.username'),
- key: 'userName',
- ...COLUMN_WIDTH_CONFIG['userName']
- },
- {
- title: t('data_quality.task_result.create_time'),
- key: 'createTime',
- ...COLUMN_WIDTH_CONFIG['time']
- },
- {
- title: t('data_quality.task_result.update_time'),
- key: 'updateTime',
- ...COLUMN_WIDTH_CONFIG['time']
- }
- ]
- if (variables.tableWidth) {
- variables.tableWidth = calculateTableWidth(variables.columns)
- }
- }
-
- const getTableData = (params: any) => {
- if (variables.loadingRef) return
- variables.loadingRef = true
- const data = {
- pageSize: params.pageSize,
- pageNo: params.pageNo,
- ruleType: params.ruleType,
- state: params.state,
- searchVal: params.searchVal,
- startDate: params.datePickerRange
- ? format(parseTime(params.datePickerRange[0]), 'yyyy-MM-dd HH:mm:ss')
- : '',
- endDate: params.datePickerRange
- ? format(parseTime(params.datePickerRange[1]), 'yyyy-MM-dd HH:mm:ss')
- : ''
- }
-
- const { state } = useAsyncState(
- queryExecuteResultListPaging(data).then((res: ResultListRes) => {
- variables.totalPage = res.totalPage
- variables.tableData = res.totalList.map((item, unused) => {
- return {
- ...item
- }
- }) as any
-
- variables.loadingRef = false
- }),
- {}
- )
-
- return state
- }
-
- return {
- t,
- variables,
- getTableData,
- createColumns
- }
-}
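
For readers unfamiliar with the pattern the two deleted `use-table.ts` hooks share: naive-ui columns are plain objects whose `render` returns VNodes via `h`, so enum-like fields (state, rule type, check type, operator) are mapped to tags or labels inline. A minimal sketch of one such column, with a hypothetical `StatusRow` type standing in for `ResultItem`:

```ts
import { h } from 'vue'
import { NTag } from 'naive-ui'
import type { TableColumns } from 'naive-ui/es/data-table/src/interface'

// Hypothetical row shape; the deleted table used ResultItem from the
// data-quality service types.
interface StatusRow {
  state: number
}

const stateColumn: TableColumns<StatusRow>[number] = {
  title: 'State',
  key: 'state',
  // Map the numeric state to a colored tag, mirroring the removed column:
  // 0 = undone (info), 1 = success, 2 = failure, anything else renders '-'.
  render: (row) => {
    const types = ['info', 'success', 'error'] as const
    const labels = ['Undone', 'Success', 'Failure']
    const type = types[row.state]
    return type
      ? h(NTag, { type, size: 'small' }, { default: () => labels[row.state] })
      : '-'
  }
}
```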
diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts
index 058f0a00c0f0..cbbe8a70327d 100644
--- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts
+++ b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/index.ts
@@ -42,7 +42,6 @@ export { useSourceType } from './use-sqoop-source-type'
export { useTargetType } from './use-sqoop-target-type'
export { useRelationCustomParams } from './use-relation-custom-params'
export { useDependentTimeout } from './use-dependent-timeout'
-export { useRules } from './use-rules'
export { useDeployMode } from './use-deploy-mode'
export { useDriverCores } from './use-driver-cores'
export { useDriverMemory } from './use-driver-memory'
diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-rules.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-rules.ts
deleted file mode 100644
index 71fc56cc8e41..000000000000
--- a/dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-rules.ts
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import { ref, onMounted, computed } from 'vue'
-import { useI18n } from 'vue-i18n'
-import {
- queryRuleList,
- getRuleFormCreateJson,
- getDatasourceOptionsById
-} from '@/service/modules/data-quality'
-import {
- getDatasourceDatabasesById,
- getDatasourceTablesById,
- getDatasourceTableColumnsById
-} from '@/service/modules/data-source'
-import type { IJsonItem, IResponseJsonItem, IJsonItemParams } from '../types'
-
-export function useRules(
- model: { [field: string]: any },
- updateRules: (items: IJsonItem[], len: number) => void
-): IJsonItem[] {
- const { t } = useI18n()
- const rules = ref([])
- const ruleLoading = ref(false)
- const srcDatasourceOptions = ref([] as { label: string; value: number }[])
- const srcDatabaseOptions = ref([] as { label: string; value: number }[])
- const srcTableOptions = ref([] as { label: string; value: number }[])
- const srcTableColumnOptions = ref([] as { label: string; value: number }[])
- const targetDatasourceOptions = ref([] as { label: string; value: number }[])
- const targetDatabaseOptions = ref([] as { label: string; value: number }[])
- const targetTableOptions = ref([] as { label: string; value: string }[])
- const targetTableColumnOptions = ref([] as { label: string; value: number }[])
- const writerDatasourceOptions = ref([] as { label: string; value: number }[])
-
- const fixValueSpan = computed(() => (model.comparison_type === 1 ? 24 : 0))
-
- let preItemLen = 0
-
- const getRuleList = async () => {
- if (ruleLoading.value) return
- ruleLoading.value = true
- const result = await queryRuleList()
- rules.value = result.map((item: { id: number; name: string }) => {
- let name = ''
- if (item.name) {
- name = item.name.replace('$t(', '').replace(')', '')
- }
- return {
- value: item.id,
- label: name ? t(`project.node.${name}`) : ''
- }
- })
- ruleLoading.value = false
- }
-
- const getRuleById = async (ruleId: number) => {
- if (ruleLoading.value) return
- ruleLoading.value = true
- const result = await getRuleFormCreateJson(ruleId)
- const items = JSON.parse(result).map((item: IResponseJsonItem) =>
- formatResponseJson(item)
- )
- updateRules(items, preItemLen)
- preItemLen = items.length
- ruleLoading.value = false
- }
-
- const formatResponseJson = (
- responseItem: IResponseJsonItem
- ): IJsonItemParams => {
- const item: IJsonItemParams = {
- field: responseItem.field,
- options: responseItem.options,
- validate: responseItem.validate,
- props: responseItem.props,
- value: responseItem.value
- }
- item.props.filterable = true
- const name = responseItem.name?.replace('$t(', '').replace(')', '')
- item.name = name ? t(`project.node.${name}`) : ''
-
- if (responseItem.type !== 'group') {
- item.type = responseItem.type
- } else {
- item.type = 'custom-parameters'
- item.children = item.props.rules.map((child: IJsonItemParams) => {
- child.span = Math.floor(22 / item.props.rules.length)
- return child
- })
- model[item.field] = model[item.field] || []
- delete item.props.rules
- }
- if (responseItem.emit) {
- responseItem.emit.forEach((emit) => {
- if (emit === 'change') {
- item.props.onUpdateValue = (value: string | number) => {
- onFieldChange(value, item.field, true)
- }
- }
- })
- }
- if (responseItem.props.placeholder) {
- item.props.placeholder = t(
- 'project.node.' +
- responseItem.props.placeholder
- .split(' ')
- .join('_')
- .split(',')
- .join('')
- .toLowerCase()
- )
- }
- if (item.field === 'src_datasource_id') {
- item.options = srcDatasourceOptions
- }
- if (item.field === 'src_database') {
- item.options = srcDatabaseOptions
- }
- if (item.field === 'target_datasource_id') {
- item.options = targetDatasourceOptions
- }
- if (item.field === 'target_database') {
- item.options = targetDatabaseOptions
- }
- if (item.field === 'writer_datasource_id') {
- item.options = writerDatasourceOptions
- }
- if (item.field === 'src_table') {
- item.options = srcTableOptions
- item.props.filterable = true
- }
- if (item.field === 'target_table') {
- item.options = targetTableOptions
- item.props.filterable = true
- }
- if (item.field === 'src_field') {
- item.options = srcTableColumnOptions
- }
- if (item.field === 'target_field') {
- item.options = targetTableColumnOptions
- }
-
- if (model[item.field] !== void 0) {
- onFieldChange(model[item.field], item.field, false)
- item.value = model[item.field]
- }
-
- return item
- }
- const onFieldChange = async (
- value: string | number,
- field: string,
- reset: boolean
- ) => {
- if (field === 'src_connector_type' && typeof value === 'number') {
- const result = await getDatasourceOptionsById(value)
- srcDatasourceOptions.value = result || []
- if (reset) {
- srcDatabaseOptions.value = []
- srcTableOptions.value = []
- srcTableColumnOptions.value = []
- model.src_datasource_id = null
- model.src_database = null
- model.src_table = null
- model.src_field = null
- }
- return
- }
- if (field === 'target_connector_type' && typeof value === 'number') {
- const result = await getDatasourceOptionsById(value)
- targetDatasourceOptions.value = result || []
- if (reset) {
- targetDatabaseOptions.value = []
- targetTableOptions.value = []
- targetTableColumnOptions.value = []
- model.target_datasource_id = null
- model.target_database = null
- model.target_table = null
- model.target_field = null
- }
- return
- }
- if (field === 'writer_connector_type' && typeof value === 'number') {
- const result = await getDatasourceOptionsById(value)
- writerDatasourceOptions.value = result || []
- if (reset) {
- model.writer_datasource_id = null
- }
- return
- }
- if (field === 'src_datasource_id' && typeof value === 'number') {
- const result = await getDatasourceDatabasesById(value)
- srcDatabaseOptions.value = result || []
- if (reset) {
- srcTableOptions.value = []
- srcTableColumnOptions.value = []
- model.src_database = null
- model.src_table = null
- model.src_field = null
- }
- }
- if (field === 'target_datasource_id' && typeof value === 'number') {
- const result = await getDatasourceDatabasesById(value)
- targetDatabaseOptions.value = result || []
- if (reset) {
- targetTableOptions.value = []
- targetTableColumnOptions.value = []
- model.target_database = null
- model.target_table = null
- model.target_field = null
- }
- }
-
- if (field === 'src_database' && typeof value === 'string') {
- const result = await getDatasourceTablesById(
- model.src_datasource_id,
- value
- )
- srcTableOptions.value = result || []
- if (reset) {
- srcTableColumnOptions.value = []
- model.src_table = null
- model.src_field = null
- }
- }
-
- if (field === 'target_database' && typeof value === 'string') {
- const result = await getDatasourceTablesById(
- model.target_datasource_id,
- value
- )
- targetTableOptions.value = result || []
- if (reset) {
- targetTableColumnOptions.value = []
- model.target_table = null
- model.target_field = null
- }
- }
-
- if (field === 'src_table' && typeof value === 'string') {
- const result = await getDatasourceTableColumnsById(
- model.src_datasource_id,
- model.src_database,
- value
- )
- srcTableColumnOptions.value = result || []
- if (reset) {
- model.src_field = null
- }
- }
- if (field === 'target_table' && typeof value === 'string') {
- const result = await getDatasourceTableColumnsById(
- model.target_datasource_id,
- model.target_database,
- value
- )
- targetTableColumnOptions.value = result || []
- if (reset) {
- model.target_field = null
- }
- }
- }
-
- onMounted(async () => {
- await getRuleList()
- await getRuleById(model.ruleId)
- })
-
- return [
- {
- type: 'select',
- field: 'ruleId',
- name: t('project.node.rule_name'),
- props: {
- loading: ruleLoading,
- filterable: true,
- onUpdateValue: getRuleById
- },
- options: rules
- },
- {
- type: 'input',
- field: 'comparison_name',
- name: t('project.node.fix_value'),
- props: {
- placeholder: t('project.node.fix_value')
- },
- span: fixValueSpan
- }
- ]
-}
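
The bulk of the deleted `use-rules.ts` is the cascading-select chain in `onFieldChange`: a connector choice loads datasources, a datasource loads databases, and so on down to columns, and every change clears the selections downstream of it so stale values cannot be submitted. A minimal sketch of that reset discipline, with a hypothetical `fetchOptions` standing in for the real `getDatasource*ById` service calls:

```ts
import { ref } from 'vue'

interface Option {
  label: string
  value: number
}

// Hypothetical loader; the removed hook called service endpoints such as
// getDatasourceDatabasesById here.
async function fetchOptions(_parentId: number): Promise<Option[]> {
  return []
}

const databaseOptions = ref<Option[]>([])
const tableOptions = ref<Option[]>([])
const columnOptions = ref<Option[]>([])
const model: { [field: string]: unknown } = {}

// Reload the immediate children of the changed field, and when the change
// came from user input (reset === true) null out every dependent selection.
export async function onDatasourceChange(datasourceId: number, reset: boolean) {
  databaseOptions.value = (await fetchOptions(datasourceId)) || []
  if (reset) {
    tableOptions.value = []
    columnOptions.value = []
    model.src_database = null
    model.src_table = null
    model.src_field = null
  }
}
```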
diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
index 006756f10da5..57c658dd9a6b 100644
--- a/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
+++ b/dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts
@@ -275,49 +275,6 @@ export function formatParams(data: INodeData): {
dependTaskList: data.dependTaskList
}
}
- if (data.taskType === 'DATA_QUALITY') {
- taskParams.ruleId = data.ruleId
- taskParams.ruleInputParameter = {
- check_type: data.check_type,
- comparison_execute_sql: data.comparison_execute_sql,
- comparison_type: data.comparison_type,
- comparison_name: data.comparison_name,
- failure_strategy: data.failure_strategy,
- operator: data.operator,
- src_connector_type: data.src_connector_type,
- src_datasource_id: data.src_datasource_id,
- src_database: data.src_database,
- field_length: data.field_length,
- begin_time: data.begin_time,
- deadline: data.deadline,
- datetime_format: data.datetime_format,
- enum_list: data.enum_list,
- regexp_pattern: data.regexp_pattern,
- target_filter: data.target_filter,
- src_filter: data.src_filter,
- src_field: data.src_field,
- src_table: data.src_table,
- statistics_execute_sql: data.statistics_execute_sql,
- statistics_name: data.statistics_name,
- target_connector_type: data.target_connector_type,
- target_datasource_id: data.target_datasource_id,
- target_database: data.target_database,
- target_table: data.target_table,
- threshold: data.threshold,
- mapping_columns: JSON.stringify(data.mapping_columns)
- }
- taskParams.sparkParameters = {
- deployMode: data.deployMode,
- driverCores: data.driverCores,
- driverMemory: data.driverMemory,
- executorCores: data.executorCores,
- executorMemory: data.executorMemory,
- numExecutors: data.numExecutors,
- others: data.others,
- yarnQueue: data.yarnQueue,
- sqlExecutionType: data.sqlExecutionType
- }
- }
if (data.taskType === 'EMR') {
taskParams.type = data.type
diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts
index 6f777da5493b..e98d709a440f 100644
--- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts
+++ b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/index.ts
@@ -31,7 +31,6 @@ import { useSwitch } from './use-switch'
import { useConditions } from './use-conditions'
import { useDataX } from './use-datax'
import { useDependent } from './use-dependent'
-import { useDataQuality } from './use-data-quality'
import { useEmr } from './use-emr'
import { useZeppelin } from './use-zeppelin'
import { useK8s } from './use-k8s'
@@ -71,7 +70,6 @@ export default {
CONDITIONS: useConditions,
DATAX: useDataX,
DEPENDENT: useDependent,
- DATA_QUALITY: useDataQuality,
EMR: useEmr,
ZEPPELIN: useZeppelin,
K8S: useK8s,
diff --git a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-data-quality.ts b/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-data-quality.ts
deleted file mode 100644
index c45dfa7044f8..000000000000
--- a/dolphinscheduler-ui/src/views/projects/task/components/node/tasks/use-data-quality.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { Ref, reactive } from 'vue'
-import { useI18n } from 'vue-i18n'
-import * as Fields from '../fields/index'
-import type { IJsonItem, INodeData, ITaskData } from '../types'
-
-export function useDataQuality({
- projectCode,
- from = 0,
- readonly,
- data,
- jsonRef,
- updateElements
-}: {
- projectCode: number
- from?: number
- readonly?: boolean
- data?: ITaskData
- jsonRef: Ref<IJsonItem[]>
- updateElements: () => void
-}) {
- const { t } = useI18n()
- const model = reactive({
- taskType: 'DATA_QUALITY',
- name: '',
- flag: 'YES',
- description: '',
- timeoutFlag: false,
- timeoutNotifyStrategy: ['WARN'],
- timeout: 30,
- localParams: [],
- environmentCode: null,
- failRetryInterval: 1,
- failRetryTimes: 0,
- workerGroup: 'default',
- delayTime: 0,
- ruleId: 1,
- deployMode: 'cluster',
- driverCores: 1,
- driverMemory: '512M',
- numExecutors: 2,
- executorMemory: '2G',
- executorCores: 2,
- others: '--conf spark.yarn.maxAppAttempts=1',
- yarnQueue: ''
- } as INodeData)
-
- return {
- json: [
- Fields.useName(from),
- ...Fields.useTaskDefinition({ projectCode, from, readonly, data, model }),
- Fields.useRunFlag(),
- Fields.useCache(),
- Fields.useDescription(),
- Fields.useTaskPriority(),
- Fields.useWorkerGroup(projectCode),
- Fields.useEnvironmentName(model, !data?.id),
- ...Fields.useTaskGroup(model, projectCode),
- ...Fields.useFailed(),
- Fields.useDelayTime(model),
- ...Fields.useTimeoutAlarm(model),
- ...Fields.useRules(model, (items: IJsonItem[], len: number) => {
- jsonRef.value.splice(15, len, ...items)
- updateElements()
- }),
- Fields.useDeployMode(),
- Fields.useDriverCores(),
- Fields.useDriverMemory(),
- Fields.useExecutorNumber(),
- Fields.useExecutorMemory(),
- Fields.useExecutorCores(),
- Fields.useYarnQueue(),
- {
- type: 'input',
- field: 'others',
- name: t('project.node.option_parameters'),
- props: {
- type: 'textarea',
- placeholder: t('project.node.option_parameters_tips')
- }
- },
- ...Fields.useCustomParams({
- model,
- field: 'localParams',
- isSimple: true
- }),
- Fields.usePreTasks()
- ] as IJsonItem[],
- model
- }
-}
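
One line in the deleted hook deserves a gloss: `jsonRef.value.splice(15, len, ...items)`. The rule-specific inputs are not declared statically — `useRules` hands freshly built `IJsonItem`s to this callback, which splices them into the form's json array in place of whatever the previous rule contributed, leaving the static fields on either side untouched. A minimal sketch of that replace-in-place update, with a simplified `FormItem` standing in for the project's `IJsonItem`:

```ts
import { ref } from 'vue'

// Simplified stand-in for the project's IJsonItem form descriptor.
interface FormItem {
  field: string
  type: string
}

// Index where the dynamic rule fields start; the removed hook hard-coded 15,
// i.e. the number of static fields that precede the rule section.
const RULE_FIELDS_START = 15

const jsonRef = ref<FormItem[]>([])
let previousLen = 0

// Replace the previous rule's fields with the new rule's in a single splice,
// so fields before and after the rule section keep their positions.
export function updateRules(items: FormItem[]) {
  jsonRef.value.splice(RULE_FIELDS_START, previousLen, ...items)
  previousLen = items.length
}
```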
diff --git a/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts b/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts
index 22235fa0e2f5..aeea696dd862 100644
--- a/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts
+++ b/dolphinscheduler-ui/src/views/projects/task/constants/task-type.ts
@@ -29,7 +29,6 @@ export type TaskType =
| 'DATAX'
| 'SQOOP'
| 'CONDITIONS'
- | 'DATA_QUALITY'
| 'SWITCH'
| 'SEATUNNEL'
| 'EMR'
@@ -104,10 +103,6 @@ export const TASK_TYPES_MAP = {
CONDITIONS: {
alias: 'CONDITIONS'
},
- DATA_QUALITY: {
- alias: 'DATA_QUALITY',
- helperLinkDisable: true
- },
SWITCH: {
alias: 'SWITCH'
},
diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-sidebar.tsx b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-sidebar.tsx
index 5efbde95a841..ac573c1f4b42 100644
--- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-sidebar.tsx
+++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag-sidebar.tsx
@@ -66,9 +66,6 @@ export default defineComponent({
variables.di = variables.dataList.filter(
(item: any) => item.taskCategory === 'DataIntegration'
)
- variables.dq = variables.dataList.filter(
- (item: any) => item.taskCategory === 'DataQuality'
- )
variables.ml = variables.dataList.filter(
(item: any) => item.taskCategory === 'MachineLearning'
)
diff --git a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss
index af0606b299aa..217059b0ed41 100644
--- a/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss
+++ b/dolphinscheduler-ui/src/views/projects/workflow/components/dag/dag.module.scss
@@ -110,9 +110,6 @@ $bgLight: #ffffff;
&.icon-dynamic {
background-image: url('/images/task-icons/dynamic.png');
}
- &.icon-data_quality {
- background-image: url('/images/task-icons/data_quality.png');
- }
&.icon-procedure {
background-image: url('/images/task-icons/procedure.png');
}
@@ -226,9 +223,6 @@ $bgLight: #ffffff;
&.icon-dynamic {
background-image: url('/images/task-icons/dynamic_hover.png');
}
- &.icon-data_quality {
- background-image: url('/images/task-icons/data_quality_hover.png');
- }
&.icon-procedure {
background-image: url('/images/task-icons/procedure_hover.png');
}
diff --git a/pom.xml b/pom.xml
index 80560f8334b9..8ad04e8bcea0 100755
--- a/pom.xml
+++ b/pom.xml
@@ -45,7 +45,6 @@
        <module>dolphinscheduler-dist</module>
        <module>dolphinscheduler-service</module>
        <module>dolphinscheduler-microbench</module>
-        <module>dolphinscheduler-data-quality</module>
        <module>dolphinscheduler-standalone-server</module>
        <module>dolphinscheduler-datasource-plugin</module>
        <module>dolphinscheduler-meter</module>
@@ -188,12 +187,6 @@
            <version>${project.version}</version>
        </dependency>
-        <dependency>
-            <groupId>org.apache.dolphinscheduler</groupId>
-            <artifactId>dolphinscheduler-data-quality</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
        <dependency>
            <groupId>org.apache.dolphinscheduler</groupId>
            <artifactId>dolphinscheduler-tools</artifactId>