
[Feature][Data Quality] Data Quality Support Choose Database (#14406)

* add dataquality database api

* change ui

* api change

* update

* fix spotless

* fix h2

* fix pg

* fix-dead-line

* update

* fix-spotless

* update pg sql

* add ut

* fix ut
Author: 旺阳 (1 year ago)
Parent commit: 175d976cd2
29 changed files with 547 additions and 34 deletions
  1. docs/docs/en/guide/upgrade/incompatible.md (+4 -0)
  2. docs/docs/zh/guide/upgrade/incompatible.md (+4 -0)
  3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java (+23 -6)
  4. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (+1 -0)
  5. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (+11 -2)
  6. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java (+59 -5)
  7. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java (+29 -0)
  8. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java (+6 -0)
  9. dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql (+44 -2)
  10. dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql (+42 -0)
  11. dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql (+42 -1)
  12. dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql (+45 -0)
  13. dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql (+46 -1)
  14. dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java (+2 -1)
  15. dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java (+2 -1)
  16. dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java (+1 -1)
  17. dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/DataQualityConstants.java (+2 -0)
  18. dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java (+7 -4)
  19. dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java (+1 -0)
  20. dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java (+69 -3)
  21. dolphinscheduler-ui/src/locales/en_US/data-quality.ts (+2 -0)
  22. dolphinscheduler-ui/src/locales/en_US/project.ts (+12 -0)
  23. dolphinscheduler-ui/src/locales/zh_CN/data-quality.ts (+2 -0)
  24. dolphinscheduler-ui/src/locales/zh_CN/project.ts (+12 -0)
  25. dolphinscheduler-ui/src/service/modules/data-source/index.ts (+18 -3)
  26. dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-rules.ts (+53 -3)
  27. dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-type.ts (+2 -1)
  28. dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts (+4 -0)
  29. dolphinscheduler-ui/src/views/projects/task/components/node/types.ts (+2 -0)

+ 4 - 0
docs/docs/en/guide/upgrade/incompatible.md

@@ -18,3 +18,7 @@ This document records the incompatible updates between each version. You need to
 * Copy and import workflow without 'copy' suffix [#10607](https://github.com/apache/dolphinscheduler/pull/10607)
 * Use semicolon as default sql segment separator [#10869](https://github.com/apache/dolphinscheduler/pull/10869)
 
+## 3.2.0
+
+* Add required field `database` in /datasources/tables && /datasources/tableColumns Api [#14406](https://github.com/apache/dolphinscheduler/pull/14406)
+
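For callers upgrading to 3.2.0, these endpoints now need the extra `database` query parameter. Below is a minimal sketch of calling the updated endpoint with Java's built-in HTTP client; the base URL and the `token` session header are deployment-specific assumptions, not part of this change:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListTablesSketch {
    public static void main(String[] args) throws Exception {
        // Assumed base URL and session token for a local DolphinScheduler API server.
        String base = "http://localhost:12345/dolphinscheduler";
        HttpClient client = HttpClient.newHttpClient();
        // `database` is now required alongside `datasourceId` on /datasources/tables
        // (and, together with `tableName`, on /datasources/tableColumns).
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + "/datasources/tables?datasourceId=1&database=test"))
                .header("token", "<session-token>")
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```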

+ 4 - 0
docs/docs/zh/guide/upgrade/incompatible.md

@@ -18,3 +18,7 @@
 * Copy and import workflow without 'copy' suffix [#10607](https://github.com/apache/dolphinscheduler/pull/10607)
 * Use semicolon as default sql segment separator [#10869](https://github.com/apache/dolphinscheduler/pull/10869)
 
+## 3.2.0
+
+* 在 /datasources/tables && /datasources/tableColumns 接口中添加了必选字段`database` [#14406](https://github.com/apache/dolphinscheduler/pull/14406)
+

+ 23 - 6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java

@@ -22,6 +22,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.CONNECTION_TEST_FAILU
 import static org.apache.dolphinscheduler.api.enums.Status.CONNECT_DATASOURCE_FAILURE;
 import static org.apache.dolphinscheduler.api.enums.Status.CREATE_DATASOURCE_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.DELETE_DATA_SOURCE_FAILURE;
+import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_DATABASES_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_TABLES_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_TABLE_COLUMNS_ERROR;
 import static org.apache.dolphinscheduler.api.enums.Status.KERBEROS_STARTUP_STATE;
@@ -340,27 +341,43 @@ public class DataSourceController extends BaseController {
 
     @Operation(summary = "tables", description = "GET_DATASOURCE_TABLES_NOTES")
     @Parameters({
-            @Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1"))
+            @Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1")),
+            @Parameter(name = "database", description = "DATABASE", required = true, schema = @Schema(implementation = String.class, example = "test"))
     })
     @GetMapping(value = "/tables")
     @ResponseStatus(HttpStatus.OK)
     @ApiException(GET_DATASOURCE_TABLES_ERROR)
-    public Result getTables(@RequestParam("datasourceId") Integer datasourceId) {
-        Map<String, Object> result = dataSourceService.getTables(datasourceId);
+    public Result getTables(@RequestParam("datasourceId") Integer datasourceId,
+                            @RequestParam(value = "database") String database) {
+        Map<String, Object> result = dataSourceService.getTables(datasourceId, database);
         return returnDataList(result);
     }
 
     @Operation(summary = "tableColumns", description = "GET_DATASOURCE_TABLE_COLUMNS_NOTES")
     @Parameters({
             @Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1")),
-            @Parameter(name = "tableName", description = "TABLE_NAME", required = true, schema = @Schema(implementation = String.class, example = "test"))
+            @Parameter(name = "tableName", description = "TABLE_NAME", required = true, schema = @Schema(implementation = String.class, example = "test")),
+            @Parameter(name = "database", description = "DATABASE", required = true, schema = @Schema(implementation = String.class, example = "test"))
     })
     @GetMapping(value = "/tableColumns")
     @ResponseStatus(HttpStatus.OK)
     @ApiException(GET_DATASOURCE_TABLE_COLUMNS_ERROR)
     public Result getTableColumns(@RequestParam("datasourceId") Integer datasourceId,
-                                  @RequestParam("tableName") String tableName) {
-        Map<String, Object> result = dataSourceService.getTableColumns(datasourceId, tableName);
+                                  @RequestParam("tableName") String tableName,
+                                  @RequestParam(value = "database") String database) {
+        Map<String, Object> result = dataSourceService.getTableColumns(datasourceId, database, tableName);
+        return returnDataList(result);
+    }
+
+    @Operation(summary = "databases", description = "GET_DATASOURCE_DATABASE_NOTES")
+    @Parameters({
+            @Parameter(name = "datasourceId", description = "DATA_SOURCE_ID", required = true, schema = @Schema(implementation = int.class, example = "1"))
+    })
+    @GetMapping(value = "/databases")
+    @ResponseStatus(HttpStatus.OK)
+    @ApiException(GET_DATASOURCE_DATABASES_ERROR)
+    public Result getDatabases(@RequestParam("datasourceId") Integer datasourceId) {
+        Map<String, Object> result = dataSourceService.getDatabases(datasourceId);
         return returnDataList(result);
     }
 }
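The new `GET /datasources/databases` endpoint backs the database dropdown: the UI first lists databases for a datasource, then passes the chosen name to `/datasources/tables`. A rough sketch of that call sequence, reusing the same base-URL and token assumptions as the earlier example:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DatabasePickerSketch {
    // Hypothetical call sequence: list databases for a datasource, then list tables in one of them.
    public static void listDatabasesThenTables(HttpClient client, String base, String token) throws Exception {
        HttpRequest databasesRequest = HttpRequest.newBuilder()
                .uri(URI.create(base + "/datasources/databases?datasourceId=1"))
                .header("token", token)
                .GET()
                .build();
        String databaseOptions = client.send(databasesRequest, HttpResponse.BodyHandlers.ofString()).body();
        System.out.println(databaseOptions);

        // Pick a database name from the returned options, then request its tables.
        HttpRequest tablesRequest = HttpRequest.newBuilder()
                .uri(URI.create(base + "/datasources/tables?datasourceId=1&database=test"))
                .header("token", token)
                .GET()
                .build();
        String tableOptions = client.send(tablesRequest, HttpResponse.BodyHandlers.ofString()).body();
        System.out.println(tableOptions);
    }
}
```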

+ 1 - 0
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@@ -482,6 +482,7 @@ public enum Status {
     GET_DATASOURCE_OPTIONS_ERROR(1200017, "get datasource options error", "获取数据源Options错误"),
     GET_DATASOURCE_TABLES_ERROR(1200018, "get datasource tables error", "获取数据源表列表错误"),
     GET_DATASOURCE_TABLE_COLUMNS_ERROR(1200019, "get datasource table columns error", "获取数据源表列名错误"),
+    GET_DATASOURCE_DATABASES_ERROR(1200035, "get datasource databases error", "获取数据库列表错误"),
 
     CREATE_CLUSTER_ERROR(120020, "create cluster error", "创建集群失败"),
     CLUSTER_NAME_EXISTS(120021, "this cluster name [{0}] already exists", "集群名称[{0}]已经存在"),

+ 11 - 2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@@ -132,15 +132,24 @@ public interface DataSourceService {
     /**
      * get tables
      * @param datasourceId
+     * @param database
      * @return
      */
-    Map<String, Object> getTables(Integer datasourceId);
+    Map<String, Object> getTables(Integer datasourceId, String database);
 
     /**
      * get table columns
      * @param datasourceId
+     * @param database
      * @param tableName
      * @return
      */
-    Map<String, Object> getTableColumns(Integer datasourceId, String tableName);
+    Map<String, Object> getTableColumns(Integer datasourceId, String database, String tableName);
+
+    /**
+     * get databases
+     * @param datasourceId
+     * @return
+     */
+    Map<String, Object> getDatabases(Integer datasourceId);
 }

+ 59 - 5
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java

@@ -515,7 +515,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
     }
 
     @Override
-    public Map<String, Object> getTables(Integer datasourceId) {
+    public Map<String, Object> getTables(Integer datasourceId, String database) {
         Map<String, Object> result = new HashMap<>();
 
         DataSource dataSource = dataSourceMapper.selectById(datasourceId);
@@ -551,7 +551,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
             }
 
             tables = metaData.getTables(
-                    connectionParam.getDatabase(),
+                    database,
                     getDbSchemaPattern(dataSource.getType(), schema, connectionParam),
                     "%", TABLE_TYPES);
             if (null == tables) {
@@ -583,7 +583,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
     }
 
     @Override
-    public Map<String, Object> getTableColumns(Integer datasourceId, String tableName) {
+    public Map<String, Object> getTableColumns(Integer datasourceId, String database, String tableName) {
         Map<String, Object> result = new HashMap<>();
 
         DataSource dataSource = dataSourceMapper.selectById(datasourceId);
@@ -603,8 +603,6 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
         ResultSet rs = null;
 
         try {
-
-            String database = connectionParam.getDatabase();
             if (null == connection) {
                 return result;
             }
@@ -635,6 +633,62 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
         return result;
     }
 
+    @Override
+    public Map<String, Object> getDatabases(Integer datasourceId) {
+        Map<String, Object> result = new HashMap<>();
+
+        DataSource dataSource = dataSourceMapper.selectById(datasourceId);
+
+        if (dataSource == null) {
+            putMsg(result, Status.QUERY_DATASOURCE_ERROR);
+            return result;
+        }
+
+        List<String> tableList;
+        BaseConnectionParam connectionParam =
+                (BaseConnectionParam) DataSourceUtils.buildConnectionParams(
+                        dataSource.getType(),
+                        dataSource.getConnectionParams());
+
+        if (null == connectionParam) {
+            putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
+            return result;
+        }
+
+        Connection connection =
+                DataSourceUtils.getConnection(dataSource.getType(), connectionParam);
+        ResultSet rs = null;
+
+        try {
+            if (null == connection) {
+                putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
+                return result;
+            }
+            if (dataSource.getType() == DbType.POSTGRESQL) {
+                rs = connection.createStatement().executeQuery(Constants.DATABASES_QUERY_PG);
+            } else {
+                rs = connection.createStatement().executeQuery(Constants.DATABASES_QUERY);
+            }
+            tableList = new ArrayList<>();
+            while (rs.next()) {
+                String name = rs.getString(1);
+                tableList.add(name);
+            }
+        } catch (Exception e) {
+            log.error("Get databases error, datasourceId:{}.", datasourceId, e);
+            putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR);
+            return result;
+        } finally {
+            closeResult(rs);
+            releaseConnection(connection);
+        }
+
+        List<ParamsOptions> options = getParamsOptions(tableList);
+
+        result.put(Constants.DATA_LIST, options);
+        putMsg(result, Status.SUCCESS);
+        return result;
+    }
+
     private List<ParamsOptions> getParamsOptions(List<String> columnList) {
         List<ParamsOptions> options = null;
         if (CollectionUtils.isNotEmpty(columnList)) {
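Database listing boils down to one dialect-specific query: `show databases` for MySQL-compatible sources and `SELECT datname FROM pg_database` for PostgreSQL, with the single result column collected into the options list. A standalone JDBC sketch of the same idea; the URL and credentials are placeholders:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class ListDatabasesSketch {
    // Dialect-specific queries mirroring Constants.DATABASES_QUERY / DATABASES_QUERY_PG.
    private static final String MYSQL_QUERY = "show databases";
    private static final String PG_QUERY = "SELECT datname FROM pg_database";

    public static List<String> listDatabases(String jdbcUrl, String user, String password,
                                             boolean isPostgres) throws Exception {
        List<String> databases = new ArrayList<>();
        String query = isPostgres ? PG_QUERY : MYSQL_QUERY;
        try (Connection connection = DriverManager.getConnection(jdbcUrl, user, password);
             Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(query)) {
            while (rs.next()) {
                databases.add(rs.getString(1));
            }
        }
        return databases;
    }
}
```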

+ 29 - 0
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java

@@ -38,6 +38,7 @@ import org.apache.dolphinscheduler.plugin.datasource.api.plugin.DataSourceClient
 import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils;
 import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils;
 import org.apache.dolphinscheduler.plugin.datasource.hive.param.HiveDataSourceParamDTO;
+import org.apache.dolphinscheduler.plugin.datasource.mysql.param.MySQLConnectionParam;
 import org.apache.dolphinscheduler.plugin.datasource.mysql.param.MySQLDataSourceParamDTO;
 import org.apache.dolphinscheduler.plugin.datasource.oracle.param.OracleDataSourceParamDTO;
 import org.apache.dolphinscheduler.plugin.datasource.postgresql.param.PostgreSQLDataSourceParamDTO;
@@ -48,6 +49,7 @@ import org.apache.dolphinscheduler.spi.enums.DbType;
 import org.apache.commons.collections4.CollectionUtils;
 
 import java.sql.Connection;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -517,4 +519,31 @@ public class DataSourceServiceTest {
         }
     }
 
+    @Test
+    public void testGetDatabases() throws SQLException {
+        DataSource dataSource = getOracleDataSource();
+        int datasourceId = 1;
+        dataSource.setId(datasourceId);
+        Map<String, Object> result;
+        Mockito.when(dataSourceMapper.selectById(datasourceId)).thenReturn(null);
+        result = dataSourceService.getDatabases(datasourceId);
+        Assertions.assertEquals(Status.QUERY_DATASOURCE_ERROR, result.get(Constants.STATUS));
+
+        Mockito.when(dataSourceMapper.selectById(datasourceId)).thenReturn(dataSource);
+        MySQLConnectionParam connectionParam = new MySQLConnectionParam();
+        Connection connection = Mockito.mock(Connection.class);
+        MockedStatic<DataSourceUtils> dataSourceUtils = Mockito.mockStatic(DataSourceUtils.class);
+        dataSourceUtils.when(() -> DataSourceUtils.getConnection(Mockito.any(), Mockito.any())).thenReturn(connection);
+        dataSourceUtils.when(() -> DataSourceUtils.buildConnectionParams(Mockito.any(), Mockito.any()))
+                .thenReturn(connectionParam);
+        result = dataSourceService.getDatabases(datasourceId);
+        Assertions.assertEquals(Status.GET_DATASOURCE_TABLES_ERROR, result.get(Constants.STATUS));
+
+        dataSourceUtils.when(() -> DataSourceUtils.buildConnectionParams(Mockito.any(), Mockito.any()))
+                .thenReturn(null);
+        result = dataSourceService.getDatabases(datasourceId);
+        Assertions.assertEquals(Status.DATASOURCE_CONNECT_FAILED, result.get(Constants.STATUS));
+        connection.close();
+        dataSourceUtils.close();
+    }
 }

+ 6 - 0
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/Constants.java

@@ -850,4 +850,10 @@ public final class Constants {
     public static final String REMOTE_LOGGING_GCS_CREDENTIAL = "remote.logging.google.cloud.storage.credential";
 
     public static final String REMOTE_LOGGING_GCS_BUCKET_NAME = "remote.logging.google.cloud.storage.bucket.name";
+
+    /**
+     * data quality
+     */
+    public static final String DATABASES_QUERY = "show databases";
+    public static final String DATABASES_QUERY_PG = "SELECT datname FROM pg_database";
 }

+ 44 - 2
dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql

@@ -1368,6 +1368,12 @@ VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enum
 INSERT INTO `t_ds_dq_rule_input_entry`
 (`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
 VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
 
 --
 -- Table structure for table `t_ds_dq_task_statistics_value`
@@ -1851,9 +1857,45 @@ VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.00
 INSERT INTO `t_ds_relation_rule_input_entry`
 (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
 VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-INSERT INTO t_ds_relation_rule_input_entry
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
 (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
-VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0');
+VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
 
 --
 -- Table structure for table t_ds_environment

+ 42 - 0
dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql

@@ -1358,6 +1358,12 @@ VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enum
 INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
 (`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
 VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
 
 --
 -- Table structure for table `t_ds_dq_task_statistics_value`
@@ -1845,6 +1851,42 @@ INSERT IGNORE INTO t_ds_relation_rule_input_entry
 (`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
 VALUES(150, 8, 29, NULL, 7, current_timestamp, current_timestamp);
 
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);
 -- ----------------------------
 -- Table structure for t_ds_environment
 -- ----------------------------

+ 42 - 1
dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql

@@ -1335,6 +1335,12 @@ VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enum
 INSERT INTO t_ds_dq_rule_input_entry
 (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
 VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_dq_rule_input_entry
+(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
+VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_dq_rule_input_entry
+(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
+VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
 
 --
 -- Table structure for table t_ds_dq_task_statistics_value
@@ -1819,7 +1825,42 @@ VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.00
 INSERT INTO t_ds_relation_rule_input_entry
 (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
 VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
-
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
 --
 -- Table structure for table t_ds_environment
 --

+ 45 - 0
dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/mysql/dolphinscheduler_dml.sql

@@ -37,3 +37,48 @@ DROP PROCEDURE dolphin_t_ds_tenant_insert_default;
 -- tenant improvement
 UPDATE t_ds_schedules t1 JOIN t_ds_process_definition t2 ON t1.process_definition_code = t2.code LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id SET t1.tenant_code = COALESCE(t3.tenant_code, 'default');
 UPDATE `t_ds_process_instance` SET `tenant_code` = 'default' WHERE `tenant_code` IS NULL;
+
+-- data quality support choose database
+INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'please select source database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'please select target database', 0, 0, 0, 1, 1, 1, 1, current_timestamp, current_timestamp);
+
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(151, 1, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(152, 2, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(153, 3, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(154, 4, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(155, 5, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(156, 6, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(157, 7, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(158, 8, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(159, 9, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(160, 10, 30, NULL, 2, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(161, 3, 31, NULL, 6, current_timestamp, current_timestamp);
+INSERT IGNORE INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(162, 4, 31, NULL, 7, current_timestamp, current_timestamp);

+ 46 - 1
dolphinscheduler-dao/src/main/resources/sql/upgrade/3.2.0_schema/postgresql/dolphinscheduler_dml.sql

@@ -15,8 +15,53 @@
  * limitations under the License.
 */
 
-INSERT INTO t_ds_tenant(id, tenant_code, description, queue_id, create_time, update_time) VALUES (-1, 'default', 'default tenant', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22');
+INSERT INTO t_ds_tenant(id, tenant_code, description, queue_id, create_time, update_time) VALUES (-1, 'default', 'default tenant', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22') ON CONFLICT (id) DO NOTHING;
 
 -- tenant improvement
 UPDATE t_ds_schedules t1 SET t1.tenant_code = COALESCE(t3.tenant_code, 'default') FROM t_ds_process_definition t2 LEFT JOIN t_ds_tenant t3 ON t2.tenant_id = t3.id WHERE t1.process_definition_code = t2.code;
 UPDATE t_ds_process_instance SET tenant_code = 'default' WHERE tenant_code IS NULL;
+
+-- data quality support choose database
+INSERT INTO t_ds_dq_rule_input_entry
+(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
+VALUES(30, 'src_database', 'select', '$t(src_database)', NULL, NULL, 'Please select source database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_dq_rule_input_entry
+(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time)
+VALUES(31, 'target_database', 'select', '$t(target_database)', NULL, NULL, 'Please select target database', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(151, 1, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(152, 2, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(153, 3, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(154, 4, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(155, 5, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(156, 6, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(157, 7, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(158, 8, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(159, 9, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(160, 10, 30, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(161, 3, 31, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;
+INSERT INTO t_ds_relation_rule_input_entry
+(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time)
+VALUES(162, 4, 31, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000') ON CONFLICT (id) DO NOTHING;

+ 2 - 1
dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java

@@ -17,6 +17,7 @@
 
 package org.apache.dolphinscheduler.data.quality.flow.batch.reader;
 
+import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
 import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE;
 import static org.apache.dolphinscheduler.data.quality.Constants.DOTS;
 import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
@@ -78,7 +79,7 @@ public class JdbcReader implements BatchReader {
         DataFrameReader reader = sparkSession.read()
                 .format(JDBC)
                 .option(URL, config.getString(URL))
-                .option(DB_TABLE, config.getString(TABLE))
+                .option(DB_TABLE, config.getString(DATABASE) + "." + config.getString(TABLE))
                 .option(USER, config.getString(USER))
                 .option(PASSWORD, ParserUtils.decode(config.getString(PASSWORD)))
                 .option(DRIVER, config.getString(DRIVER));
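With this change the Spark JDBC `dbtable` option is qualified as `<database>.<table>` using the database selected in the rule form, instead of relying on the bare table name. A minimal read sketch under assumed MySQL connection settings (URL, credentials, and driver are placeholders):

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class JdbcReadSketch {
    public static Dataset<Row> read(SparkSession spark, String database, String table) {
        // dbtable is now qualified as "<database>.<table>" instead of the bare table name.
        return spark.read()
                .format("jdbc")
                .option("url", "jdbc:mysql://localhost:3306/" + database)   // assumed URL
                .option("dbtable", database + "." + table)
                .option("user", "test")                                     // placeholder credentials
                .option("password", "123456")
                .option("driver", "com.mysql.cj.jdbc.Driver")
                .load();
    }
}
```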

+ 2 - 1
dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java

@@ -18,6 +18,7 @@
 package org.apache.dolphinscheduler.data.quality.flow.batch.writer;
 
 import static org.apache.dolphinscheduler.data.quality.Constants.APPEND;
+import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE;
 import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE;
 import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER;
 import static org.apache.dolphinscheduler.data.quality.Constants.JDBC;
@@ -79,7 +80,7 @@ public class JdbcWriter implements BatchWriter {
                 .format(JDBC)
                 .option(DRIVER, config.getString(DRIVER))
                 .option(URL, config.getString(URL))
-                .option(DB_TABLE, config.getString(TABLE))
+                .option(DB_TABLE, config.getString(DATABASE) + "." + config.getString(TABLE))
                 .option(USER, config.getString(USER))
                 .option(PASSWORD, ParserUtils.decode(config.getString(PASSWORD)))
                 .mode(config.getString(SAVE_MODE))
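The writer side mirrors the reader: the same qualified `dbtable`, plus the configured save mode. A matching sketch with the same placeholder connection settings:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class JdbcWriteSketch {
    public static void write(Dataset<Row> result, String database, String table) {
        result.write()
                .format("jdbc")
                .option("url", "jdbc:mysql://localhost:3306/" + database)   // assumed URL
                .option("dbtable", database + "." + table)                  // qualified target table
                .option("user", "test")                                     // placeholder credentials
                .option("password", "123456")
                .option("driver", "com.mysql.cj.jdbc.Driver")
                .mode("append")
                .save();
    }
}
```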

+ 1 - 1
dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java

@@ -56,7 +56,7 @@ public class JdbcReaderTest extends FlowTestBase {
     private Config buildReaderConfig() {
         Map<String, Object> config = new HashMap<>();
         config.put(DATABASE, "test");
-        config.put(TABLE, "test.test1");
+        config.put(TABLE, "test1");
         config.put(URL, url);
         config.put(USER, "test");
         config.put(PASSWORD, "123456");

+ 2 - 0
dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/DataQualityConstants.java

@@ -31,11 +31,13 @@ public class DataQualityConstants {
      */
     public static final String SRC_CONNECTOR_TYPE = "src_connector_type";
     public static final String SRC_DATASOURCE_ID = "src_datasource_id";
+    public static final String SRC_DATABASE = "src_database";
     public static final String SRC_TABLE = "src_table";
     public static final String SRC_FILTER = "src_filter";
     public static final String SRC_FIELD = "src_field";
     public static final String TARGET_CONNECTOR_TYPE = "target_connector_type";
     public static final String TARGET_DATASOURCE_ID = "target_datasource_id";
+    public static final String TARGET_DATABASE = "target_database";
     public static final String TARGET_TABLE = "target_table";
     public static final String TARGET_FILTER = "target_filter";
     public static final String TARGET_FIELD = "target_field";

+ 7 - 4
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java

@@ -44,12 +44,14 @@ import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConst
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_NAME;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.RULE_TYPE;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SQL;
+import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_DATABASE;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_FIELD;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_FILTER;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.SRC_TABLE;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_EXECUTE_SQL;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.STATISTICS_TABLE;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TABLE;
+import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_DATABASE;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_FIELD;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_FILTER;
 import static org.apache.dolphinscheduler.plugin.task.api.utils.DataQualityConstants.TARGET_TABLE;
@@ -116,7 +118,7 @@ public class RuleParserUtils {
             sourceBaseConfig.setType(dataQualityTaskExecutionContext.getSourceConnectorType());
             Map<String, Object> config = new HashMap<>();
             if (sourceDataSource != null) {
-                config.put(DATABASE, sourceDataSource.getDatabase());
+                config.put(DATABASE, inputParameterValue.get(SRC_DATABASE));
                 config.put(TABLE, inputParameterValue.get(SRC_TABLE));
                 config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getSourceType()),
                         sourceDataSource));
@@ -124,7 +126,7 @@ public class RuleParserUtils {
                 config.put(PASSWORD, ParserUtils.encode(sourceDataSource.getPassword()));
                 config.put(DRIVER, DataSourceUtils
                         .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getSourceType())));
-                String outputTable = sourceDataSource.getDatabase() + "_" + inputParameterValue.get(SRC_TABLE);
+                String outputTable = inputParameterValue.get(SRC_DATABASE) + "_" + inputParameterValue.get(SRC_TABLE);
                 config.put(OUTPUT_TABLE, outputTable);
                 inputParameterValue.put(SRC_TABLE, outputTable);
             }
@@ -143,7 +145,7 @@ public class RuleParserUtils {
             targetBaseConfig.setType(dataQualityTaskExecutionContext.getTargetConnectorType());
             Map<String, Object> config = new HashMap<>();
             if (targetDataSource != null) {
-                config.put(DATABASE, targetDataSource.getDatabase());
+                config.put(DATABASE, inputParameterValue.get(TARGET_DATABASE));
                 config.put(TABLE, inputParameterValue.get(TARGET_TABLE));
                 config.put(URL, DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getTargetType()),
                         targetDataSource));
@@ -151,7 +153,8 @@ public class RuleParserUtils {
                 config.put(PASSWORD, ParserUtils.encode(targetDataSource.getPassword()));
                 config.put(DRIVER, DataSourceUtils
                         .getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getTargetType())));
-                String outputTable = targetDataSource.getDatabase() + "_" + inputParameterValue.get(TARGET_TABLE);
+                String outputTable =
+                        inputParameterValue.get(TARGET_DATABASE) + "_" + inputParameterValue.get(TARGET_TABLE);
                 config.put(OUTPUT_TABLE, outputTable);
                 inputParameterValue.put(TARGET_TABLE, outputTable);
             }
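The practical effect is that the source and target connector configs now take their `database` from the task's input parameters (`src_database` / `target_database`) rather than from the datasource definition, and the temp view registered for the SQL steps becomes `<database>_<table>`. A simplified sketch of how the source-side config map is assembled; the literal key strings stand in for the corresponding constants, everything else is a placeholder:

```java
import java.util.HashMap;
import java.util.Map;

public class SourceConfigSketch {
    public static Map<String, Object> buildSourceConfig(Map<String, String> inputParameterValue,
                                                        String jdbcUrl, String user, String encodedPassword,
                                                        String driver) {
        Map<String, Object> config = new HashMap<>();
        String database = inputParameterValue.get("src_database");   // user-selected database
        String table = inputParameterValue.get("src_table");
        config.put("database", database);
        config.put("table", table);
        config.put("url", jdbcUrl);
        config.put("user", user);
        config.put("password", encodedPassword);
        config.put("driver", driver);
        // The registered Spark temp view is now "<database>_<table>".
        String outputTable = database + "_" + table;
        config.put("output_table", outputTable);
        inputParameterValue.put("src_table", outputTable);
        return config;
    }
}
```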

+ 1 - 0
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java

@@ -61,6 +61,7 @@ public class DataQualityParameterTest {
         Map<String, String> inputParameterValue = new HashMap<>();
         inputParameterValue.put("src_connector_type", "JDBC");
         inputParameterValue.put("src_datasource_id", "1");
+        inputParameterValue.put("src_database", "test");
         inputParameterValue.put("src_table", "test1");
         inputParameterValue.put("src_filter", "date=2012-10-05");
         inputParameterValue.put("src_field", "id");

+ 69 - 3
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java

@@ -56,6 +56,7 @@ public class DataQualityTaskTest {
         inputParameterValue.put("src_datasource_id", "2");
         inputParameterValue.put("src_table", "src_result");
         inputParameterValue.put("check_type", "0");
+        inputParameterValue.put("src_database", "test");
         inputParameterValue.put("operator", "3");
         inputParameterValue.put("threshold", "1");
         inputParameterValue.put("failure_strategy", "0");
@@ -92,7 +93,7 @@ public class DataQualityTaskTest {
                 + "{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\","
                 + "\"user\":\"test\",\"table\":\"dqc_statistics_value\",\"url\":"
                 + "\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\","
-                + "\"sql\":\"select 21 as process_definition_id,287 as task_instance_id,10 as rule_id,'DN/MS5NLTSLVZ/++KEJ9BHPQSEN6/UY/EV5TWI1IRRY=' "
+                + "\"sql\":\"select 21 as process_definition_id,287 as task_instance_id,10 as rule_id,'SA8QJTSZZNEXNIXHUL5LTGRTYPWKJ4XY85VPS/NCKES=' "
                 + "as unique_code,'table_count.total'AS statistics_name,"
                 + "table_count.total AS statistics_value,'2021-08-12 10:15:48' as data_time,'2021-08-12 10:15:48' as create_time,"
                 + "'2021-08-12 10:15:48' as update_time from table_count\"}}]}";
@@ -135,6 +136,20 @@ public class DataQualityTaskTest {
         srcDatasourceId.setCreateTime(new Date());
         srcDatasourceId.setUpdateTime(new Date());
 
+        DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
+        srcDatabase.setTitle("源数据库");
+        srcDatabase.setField("src_database");
+        srcDatabase.setType(FormType.CASCADER.getFormType());
+        srcDatabase.setCanEdit(true);
+        srcDatabase.setIsShow(true);
+        srcDatabase.setValue(null);
+        srcDatabase.setPlaceholder("$t(src_database)");
+        srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
+        srcDatabase.setInputType(InputType.DEFAULT.getCode());
+        srcDatabase.setValueType(ValueType.NUMBER.getCode());
+        srcDatabase.setCreateTime(new Date());
+        srcDatabase.setUpdateTime(new Date());
+
         DqRuleInputEntry srcTable = new DqRuleInputEntry();
         srcTable.setTitle("源数据表");
         srcTable.setField("src_table");
@@ -258,6 +273,7 @@ public class DataQualityTaskTest {
 
         defaultInputEntryList.add(srcConnectorType);
         defaultInputEntryList.add(srcDatasourceId);
+        defaultInputEntryList.add(srcDatabase);
         defaultInputEntryList.add(srcTable);
         defaultInputEntryList.add(srcFilter);
         defaultInputEntryList.add(srcField);
@@ -344,6 +360,20 @@ public class DataQualityTaskTest {
         srcConnectorType.setCreateTime(new Date());
         srcConnectorType.setUpdateTime(new Date());
 
+        DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
+        srcDatabase.setTitle("源数据库");
+        srcDatabase.setField("src_database");
+        srcDatabase.setType(FormType.CASCADER.getFormType());
+        srcDatabase.setCanEdit(true);
+        srcDatabase.setIsShow(true);
+        srcDatabase.setValue(null);
+        srcDatabase.setPlaceholder("$t(src_database)");
+        srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
+        srcDatabase.setInputType(InputType.DEFAULT.getCode());
+        srcDatabase.setValueType(ValueType.NUMBER.getCode());
+        srcDatabase.setCreateTime(new Date());
+        srcDatabase.setUpdateTime(new Date());
+
         DqRuleInputEntry srcTable = new DqRuleInputEntry();
         srcTable.setTitle("源数据表");
         srcTable.setField("src_table");
@@ -448,6 +478,7 @@ public class DataQualityTaskTest {
         defaultInputEntryList.add(afterFailure);
         defaultInputEntryList.add(srcConnectorType);
         defaultInputEntryList.add(srcDatasourceId);
+        defaultInputEntryList.add(srcDatabase);
         defaultInputEntryList.add(srcTable);
         defaultInputEntryList.add(statisticsName);
         defaultInputEntryList.add(statisticsExecuteSql);
@@ -457,6 +488,7 @@ public class DataQualityTaskTest {
         inputParameterValue.put("src_connector_type", "0");
         inputParameterValue.put("src_datasource_id", "2");
         inputParameterValue.put("src_table", "person");
+        inputParameterValue.put("src_database", "test");
         inputParameterValue.put("statistics_name", "miss");
         inputParameterValue.put("statistics_execute_sql",
                 "select count(*) as miss from ${src_table} where (sex = null or sex='') and age=1");
@@ -536,7 +568,7 @@ public class DataQualityTaskTest {
                         + "\"org.postgresql.Driver\",\"user\":\"test\",\"table\":\"t_ds_dq_task_statistics_value\",\"url\":"
                         + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding="
                         + "UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as "
-                        + "task_instance_id,1 as rule_id,'FNWZLNCPWWF4ZWKO/LYENOPL6JPV1SHPPWQ9YSYLOCU=' as unique_code,'miss'AS statistics_name,miss AS statistics_value,"
+                        + "task_instance_id,1 as rule_id,'IGTZ9I6KWVEPXFFJKDVMO6QB6URHHXK0NINS9GAOUEA=' as unique_code,'miss'AS statistics_name,miss AS statistics_value,"
                         + "'2021-08-30 00:00:00' as data_time,'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' "
                         + "as update_time from test_person\"}}]}";
 
@@ -580,6 +612,20 @@ public class DataQualityTaskTest {
         srcConnectorType.setCreateTime(new Date());
         srcConnectorType.setUpdateTime(new Date());
 
+        DqRuleInputEntry srcDatabase = new DqRuleInputEntry();
+        srcDatabase.setTitle("源数据库");
+        srcDatabase.setField("src_database");
+        srcDatabase.setType(FormType.CASCADER.getFormType());
+        srcDatabase.setCanEdit(true);
+        srcDatabase.setIsShow(true);
+        srcDatabase.setValue(null);
+        srcDatabase.setPlaceholder("$t(src_database)");
+        srcDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
+        srcDatabase.setInputType(InputType.DEFAULT.getCode());
+        srcDatabase.setValueType(ValueType.NUMBER.getCode());
+        srcDatabase.setCreateTime(new Date());
+        srcDatabase.setUpdateTime(new Date());
+
         DqRuleInputEntry srcTable = new DqRuleInputEntry();
         srcTable.setTitle("源数据表");
         srcTable.setField("src_table");
@@ -636,6 +682,20 @@ public class DataQualityTaskTest {
         targetDatasourceId.setValue("1");
         targetDatasourceId.setPlaceholder("Please select target datasource");
         targetDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode());
+
+        DqRuleInputEntry targetDatabase = new DqRuleInputEntry();
+        targetDatabase.setTitle("目标数据库");
+        targetDatabase.setField("src_database");
+        targetDatabase.setType(FormType.CASCADER.getFormType());
+        targetDatabase.setCanEdit(true);
+        targetDatabase.setIsShow(true);
+        targetDatabase.setValue(null);
+        targetDatabase.setPlaceholder("$t(src_database)");
+        targetDatabase.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
+        targetDatabase.setInputType(InputType.DEFAULT.getCode());
+        targetDatabase.setValueType(ValueType.NUMBER.getCode());
+        targetDatabase.setCreateTime(new Date());
+        targetDatabase.setUpdateTime(new Date());
         targetDatasourceId.setInputType(InputType.DEFAULT.getCode());
 
         DqRuleInputEntry targetTable = new DqRuleInputEntry();
@@ -731,12 +791,14 @@ public class DataQualityTaskTest {
 
         defaultInputEntryList.add(srcConnectorType);
         defaultInputEntryList.add(srcDatasourceId);
+        defaultInputEntryList.add(srcDatabase);
         defaultInputEntryList.add(srcTable);
         defaultInputEntryList.add(statisticsName);
         defaultInputEntryList.add(statisticsExecuteSql);
 
         defaultInputEntryList.add(targetConnectorType);
         defaultInputEntryList.add(targetDatasourceId);
+        defaultInputEntryList.add(targetDatabase);
         defaultInputEntryList.add(targetTable);
         defaultInputEntryList.add(comparisonName);
         defaultInputEntryList.add(comparisonExecuteSql);
@@ -746,11 +808,13 @@ public class DataQualityTaskTest {
         Map<String, String> inputParameterValue = new HashMap<>();
         inputParameterValue.put("src_connector_type", "0");
         inputParameterValue.put("src_datasource_id", "2");
+        inputParameterValue.put("src_database", "test");
         inputParameterValue.put("src_table", "test1");
         inputParameterValue.put("statistics_name", "src");
         inputParameterValue.put("statistics_execute_sql", "select count(*) as src from ${src_table} where c1>20");
         inputParameterValue.put("target_connector_type", "2");
         inputParameterValue.put("target_datasource_id", "3");
+        inputParameterValue.put("target_database", "default");
         inputParameterValue.put("target_table", "test1_1");
         inputParameterValue.put("comparison_name", "target");
         inputParameterValue.put("comparison_execute_sql", "select count(*) as target from ${target_table} where c1>20");
@@ -1071,10 +1135,12 @@ public class DataQualityTaskTest {
         Map<String, String> inputParameterValue = new HashMap<>();
         inputParameterValue.put("src_connector_type", "0");
         inputParameterValue.put("src_datasource_id", "2");
+        inputParameterValue.put("src_database", "test");
         inputParameterValue.put("src_table", "demo_src");
         inputParameterValue.put("src_filter", "age<100");
         inputParameterValue.put("target_connector_type", "2");
         inputParameterValue.put("target_datasource_id", "3");
+        inputParameterValue.put("target_database", "default");
         inputParameterValue.put("target_table", "demo_src");
         inputParameterValue.put("target_filter", "age<100");
         inputParameterValue.put("mapping_columns",
@@ -1165,7 +1231,7 @@ public class DataQualityTaskTest {
                 + "\"password\":\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":"
                 + "\"t_ds_dq_task_statistics_value\",\"url\":\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified"
                 + "&characterEncoding=UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as task_instance_id,"
-                + "3 as rule_id,'T4MB2XTVSL+VA/L6XCU1M/ELHKYOMGVNBBE5KHBXHHI=' as unique_code,'miss_count.miss'AS statistics_name,miss_count.miss "
+                + "3 as rule_id,'NGRU3S2KPG0GQ4BIHSW9C/LKX3NHN+CEUNU7AMNSPJK=' as unique_code,'miss_count.miss'AS statistics_name,miss_count.miss "
                 + "AS statistics_value,'2021-08-30 00:00:00' as data_time,"
                 + "'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' as update_time from miss_count\"}},{\"type\":\"hdfs_file\","
                 + "\"config\":{\"path\":\"hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test\",\"input_table\":\"miss_items\"}}]}";

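The fixture changes above mirror what the UI now submits: the rule input parameters carry src_database and target_database alongside the existing connector, datasource and table keys. A minimal TypeScript sketch of that payload shape, using the values from the test (the interface name and typing are illustrative, not part of the codebase):

    // Shape of ruleInputParameter after this change; values taken from the test above.
    interface RuleInputParameterExample {
      src_connector_type: number
      src_datasource_id: number
      src_database: string // new in this change
      src_table: string
      target_connector_type: number
      target_datasource_id: number
      target_database: string // new in this change
      target_table: string
    }

    const example: RuleInputParameterExample = {
      src_connector_type: 0,
      src_datasource_id: 2,
      src_database: 'test',
      src_table: 'test1',
      target_connector_type: 2,
      target_datasource_id: 3,
      target_database: 'default',
      target_table: 'test1_1'
    }
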
+ 2 - 0
dolphinscheduler-ui/src/locales/en_US/data-quality.ts

@@ -59,6 +59,7 @@ export default {
     input_item_type: 'Input item type',
     src_connector_type: 'SrcConnType',
     src_datasource_id: 'SrcSource',
+    src_database: 'SrcDatabase',
     src_table: 'SrcTable',
     src_filter: 'SrcFilter',
     src_field: 'SrcField',
@@ -69,6 +70,7 @@ export default {
     failure_strategy: 'FailureStrategy',
     target_connector_type: 'TargetConnType',
     target_datasource_id: 'TargetSourceId',
+    target_database: 'TargetDatabase',
     target_table: 'TargetTable',
     target_filter: 'TargetFilter',
     mapping_columns: 'OnClause',

+ 12 - 0
dolphinscheduler-ui/src/locales/en_US/project.ts

@@ -664,6 +664,7 @@ export default {
     table_count_check: 'TableCountCheck',
     src_connector_type: 'SrcConnType',
     src_datasource_id: 'SrcSource',
+    src_database: 'SrcDatabase',
     src_table: 'SrcTable',
     src_filter: 'SrcFilter',
     src_field: 'SrcField',
@@ -674,6 +675,7 @@ export default {
     failure_strategy: 'FailureStrategy',
     target_connector_type: 'TargetConnType',
     target_datasource_id: 'TargetSourceId',
+    target_database: 'TargetDatabase',
     target_table: 'TargetTable',
     target_filter: 'TargetFilter',
     mapping_columns: 'OnClause',
@@ -808,10 +810,20 @@ export default {
     unlimited: 'unlimited',
     please_select_source_connector_type: 'Please select source connector type',
     please_select_source_datasource_id: 'Please select source datasource id',
+    please_select_source_database: 'Please select source database',
     please_enter_source_table_name: 'Please select source table name',
     please_enter_filter_expression: 'Please enter filter expression',
     please_enter_column_only_single_column_is_supported:
       'Please select column, only single column is supported',
+    please_select_target_connector_type: 'Please select target connector type',
+    please_select_target_datasource: 'Please select target datasource id',
+    please_select_target_database: 'Please select target database',
+    please_enter_target_table: 'Please select target table name',
+    please_enter_target_filter_expression: 'Please enter target filter expression',
+    please_enter_comparison_name_the_alias_in_comparison_execute_sql: 'Please enter comparison name, the alias in comparison execute sql',
+    please_enter_statistics_name_the_alias_in_statistics_execute_sql: 'Please enter statistics name, the alias in statistics execute sql',
+    please_enter_comparison_execute_sql: 'Please enter comparison execute sql',
+    please_enter_statistics_execute_sql: 'Please enter statistics execute sql',
     please_enter_threshold_number_is_needed:
       'Please enter threshold number is needed',
     please_enter_comparison_title: 'please select comparison title',

+ 2 - 0
dolphinscheduler-ui/src/locales/zh_CN/data-quality.ts

@@ -59,6 +59,7 @@ export default {
     input_item_type: '输入项类型',
     src_connector_type: '源数据类型',
     src_datasource_id: '源数据源',
+    src_database: '源数据库',
     src_table: '源数据表',
     src_filter: '源表过滤条件',
     src_field: '源表检测列',
@@ -69,6 +70,7 @@ export default {
     failure_strategy: '失败策略',
     target_connector_type: '目标数据类型',
     target_datasource_id: '目标数据源',
+    target_database: '目标数据库',
     target_table: '目标数据表',
     target_filter: '目标表过滤条件',
     mapping_columns: 'ON语句',

+ 12 - 0
dolphinscheduler-ui/src/locales/zh_CN/project.ts

@@ -653,6 +653,7 @@ export default {
     table_count_check: '表行数校验',
     src_connector_type: '源数据类型',
     src_datasource_id: '源数据源',
+    src_database: '源数据库',
     src_table: '源数据表',
     src_filter: '源表过滤条件',
     src_field: '源表检测列',
@@ -663,6 +664,7 @@ export default {
     failure_strategy: '失败策略',
     target_connector_type: '目标数据类型',
     target_datasource_id: '目标数据源',
+    target_database: '目标数据库',
     target_table: '目标数据表',
     target_filter: '目标表过滤条件',
     mapping_columns: 'ON语句',
@@ -786,11 +788,21 @@ export default {
     unlimited: '不限制',
     please_select_source_connector_type: '请选择源数据类型',
     please_select_source_datasource_id: '请选择源数据源',
+    please_select_source_database: '请选择源数据库',
     please_enter_source_table_name: '请选择源数据表',
     please_enter_filter_expression: '请输入源表过滤条件',
     please_enter_column_only_single_column_is_supported: '请选择源表检测列',
     please_enter_threshold_number_is_needed: '请输入阈值',
     please_enter_comparison_title: '请选择期望值类型',
+    please_select_target_connector_type: '请选择目标数据类型',
+    please_select_target_datasource: '请选择目标数据源',
+    please_select_target_database: '请选择目标数据库',
+    please_enter_target_table: '请选择目标数据表',
+    please_enter_target_filter_expression: '请输入目标表过滤条件',
+    please_enter_comparison_name_the_alias_in_comparison_execute_sql: '请输入期望值名',
+    please_enter_statistics_name_the_alias_in_statistics_execute_sql: '请输入实际值名',
+    please_enter_comparison_execute_sql: '请输入期望值计算SQL',
+    please_enter_statistics_execute_sql: '请输入实际值计算SQL',
     custom_config: '自定义配置',
     engine: '引擎',
     engine_tips: '请选择引擎',

+ 18 - 3
dolphinscheduler-ui/src/service/modules/data-source/index.ts

@@ -129,18 +129,32 @@ export function connectionTest(id: IdReq): any {
   })
 }
 
-export function getDatasourceTablesById(datasourceId: number): any {
+export function getDatasourceDatabasesById(datasourceId: number): any {
   return axios({
-    url: '/datasources/tables',
+    url: '/datasources/databases',
     method: 'get',
     params: {
       datasourceId
     }
   })
 }
 
+export function getDatasourceTablesById(
+  datasourceId: number,
+  database: string
+): any {
+  return axios({
+    url: '/datasources/tables',
+    method: 'get',
+    params: {
+      datasourceId,
+      database
+    }
+  })
+}
+
 export function getDatasourceTableColumnsById(
   datasourceId: number,
+  database: string,
   tableName: string
 ): any {
   return axios({
@@ -148,6 +162,7 @@ export function getDatasourceTableColumnsById(
     method: 'get',
     params: {
       datasourceId,
+      database,
       tableName
     }
   })

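With the service module split into a database lookup plus database-scoped table and column lookups, the calls chain naturally. A minimal usage sketch, assuming the { label, value } option shape that use-rules.ts consumes; the helper function itself is hypothetical:

    import {
      getDatasourceDatabasesById,
      getDatasourceTablesById,
      getDatasourceTableColumnsById
    } from '@/service/modules/data-source'

    // Walk the datasource -> database -> table -> column cascade for one datasource id.
    async function loadCascade(datasourceId: number) {
      const databases = await getDatasourceDatabasesById(datasourceId)
      const database = databases?.[0]?.value // e.g. 'test'
      const tables = await getDatasourceTablesById(datasourceId, database)
      const table = tables?.[0]?.value // e.g. 'test1'
      const columns = await getDatasourceTableColumnsById(datasourceId, database, table)
      return { databases, tables, columns }
    }
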
+ 53 - 3
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-rules.ts

@@ -22,6 +22,7 @@ import {
   getDatasourceOptionsById
 } from '@/service/modules/data-quality'
 import {
+  getDatasourceDatabasesById,
   getDatasourceTablesById,
   getDatasourceTableColumnsById
 } from '@/service/modules/data-source'
@@ -35,9 +36,11 @@ export function useRules(
   const rules = ref([])
   const ruleLoading = ref(false)
   const srcDatasourceOptions = ref([] as { label: string; value: number }[])
+  const srcDatabaseOptions = ref([] as { label: string; value: number }[])
   const srcTableOptions = ref([] as { label: string; value: number }[])
   const srcTableColumnOptions = ref([] as { label: string; value: number }[])
   const targetDatasourceOptions = ref([] as { label: string; value: number }[])
+  const targetDatabaseOptions = ref([] as { label: string; value: number }[])
   const targetTableOptions = ref([] as { label: string; value: string }[])
   const targetTableColumnOptions = ref([] as { label: string; value: number }[])
   const writerDatasourceOptions = ref([] as { label: string; value: number }[])
@@ -122,9 +125,15 @@ export function useRules(
     if (item.field === 'src_datasource_id') {
       item.options = srcDatasourceOptions
     }
+    if (item.field === 'src_database') {
+      item.options = srcDatabaseOptions
+    }
     if (item.field === 'target_datasource_id') {
       item.options = targetDatasourceOptions
     }
+    if (item.field === 'target_database') {
+      item.options = targetDatabaseOptions
+    }
     if (item.field === 'writer_datasource_id') {
       item.options = writerDatasourceOptions
     }
@@ -159,8 +168,11 @@ export function useRules(
       const result = await getDatasourceOptionsById(value)
       srcDatasourceOptions.value = result || []
       if (reset) {
+        srcDatabaseOptions.value = []
         srcTableOptions.value = []
+        srcTableColumnOptions.value = []
         model.src_datasource_id = null
+        model.src_database = null
         model.src_table = null
         model.src_field = null
       }
@@ -170,8 +182,11 @@ export function useRules(
       const result = await getDatasourceOptionsById(value)
       targetDatasourceOptions.value = result || []
       if (reset) {
+        targetDatabaseOptions.value = []
         targetTableOptions.value = []
+        targetTableColumnOptions.value = []
         model.target_datasource_id = null
+        model.target_database = null
         model.target_table = null
         model.target_field = null
       }
@@ -186,24 +201,58 @@ export function useRules(
       return
     }
     if (field === 'src_datasource_id' && typeof value === 'number') {
-      const result = await getDatasourceTablesById(value)
-      srcTableOptions.value = result || []
+      const result = await getDatasourceDatabasesById(value)
+      srcDatabaseOptions.value = result || []
       if (reset) {
+        srcTableOptions.value = []
+        srcTableColumnOptions.value = []
+        model.src_database = null
         model.src_table = null
         model.src_field = null
       }
     }
     if (field === 'target_datasource_id' && typeof value === 'number') {
-      const result = await getDatasourceTablesById(value)
+      const result = await getDatasourceDatabasesById(value)
+      targetDatabaseOptions.value = result || []
+      if (reset) {
+        targetTableOptions.value = []
+        targetTableColumnOptions.value = []
+        model.target_database = null
+        model.target_table = null
+        model.target_field = null
+      }
+    }
+
+    if (field === 'src_database' && typeof value === 'string') {
+      const result = await getDatasourceTablesById(
+        model.src_datasource_id,
+        value
+      )
+      srcTableOptions.value = result || []
+      if (reset) {
+        srcTableColumnOptions.value = []
+        model.src_table = null
+        model.src_field = null
+      }
+    }
+
+    if (field === 'target_database' && typeof value === 'string') {
+      const result = await getDatasourceTablesById(
+        model.target_datasource_id,
+        value
+      )
       targetTableOptions.value = result || []
       if (reset) {
+        targetTableColumnOptions.value = []
         model.target_table = null
         model.target_field = null
       }
     }
+
     if (field === 'src_table' && typeof value === 'string') {
       const result = await getDatasourceTableColumnsById(
         model.src_datasource_id,
+        model.src_database,
         value
       )
       srcTableColumnOptions.value = result || []
@@ -214,6 +263,7 @@ export function useRules(
     if (field === 'target_table' && typeof value === 'string') {
       const result = await getDatasourceTableColumnsById(
         model.target_datasource_id,
+        model.target_database,
         value
       )
       targetTableColumnOptions.value = result || []

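The composable now clears every downstream selection whenever an upstream one changes: datasource resets database, table and column; database resets table and column; table resets column. A condensed, hypothetical illustration of that dependency chain (not the actual composable, which also fetches the new options):

    // Each level lists the levels that must be cleared when it changes.
    type Level = 'datasource' | 'database' | 'table' | 'field'

    const downstream: Record<Level, Level[]> = {
      datasource: ['database', 'table', 'field'],
      database: ['table', 'field'],
      table: ['field'],
      field: []
    }

    function resetBelow(model: Record<Level, unknown>, changed: Level) {
      for (const level of downstream[changed]) {
        model[level] = null
      }
    }
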
+ 2 - 1
dolphinscheduler-ui/src/views/projects/task/components/node/fields/use-task-type.ts

@@ -37,7 +37,8 @@ export function useTaskType(
     span: 24,
     name: t('project.node.task_type'),
     props: {
-      disabled: readonly || ['CONDITIONS', 'SWITCH'].includes(model.taskType)
+      disabled: readonly || ['CONDITIONS', 'SWITCH'].includes(model.taskType),
+      filterable: true
     },
     options: options,
     validate: {

+ 4 - 0
dolphinscheduler-ui/src/views/projects/task/components/node/format-data.ts

@@ -297,6 +297,7 @@ export function formatParams(data: INodeData): {
       operator: data.operator,
       src_connector_type: data.src_connector_type,
       src_datasource_id: data.src_datasource_id,
+      src_database: data.src_database,
       field_length: data.field_length,
       begin_time: data.begin_time,
       deadline: data.deadline,
@@ -311,6 +312,7 @@ export function formatParams(data: INodeData): {
       statistics_name: data.statistics_name,
       target_connector_type: data.target_connector_type,
       target_datasource_id: data.target_datasource_id,
+      target_database: data.target_database,
       target_table: data.target_table,
       threshold: data.threshold,
       mapping_columns: JSON.stringify(data.mapping_columns)
@@ -690,6 +692,7 @@ export function formatModel(data: ITaskData) {
       data.taskParams.ruleInputParameter.src_connector_type
     params.src_datasource_id =
       data.taskParams.ruleInputParameter.src_datasource_id
+    params.src_database = data.taskParams.ruleInputParameter.src_database
     params.src_table = data.taskParams.ruleInputParameter.src_table
     params.field_length = data.taskParams.ruleInputParameter.field_length
     params.begin_time = data.taskParams.ruleInputParameter.begin_time
@@ -707,6 +710,7 @@ export function formatModel(data: ITaskData) {
       data.taskParams.ruleInputParameter.target_connector_type
     params.target_datasource_id =
       data.taskParams.ruleInputParameter.target_datasource_id
+    params.target_database = data.taskParams.ruleInputParameter.target_database
     params.target_table = data.taskParams.ruleInputParameter.target_table
     params.threshold = data.taskParams.ruleInputParameter.threshold
     if (data.taskParams.ruleInputParameter.mapping_columns)

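formatParams and formatModel are mirror operations, so the new fields must survive the round trip from the node model into taskParams.ruleInputParameter and back. A small sketch with stand-in functions (the real ones carry many more fields):

    // Stand-ins for the serialize/deserialize directions shown in the diff above.
    interface DatabaseFields {
      src_database?: string
      target_database?: string
    }

    function toRuleInputParameter(model: DatabaseFields): DatabaseFields {
      return { src_database: model.src_database, target_database: model.target_database }
    }

    function fromRuleInputParameter(params: DatabaseFields): DatabaseFields {
      return { src_database: params.src_database, target_database: params.target_database }
    }

    const model = { src_database: 'test', target_database: 'default' }
    console.assert(
      fromRuleInputParameter(toRuleInputParameter(model)).target_database === 'default'
    )
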
+ 2 - 0
dolphinscheduler-ui/src/views/projects/task/components/node/types.ts

@@ -235,6 +235,7 @@ interface IRuleParameters {
   operator?: string
   src_connector_type?: number
   src_datasource_id?: number
+  src_database?: string
   src_table?: string
   field_length?: number
   begin_time?: string
@@ -249,6 +250,7 @@ interface IRuleParameters {
   statistics_name?: string
   target_connector_type?: number
   target_datasource_id?: number
+  target_database?: string
   target_table?: string
   threshold?: string
   mapping_columns?: string