Merge branch 'dev' into dev

xingchun-chen committed 5 years ago · commit bc7a3f9647

+ 12 - 12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@@ -449,30 +449,30 @@ public class ProcessDefinitionController extends BaseController {
     }
 
     /**
-     * export process definition by id
+     * batch export process definition by ids
      *
      * @param loginUser           login user
      * @param projectName         project name
-     * @param processDefinitionId process definition id
+     * @param processDefinitionIds process definition ids
      * @param response            response
      */
 
-    @ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCESS_DEFINITION_BY_ID_NOTES")
+    @ApiOperation(value = "batchExportProcessDefinitionByIds", notes= "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
     @ApiImplicitParams({
-            @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
+            @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
     })
     @GetMapping(value = "/export")
     @ResponseBody
-    public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
-                                            @PathVariable String projectName,
-                                            @RequestParam("processDefinitionId") Integer processDefinitionId,
-                                            HttpServletResponse response) {
+    public void batchExportProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
+                                                  @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
+                                                  @RequestParam("processDefinitionIds") String processDefinitionIds,
+                                                  HttpServletResponse response) {
         try {
-            logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}",
-                    loginUser.getUserName(), projectName, processDefinitionId);
-            processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId, response);
+            logger.info("batch export process definition by ids, login user:{}, project name:{}, process definition ids:{}",
+                    loginUser.getUserName(), projectName, processDefinitionIds);
+            processDefinitionService.batchExportProcessDefinitionByIds(loginUser, projectName, processDefinitionIds, response);
         } catch (Exception e) {
-            logger.error(Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(), e);
+            logger.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e);
         }
     }
 
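
Note on the reworked endpoint: processDefinitionIds is now a comma-separated id string, and the response body is a JSON array instead of a single definition. A minimal client sketch (Java 11 HttpClient; the host, port, and auth header name are placeholders, and the projects/{projectName}/process prefix is assumed from the controller's class-level mapping):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class BatchExportClientSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical host/port; "1,2,3" is the comma-separated list that
            // the new @RequestParam("processDefinitionIds") expects.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:12345/dolphinscheduler/projects/test/process/export"
                            + "?processDefinitionIds=1,2,3"))
                    .header("sessionId", "placeholder-session-id") // placeholder auth header, name assumed
                    .GET()
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // Since this change, the body is a JSON array of process definitions.
            System.out.println(response.body());
        }
    }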

+ 3 - 2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java

@@ -214,8 +214,8 @@ public enum Status {
     EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"),
     CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"),
     QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
-    DATA_IS_NOT_VALID(50017,"data %s not valid", "数据[%s]无效"),
-    DATA_IS_NULL(50018,"data %s is null", "数据[%s]不能为空"),
+    DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
+    DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
     PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
     PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid", "流程节点[%s]参数无效"),
     PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
@@ -226,6 +226,7 @@ public enum Status {
     BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
     TENANT_NOT_SUITABLE(50027,"there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
     EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028,"export process definition by id error", "导出工作流定义错误"),
+    BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028,"batch export process definition by ids error", "批量导出工作流定义错误"),
     IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"),
 
     HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"),
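
The %s → {0} switch above is not cosmetic: {0} is the java.text.MessageFormat placeholder style that neighboring entries such as PROCESS_DEFINE_STATE_ONLINE already use, and MessageFormat prints %s literally while silently dropping the argument. A quick illustration:

    import java.text.MessageFormat;

    public class PlaceholderDemo {
        public static void main(String[] args) {
            // %s is a String.format placeholder; MessageFormat prints it
            // literally and drops the argument:
            System.out.println(MessageFormat.format("data %s is null", "projectName"));
            // -> data %s is null
            // {0} is the MessageFormat placeholder the patch switches to:
            System.out.println(MessageFormat.format("data {0} is null", "projectName"));
            // -> data projectName is null
        }
    }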

+ 193 - 70
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

@@ -563,14 +563,18 @@ public class ProcessDefinitionService extends BaseDAGService {
     }
 
     /**
-     * export process definition by id
-     *
-     * @param loginUser login user
-     * @param projectName project name
-     * @param processDefinitionId process definition id
-     * @param response response
+     * batch export process definition by ids
+     * @param loginUser login user
+     * @param projectName project name
+     * @param processDefinitionIds comma-separated process definition ids
+     * @param response http response
      */
-    public void exportProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId, HttpServletResponse response) {
+    public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response){
+
+        if(StringUtils.isEmpty(processDefinitionIds)){
+            return;
+        }
+
         //export project info
         Project project = projectMapper.queryByName(projectName);
 
@@ -578,39 +582,68 @@ public class ProcessDefinitionService extends BaseDAGService {
         Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
         Status resultStatus = (Status) checkResult.get(Constants.STATUS);
 
-        if (resultStatus == Status.SUCCESS) {
+        if(resultStatus != Status.SUCCESS){
+            return;
+        }
+
+        List<ProcessMeta> processDefinitionList =
+                getProcessDefinitionList(processDefinitionIds);
+
+        if(CollectionUtils.isNotEmpty(processDefinitionList)){
+            downloadProcessDefinitionFile(response, processDefinitionList);
+        }
+    }
+
+    /**
+     * get process definition list by ids
+     * @param processDefinitionIds comma-separated process definition ids
+     * @return process meta list
+     */
+    private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds){
+        List<ProcessMeta> processDefinitionList = new ArrayList<>();
+        String[] processDefinitionIdArray = processDefinitionIds.split(",");
+        for (String strProcessDefinitionId : processDefinitionIdArray) {
             //get workflow info
+            int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
             ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
-
             if (null != processDefinition) {
-                String exportProcessJson = exportProcessMetaDataStr(processDefinitionId, processDefinition);
-                response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
-                response.setHeader("Content-Disposition", "attachment;filename="+processDefinition.getName()+".json");
-                BufferedOutputStream buff = null;
-                ServletOutputStream out = null;
+                processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition));
+            }
+        }
+
+        return processDefinitionList;
+    }
+
+    /**
+     * download the process definition file
+     * @param response http response
+     * @param processDefinitionList process meta list to write out as JSON
+     */
+    private void downloadProcessDefinitionFile(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
+        response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
+        BufferedOutputStream buff = null;
+        ServletOutputStream out = null;
+        try {
+            out = response.getOutputStream();
+            buff = new BufferedOutputStream(out);
+            buff.write(JSON.toJSONString(processDefinitionList).getBytes(StandardCharsets.UTF_8));
+            buff.flush();
+            buff.close();
+        } catch (IOException e) {
+            logger.warn("export process fail", e);
+        }finally {
+            if (null != buff) {
                 try {
-                    out = response.getOutputStream();
-                    buff = new BufferedOutputStream(out);
-                    buff.write(exportProcessJson.getBytes(StandardCharsets.UTF_8));
-                    buff.flush();
                     buff.close();
-                } catch (IOException e) {
-                    logger.warn("export process fail", e);
-                }finally {
-                    if (null != buff) {
-                        try {
-                            buff.close();
-                        } catch (Exception e) {
-                            logger.warn("export process buffer not close", e);
-                        }
-                    }
-                    if (null != out) {
-                        try {
-                            out.close();
-                        } catch (Exception e) {
-                            logger.warn("export process output stream not close", e);
-                        }
-                    }
+                } catch (Exception e) {
+                    logger.warn("export process buffer not close", e);
+                }
+            }
+            if (null != out) {
+                try {
+                    out.close();
+                } catch (Exception e) {
+                    logger.warn("export process output stream not close", e);
                 }
             }
         }
@@ -623,6 +656,17 @@ public class ProcessDefinitionService extends BaseDAGService {
      * @return export process metadata string
      */
     public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
+        //create workflow json file
+        return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId,processDefinition));
+    }
+
+    /**
+     * get export process metadata
+     * @param processDefinitionId process definition id
+     * @param processDefinition process definition
+     * @return process meta for export
+     */
+    public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
         //correct task param which has data source or dependent param
         String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
         processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson);
@@ -639,14 +683,6 @@ public class ProcessDefinitionService extends BaseDAGService {
         List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
         if (!schedules.isEmpty()) {
             Schedule schedule = schedules.get(0);
-            /*WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId());
-
-            if (null == workerGroup && schedule.getWorkerGroupId() == -1) {
-                workerGroup = new WorkerGroup();
-                workerGroup.setId(-1);
-                workerGroup.setName("");
-            }*/
-
             exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString());
             exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId());
             exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime()));
@@ -658,7 +694,7 @@ public class ProcessDefinitionService extends BaseDAGService {
             exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup());
         }
         //create workflow json file
-        return JSONUtils.toJsonString(exportProcessMeta);
+        return exportProcessMeta;
     }
 
     /**
@@ -705,24 +741,36 @@ public class ProcessDefinitionService extends BaseDAGService {
     public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
         Map<String, Object> result = new HashMap<>(5);
         String processMetaJson = FileUtils.file2String(file);
-        ProcessMeta processMeta = JSONUtils.parseObject(processMetaJson, ProcessMeta.class);
+        List<ProcessMeta> processMetaList = JSON.parseArray(processMetaJson,ProcessMeta.class);
 
         //check file content
-        if (null == processMeta) {
+        if (CollectionUtils.isEmpty(processMetaList)) {
             putMsg(result, Status.DATA_IS_NULL, "fileContent");
             return result;
         }
-        if (StringUtils.isEmpty(processMeta.getProjectName())) {
-            putMsg(result, Status.DATA_IS_NULL, "projectName");
-            return result;
-        }
-        if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
-            putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
-            return result;
+
+        for(ProcessMeta processMeta:processMetaList){
+
+            if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)){
+                return result;
+            }
         }
-        if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
-            putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
-            return result;
+
+        return result;
+    }
+
+    /**
+     * check and import process definition
+     * @param loginUser login user
+     * @param currentProjectName target project name
+     * @param result result map to fill with status
+     * @param processMeta process meta to import
+     * @return true if the definition and any schedule imported successfully
+     */
+    private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
+
+        if(!checkImportanceParams(processMeta,result)){
+            return false;
         }
 
         //deal with process name
@@ -734,31 +782,84 @@ public class ProcessDefinitionService extends BaseDAGService {
                     processDefinitionName, 1);
         }
 
-        //add special task param
-        String importProcessParam = addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject);
+        // get create process result
+        Map<String, Object> createProcessResult =
+                getCreateProcessResult(loginUser,
+                        currentProjectName,
+                        result,
+                        processMeta,
+                        processDefinitionName,
+                        addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject));
+
+        if(createProcessResult == null){
+            return false;
+        }
+
+        //create process definition
+        Integer processDefinitionId =
+                Objects.isNull(createProcessResult.get("processDefinitionId"))?
+                        null:Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
+
+        //scheduler param
+        return getImportProcessScheduleResult(loginUser,
+                currentProjectName,
+                result,
+                processMeta,
+                processDefinitionName,
+                processDefinitionId);
+
+    }
 
-        Map<String, Object> createProcessResult;
+    /**
+     * get create process result
+     * @param loginUser login user
+     * @param currentProjectName target project name
+     * @param result result map to fill with status
+     * @param processMeta process meta being imported
+     * @param processDefinitionName process definition name
+     * @param importProcessParam process definition json with import task params applied
+     * @return create process result, or null if creation failed
+     */
+    private Map<String, Object> getCreateProcessResult(User loginUser,
+                                                       String currentProjectName,
+                                                       Map<String, Object> result,
+                                                       ProcessMeta processMeta,
+                                                       String processDefinitionName,
+                                                       String importProcessParam){
+        Map<String, Object> createProcessResult = null;
         try {
             createProcessResult = createProcessDefinition(loginUser
                     ,currentProjectName,
-                    processDefinitionName,
+                    processDefinitionName+"_import_"+System.currentTimeMillis(),
                     importProcessParam,
                     processMeta.getProcessDefinitionDescription(),
                     processMeta.getProcessDefinitionLocations(),
                     processMeta.getProcessDefinitionConnects());
+            putMsg(result, Status.SUCCESS);
         } catch (JsonProcessingException e) {
             logger.error("import process meta json data: {}", e.getMessage(), e);
             putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
-            return result;
         }
 
-        putMsg(result, Status.SUCCESS);
-        //create process definition
-        Integer processDefinitionId = null;
-        if (null != createProcessResult && Objects.nonNull(createProcessResult.get("processDefinitionId"))) {
-            processDefinitionId = Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
-        }
-        //scheduler param
+        return createProcessResult;
+    }
+
+    /**
+     * get import process schedule result
+     * @param loginUser login user
+     * @param currentProjectName target project name
+     * @param result result map to fill with status
+     * @param processMeta process meta being imported
+     * @param processDefinitionName process definition name
+     * @param processDefinitionId id of the newly created process definition
+     * @return true if the schedule imported, or none was defined
+     */
+    private boolean getImportProcessScheduleResult(User loginUser,
+                                                   String currentProjectName,
+                                                   Map<String, Object> result,
+                                                   ProcessMeta processMeta,
+                                                   String processDefinitionName,
+                                                   Integer processDefinitionId) {
         if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) {
             int scheduleInsert = importProcessSchedule(loginUser,
                     currentProjectName,
@@ -768,11 +869,33 @@ public class ProcessDefinitionService extends BaseDAGService {
 
             if (0 == scheduleInsert) {
                 putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
-                return result;
+                return false;
             }
         }
+        return true;
+    }
 
-        return result;
+    /**
+     * check important params (project name, process definition name and json)
+     * @param processMeta process meta to validate
+     * @param result result map to fill with status
+     * @return true if all required fields are present
+     */
+    private boolean checkImportanceParams(ProcessMeta processMeta,Map<String, Object> result){
+        if (StringUtils.isEmpty(processMeta.getProjectName())) {
+            putMsg(result, Status.DATA_IS_NULL, "projectName");
+            return false;
+        }
+        if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
+            putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
+            return false;
+        }
+        if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
+            putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
+            return false;
+        }
+
+        return true;
     }
 
     /**

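
Export and import now have to agree on an array payload: export serializes the List<ProcessMeta> with fastjson's JSON.toJSONString, and import reads it back with JSON.parseArray. A minimal round-trip sketch (ProcessMeta import omitted; the bean setters are assumed to mirror the getters used above):

    import com.alibaba.fastjson.JSON;
    import java.util.Arrays;
    import java.util.List;

    public class ProcessMetaRoundTrip {
        public static void main(String[] args) {
            ProcessMeta meta = new ProcessMeta();
            meta.setProjectName("test");               // assumed bean setter
            meta.setProcessDefinitionName("wf_a");     // assumed bean setter

            // What batchExportProcessDefinitionByIds streams to the client:
            String json = JSON.toJSONString(Arrays.asList(meta));

            // What importProcessDefinition now reads back; a pre-change export
            // file (a single JSON object) would presumably fail this parse:
            List<ProcessMeta> parsed = JSON.parseArray(json, ProcessMeta.class);
            System.out.println(parsed.size()); // 1
        }
    }
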
+ 2 - 0
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@@ -252,3 +252,5 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
 AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
 DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
 QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
+EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
+BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids

+ 2 - 0
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@@ -252,3 +252,5 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source
 AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
 DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
 QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
+EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
+BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids

+ 3 - 0
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@@ -250,3 +250,6 @@ UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源
 AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源
 DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据
 QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表
+EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义
+BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义
+

+ 62 - 1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java

@@ -18,12 +18,14 @@ package org.apache.dolphinscheduler.api.controller;
 
 import org.apache.dolphinscheduler.api.enums.Status;
 import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
+import org.apache.dolphinscheduler.api.utils.PageInfo;
 import org.apache.dolphinscheduler.api.utils.Result;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.ReleaseState;
 import org.apache.dolphinscheduler.common.enums.UserType;
 import org.apache.dolphinscheduler.common.model.TaskNode;
 import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
+import org.apache.dolphinscheduler.dao.entity.Resource;
 import org.apache.dolphinscheduler.dao.entity.User;
 import org.junit.*;
 import org.junit.runner.RunWith;
@@ -33,6 +35,8 @@ import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.mock.web.MockHttpServletResponse;
+import javax.servlet.http.HttpServletResponse;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -111,7 +115,7 @@ public class ProcessDefinitionControllerTest{
     }
 
     @Test
-    public void UpdateProcessDefinition() throws Exception {
+    public void updateProcessDefinition() throws Exception {
 
         String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
         String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
@@ -287,4 +291,61 @@ public class ProcessDefinitionControllerTest{
 
         Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
     }
+
+    @Test
+    public void testQueryProcessDefinitionAllByProjectId() throws Exception{
+        int projectId = 1;
+        Map<String,Object> result = new HashMap<>();
+        putMsg(result,Status.SUCCESS);
+
+        Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
+        Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId);
+
+        Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
+    }
+
+    @Test
+    public void testViewTree() throws Exception{
+        String projectName = "test";
+        int processId = 1;
+        int limit = 2;
+        Map<String,Object> result = new HashMap<>();
+        putMsg(result,Status.SUCCESS);
+
+        Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result);
+        Result response = processDefinitionController.viewTree(user,projectName,processId,limit);
+
+        Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
+    }
+
+    @Test
+    public void testQueryProcessDefinitionListPaging() throws Exception{
+        String projectName = "test";
+        int pageNo = 1;
+        int pageSize = 10;
+        String searchVal = "";
+        int userId = 1;
+
+        Map<String,Object> result = new HashMap<>();
+        putMsg(result,Status.SUCCESS);
+        result.put(Constants.DATA_LIST,new PageInfo<Resource>(1,10));
+
+        Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
+        Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize);
+
+        Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
+    }
+
+    @Test
+    public void testBatchExportProcessDefinitionByIds() throws Exception{
+
+        String processDefinitionIds = "1,2";
+        String projectName = "test";
+        HttpServletResponse response = new MockHttpServletResponse();
+        ProcessDefinitionService service = new ProcessDefinitionService();
+        ProcessDefinitionService spy = Mockito.spy(service);
+        Mockito.doNothing().when(spy).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
+        processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
+    }
+
 }
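
One caveat in testBatchExportProcessDefinitionByIds above: the locally created spy is never injected into processDefinitionController, so the doNothing stub is dead code (the test still passes only because the injected mock is a no-op by default). Assuming processDefinitionService is the @Mock field the other tests already stub, a tighter variant would be:

    @Test
    public void testBatchExportProcessDefinitionByIds() throws Exception {
        String processDefinitionIds = "1,2";
        String projectName = "test";
        HttpServletResponse response = new MockHttpServletResponse();
        // Stub the mock the runner injects into the controller, not a detached spy:
        Mockito.doNothing().when(processDefinitionService)
                .batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
        processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
        Mockito.verify(processDefinitionService)
                .batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
    }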

+ 1 - 1
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java

@@ -46,7 +46,7 @@ public class ApiExceptionHandlerTest {
     public void exceptionHandlerRuntime() throws NoSuchMethodException {
         ApiExceptionHandler handler = new ApiExceptionHandler();
         ProcessDefinitionController controller = new ProcessDefinitionController();
-        Method method = controller.getClass().getMethod("exportProcessDefinitionById", User.class, String.class, Integer.class, HttpServletResponse.class);
+        Method method = controller.getClass().getMethod("batchExportProcessDefinitionByIds", User.class, String.class, String.class, HttpServletResponse.class);
         HandlerMethod hm = new HandlerMethod(controller, method);
         Result result = handler.exceptionHandler(new RuntimeException("test exception"), hm);
         Assert.assertEquals(Status.INTERNAL_SERVER_ERROR_ARGS.getCode(),result.getCode().intValue());

File diff suppressed because it is too large
+ 66 - 46
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java


+ 3 - 6
dolphinscheduler-dao/src/main/resources/datasource.properties

@@ -14,17 +14,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-# postgre
-#spring.datasource.driver-class-name=org.postgresql.Driver
-#spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler
-# mysql
+        
+# postgresql
 spring.datasource.driver-class-name=org.postgresql.Driver
 spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler
 spring.datasource.username=test
 spring.datasource.password=test
 
+# mysql
 # connection configuration
 #spring.datasource.initialSize=5
 # min connection number

+ 27 - 7
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue

@@ -123,7 +123,7 @@
       </table>
     </div>
     <x-poptip
-            v-show="strDelete !== ''"
+            v-show="strSelectIds !== ''"
             ref="poptipDeleteAll"
             placement="bottom-start"
             width="90">
@@ -137,6 +137,10 @@
       </template>
     </x-poptip>
 
+    <template v-if="strSelectIds !== ''">
+      <x-button size="xsmall" style="position: absolute; bottom: -48px; left: 80px;" @click="_batchExport(item)" >{{$t('Export')}}</x-button>
+    </template>
+
   </div>
 </template>
 <script>
@@ -151,7 +155,7 @@
     data () {
       return {
         list: [],
-        strDelete: '',
+        strSelectIds: '',
         checkAll: false
       }
     },
@@ -326,12 +330,28 @@
 
       _export (item) {
         this.exportDefinition({
-          processDefinitionId: item.id,
-          processDefinitionName: item.name
+          processDefinitionIds: item.id,
+          fileName: item.name
         }).catch(e => {
           this.$message.error(e.msg || '')
         })
       },
+
+      _batchExport () {
+        this.exportDefinition({
+          processDefinitionIds: this.strSelectIds,
+          fileName: "process_"+new Date().getTime()
+        }).then(res => {
+          this._onUpdate()
+          this.checkAll = false
+          this.strSelectIds = ''
+        }).catch(e => {
+          this.strSelectIds = ''
+          this.checkAll = false
+          this.$message.error(e.msg)
+        })
+      },
+
       /**
        * Edit state
        */
@@ -364,7 +384,7 @@
             arr.push(item.id)
           }
         })
-        this.strDelete = _.join(arr, ',')
+        this.strSelectIds = _.join(arr, ',')
         if (v === false) {
           this.checkAll = false
         }
@@ -375,7 +395,7 @@
       _batchDelete () {
         this.$refs['poptipDeleteAll'].doClose()
         this.batchDeleteDefinition({
-          processDefinitionIds: this.strDelete
+          processDefinitionIds: this.strSelectIds
         }).then(res => {
           this._onUpdate()
           this.checkAll = false
@@ -399,7 +419,7 @@
         deep: true
       },
       pageNo () {
-        this.strDelete = ''
+        this.strSelectIds = ''
       }
     },
     created () {

+ 4 - 3
dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js

@@ -596,14 +596,15 @@ export default {
       }
     }
 
-    io.get(`projects/${state.projectName}/process/export`,{processDefinitionId: payload.processDefinitionId,}, res => {
-      downloadBlob(res, payload.processDefinitionName)
-  }, e => {
+    io.get(`projects/${state.projectName}/process/export`,{processDefinitionIds: payload.processDefinitionIds}, res => {
+      downloadBlob(res, payload.fileName)
+    }, e => {
 
     }, {
       responseType: 'blob'
     })
   },
+
   /**
    * Process instance get variable
    */