Commit 01893647 authored by mcb's avatar mcb

Merge branch 'dmp_dev' of http://gitlab.ioubuy.cn/yaobenzhang/jz-dmp-service into dmp_dev

parents cc0d2675 5f24ed50
package com.jz.common.utils; package com.jz.common.utils;
import com.alibaba.fastjson.JSON; import java.io.File;
import com.alibaba.fastjson.JSONObject; import java.util.ArrayList;
import com.google.gson.Gson; import java.util.HashMap;
import com.jz.common.utils.web.HttpClientUtils; import java.util.List;
import com.jz.common.utils.web.SessionUtils; import java.util.Map;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowExecution;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.FileSystemResource;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.http.HttpEntity; import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.client.SimpleClientHttpRequestFactory; import org.springframework.http.client.SimpleClientHttpRequestFactory;
...@@ -19,11 +20,16 @@ import org.springframework.util.MultiValueMap; ...@@ -19,11 +20,16 @@ import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils; import org.springframework.util.StringUtils;
import org.springframework.web.client.RestTemplate; import org.springframework.web.client.RestTemplate;
import java.io.File; import com.alibaba.fastjson.JSON;
import java.util.ArrayList; import com.alibaba.fastjson.JSONObject;
import java.util.HashMap; import com.google.gson.Gson;
import java.util.List; import com.jz.common.utils.web.HttpClientUtils;
import java.util.Map; import com.jz.common.utils.web.SessionUtils;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowExecution;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro;
import com.jz.dmp.modules.model.DmpMember;
import com.jz.dmp.modules.model.DmpRole;
import com.jz.dmp.modules.model.SSOUserInfo;
/** /**
* azkaban ajax api 工具类 * azkaban ajax api 工具类
...@@ -36,6 +42,9 @@ public class AzkabanApiUtils2 { ...@@ -36,6 +42,9 @@ public class AzkabanApiUtils2 {
private String azkabanServerUrl; private String azkabanServerUrl;
private String userName; private String userName;
private String password; private String password;
@Autowired
RedisTemplate<String,SSOUserInfo> redisTemplate;
public AzkabanApiUtils2(String azkabanServerUrl, String userName, String password) { public AzkabanApiUtils2(String azkabanServerUrl, String userName, String password) {
this(azkabanServerUrl); this(azkabanServerUrl);
...@@ -93,11 +102,42 @@ public class AzkabanApiUtils2 { ...@@ -93,11 +102,42 @@ public class AzkabanApiUtils2 {
throw new RuntimeException("登陆失败"); throw new RuntimeException("登陆失败");
}*/ }*/
String sessionId = SessionUtils.getSession().getId(); //"dcfc608c-c58a-45b7-adc7-9902b652496e"; String sessionId = SessionUtils.getSession().getId(); //"dcfc608c-c58a-45b7-adc7-9902b652496e";
//String sessionId = "f70d53fa-55da-4688-8d00-64350e4fb8ea"; //String sessionId = "f0d06f4a-874c-4dfc-8959-101b6add6bf5";
//通过redis方式登录Azkaban
String redisKey = "spring:sessions:sessions:"+sessionId;
SSOUserInfo ssoUserInfo = redisTemplate.opsForValue().get(redisKey);
if (ssoUserInfo==null) {
redisTemplate.opsForValue().set(redisKey, getSSOuserInfo());
}
System.err.println("----sessionId="+sessionId); System.err.println("----sessionId="+sessionId);
return sessionId; //SessionUtils.getSession().getId(); return sessionId; //SessionUtils.getSession().getId();
} }
/**
 * Builds the {@link SSOUserInfo} entity that is written into Redis so the
 * Azkaban side can authenticate the current DMP session.
 *
 * @return SSOUserInfo carrying the current user's login name and a map of
 *         role type -> role remark (used as Azkaban permission strings)
 */
private SSOUserInfo getSSOuserInfo(){
    Map<String, String> rolePermissMap = new HashMap<>();
    DmpMember dmpMember = SessionUtils.getSecurityUser();
    List<DmpRole> memberProjectRoles = dmpMember.getMemberProjectRoleList();
    // Guard: a user without project roles must not NPE the login flow;
    // it simply yields an empty permission map.
    if (memberProjectRoles != null) {
        for (DmpRole role : memberProjectRoles) {
            rolePermissMap.put(role.getRoleType(), role.getRemark());
        }
    }
    SSOUserInfo ssoUserInfo = new SSOUserInfo();
    ssoUserInfo.setUserName(dmpMember.getUsername());
    ssoUserInfo.setAzkabanRoleRefPermissions(rolePermissMap);
    return ssoUserInfo;
}
/** /**
* 创建azkaban项目名 * 创建azkaban项目名
......
package com.jz.common.utils; package com.jz.common.utils;
import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;
public class CommonUtils { public class CommonUtils {
private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class);
/** /**
* UUID随机数 * UUID随机数
...@@ -36,5 +44,62 @@ public class CommonUtils { ...@@ -36,5 +44,62 @@ public class CommonUtils {
String uuid = UUID.randomUUID().toString().toUpperCase().replaceAll("-", ""); String uuid = UUID.randomUUID().toString().toUpperCase().replaceAll("-", "");
return uuid; return uuid;
} }
/**
 * Arranges a flat list of objects into a tree structure.
 *
 * <p>Objects are grouped by the value of the field named
 * {@code parentCodeFiledName}; the group keyed by {@code parentCode} becomes
 * the top level, and each node's children are attached recursively through
 * the field named {@code childrenFieldName}.
 *
 * @param parentCode          value identifying the top-level parent
 * @param objs                flat list of objects to arrange
 * @param parentCodeFiledName name of the field holding a node's parent code
 * @param codeFieldName       name of the field holding a node's own code
 * @param childrenFieldName   name of the field receiving the child list
 * @param <T>                 node type
 * @return the top-level nodes with children populated, or {@code null} when
 *         no object's parent code equals {@code parentCode}
 * @throws Exception if reflective field access fails while attaching children
 */
public static <T> List<T> objArrangeTree(Object parentCode, List<T> objs, String parentCodeFiledName, String codeFieldName, String childrenFieldName) throws Exception {
    Map<Object, List<T>> dictMap = objs.stream().collect(Collectors.groupingBy(x -> {
        try {
            return ReflectAssistUtils.getFieldValueByFieldName(parentCodeFiledName, x);
        } catch (Exception e) {
            // Pass e as the trailing Throwable argument so SLF4J logs the
            // stack trace (the old "{}", e form swallowed it); no printStackTrace.
            logger.error("树形结构封装异常【{}】", parentCodeFiledName, e);
        }
        return "";
    }));
    List<T> tList = dictMap.get(parentCode); // top-level nodes (may be null)
    if (!CollectionUtils.isEmpty(tList)) {
        for (T t : tList) {
            // arrangeChildren mutates the node in place; the old
            // "t = arrangeChildren(...)" reassignment was dead code.
            arrangeChildren(t, dictMap, codeFieldName, childrenFieldName);
        }
    }
    return tList;
}
/**
 * Recursively populates the children of one tree node.
 *
 * @param node              node whose children should be attached
 * @param dictMap           all nodes grouped by their parent code
 * @param codeFieldName     name of the field holding a node's own code
 * @param childrenFieldName name of the field receiving the child list
 * @param <T>               node type
 * @return the same node instance, with its subtree populated
 * @throws Exception if reflective field access fails
 */
private static <T> T arrangeChildren(T node, Map<Object, List<T>> dictMap, String codeFieldName, String childrenFieldName) throws Exception {
    Object nodeCode = ReflectAssistUtils.getFieldValueByFieldName(codeFieldName, node);
    List<T> childNodes = dictMap.get(nodeCode);
    if (CollectionUtils.isEmpty(childNodes)) {
        return node;
    }
    for (T childNode : childNodes) {
        arrangeChildren(childNode, dictMap, codeFieldName, childrenFieldName);
    }
    ReflectAssistUtils.setFieldValueByFieldName(childrenFieldName, node, childNodes);
    return node;
}
} }
...@@ -16,6 +16,7 @@ import com.jz.common.enums.NodeChangeTypeEnum; ...@@ -16,6 +16,7 @@ import com.jz.common.enums.NodeChangeTypeEnum;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNode; import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNode;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNodeChangeInfo; import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNodeChangeInfo;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro; import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro;
import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto;
import com.jz.dmp.modules.model.DmpNavigationTree; import com.jz.dmp.modules.model.DmpNavigationTree;
import com.jz.dmp.modules.model.DmpProject; import com.jz.dmp.modules.model.DmpProject;
import com.jz.dmp.modules.model.DmpProjectSystemInfo; import com.jz.dmp.modules.model.DmpProjectSystemInfo;
...@@ -42,14 +43,42 @@ public class FlowParseTool { ...@@ -42,14 +43,42 @@ public class FlowParseTool {
/** /**
* 要发布到的项目配置信息 * 要发布到的项目配置信息
*/ */
private DmpProjectSystemInfo publishedToProjectSystemInfo; //private DmpProjectSystemInfo publishedToProjectSystemInfo;
/**
* 项目配置信息(调整)
*/
private DmpProjectConfigInfoDto dmpProjectConfigInfoDto;
private DmpDevelopTaskService dmpDevelopTaskService; private DmpDevelopTaskService dmpDevelopTaskService;
private DmpNavigationTreeService dmpNavigationTreeService; private DmpNavigationTreeService dmpNavigationTreeService;
private DmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService; private DmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService;
/**
* 流程属性
*/
private FlowPro flowPro;
/**
* 节点依赖关系
*/
private Map<String, String> nodeDependcyRefMap;
/**
* 流程节点
* key是节点id
*/
private Map<String, FlowNode> flowNodeMap;
/**
* 流程变更数据
*/
private Map flowChangedMap;
/** /**
* 不发布项目用 * 不发布项目用
...@@ -78,34 +107,30 @@ public class FlowParseTool { ...@@ -78,34 +107,30 @@ public class FlowParseTool {
DmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService) { DmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService) {
this(flowPro, dmpWorkFlowSubmitDetailsService); this(flowPro, dmpWorkFlowSubmitDetailsService);
this.publishedToProject = publishedToProject; this.publishedToProject = publishedToProject;
this.publishedToProjectSystemInfo = publishedToProjectSystemInfo; //this.publishedToProjectSystemInfo = publishedToProjectSystemInfo;
this.dmpDevelopTaskService = dmpDevelopTaskService; this.dmpDevelopTaskService = dmpDevelopTaskService;
this.dmpNavigationTreeService = dmpNavigationTreeService; this.dmpNavigationTreeService = dmpNavigationTreeService;
} }
/** /**
* 流程属性 * 发布项目用
*/ *
private FlowPro flowPro; * @param flowPro
* @param publishedToProject
/** * @param dmpProjectConfigInfoDto
* 节点依赖关系
*/
private Map<String, String> nodeDependcyRefMap;
/**
* 流程节点
* key是节点id
*/
private Map<String, FlowNode> flowNodeMap;
/**
* 流程变更数据
*/ */
private Map flowChangedMap; public FlowParseTool(FlowPro flowPro,
DmpProject publishedToProject,
DmpProjectConfigInfoDto dmpProjectConfigInfoDto,
DmpDevelopTaskService dmpDevelopTaskService,
DmpNavigationTreeService dmpNavigationTreeService,
DmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService) {
this(flowPro, dmpWorkFlowSubmitDetailsService);
this.publishedToProject = publishedToProject;
this.dmpProjectConfigInfoDto = dmpProjectConfigInfoDto;
this.dmpDevelopTaskService = dmpDevelopTaskService;
this.dmpNavigationTreeService = dmpNavigationTreeService;
}
private void parse() { private void parse() {
...@@ -337,13 +362,13 @@ public class FlowParseTool { ...@@ -337,13 +362,13 @@ public class FlowParseTool {
*/ */
public boolean publish()throws Exception { public boolean publish()throws Exception {
Long publishedToProjectId = publishedToProjectSystemInfo.getProjectId(); Long publishedToProjectId = dmpProjectConfigInfoDto.getProjectId().longValue();
Long treeId = flowPro.getTreeId(); Long treeId = flowPro.getTreeId();
/** /**
* 当前任务生成文件存放根路径 * 当前任务生成文件存放根路径
*/ */
String localTaskPath = publishedToProjectSystemInfo.getAzkabanLocalTaskFilePath() + "/" + publishedToProjectId + "/" + treeId; String localTaskPath = dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanLocalTaskFilePath() + "/" + publishedToProjectId + "/" + treeId;
File localTaskFile = new File(localTaskPath); File localTaskFile = new File(localTaskPath);
if (!localTaskFile.exists()) { if (!localTaskFile.exists()) {
localTaskFile.mkdirs(); localTaskFile.mkdirs();
...@@ -430,10 +455,12 @@ public class FlowParseTool { ...@@ -430,10 +455,12 @@ public class FlowParseTool {
//上传到azkaban todo //上传到azkaban todo
//上次zip包到azkaban //上次zip包到azkaban
String localTaskZipAbsolutePath = localTaskZipPath + "/" + localZipTargetFileName; String localTaskZipAbsolutePath = localTaskZipPath + "/" + localZipTargetFileName;
String azkabanApiUrl = publishedToProjectSystemInfo.getAzkabanMonitorUrl(); String azkabanApiUrl = dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanMonitorUrl();
AzkabanApiUtils2 azkabanApiUtils = new AzkabanApiUtils2(azkabanApiUrl); AzkabanApiUtils2 azkabanApiUtils = new AzkabanApiUtils2(azkabanApiUrl);
return azkabanApiUtils.loginCreateProjectuploadZipAndSchedule("jz_workflow_" + publishedToProjectId, publishedToProject.getProjectDesc(), localTaskZipAbsolutePath, flowPro); return azkabanApiUtils.loginCreateProjectuploadZipAndSchedule("jz_workflow_new_" + publishedToProjectId, publishedToProject.getProjectDesc(), localTaskZipAbsolutePath, flowPro);
} }
/** /**
...@@ -446,7 +473,7 @@ public class FlowParseTool { ...@@ -446,7 +473,7 @@ public class FlowParseTool {
String fileName = flowNode.getNodeName() + ".sh"; String fileName = flowNode.getNodeName() + ".sh";
String scriptFileAbsolutePath = localTaskExecArgsPath + fileName; String scriptFileAbsolutePath = localTaskExecArgsPath + fileName;
Long publishedToProjectId = publishedToProjectSystemInfo.getProjectId(); Long publishedToProjectId = dmpProjectConfigInfoDto.getProjectId().longValue();
Long treeId = flowPro.getTreeId(); Long treeId = flowPro.getTreeId();
List<String> list = new ArrayList<>(); List<String> list = new ArrayList<>();
...@@ -456,16 +483,16 @@ public class FlowParseTool { ...@@ -456,16 +483,16 @@ public class FlowParseTool {
//远程shell 路径 //远程shell 路径
String remoteShellDir = publishedToProjectSystemInfo.getAzkabanExectorShellPath() + "/" + publishedToProjectId + "/" + treeId + "/"; String remoteShellDir = dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanExectorShellPath() + "/" + publishedToProjectId + "/" + treeId + "/";
//上传shell文件 todo //上传shell文件 todo
SFTPUtils sftpUtils = new SFTPUtils(publishedToProjectSystemInfo.getShellCmdServer(), SFTPUtils sftpUtils = new SFTPUtils(dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdServer(),
publishedToProjectSystemInfo.getShellCmdUser(), dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdUser(),
publishedToProjectSystemInfo.getShellCmdPassword(), dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdPassword(),
publishedToProjectSystemInfo.getShellSftpPort()); dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellSftpPort());
sftpUtils.singleUploadFile(localTaskExecArgsPath, fileName, remoteShellDir); sftpUtils.singleUploadFile(localTaskExecArgsPath, fileName, remoteShellDir);
String command = "command=" + publishedToProjectSystemInfo.getAzkabanExectorShellExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + remoteShellDir + fileName; String command = "command=" + dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanExectorShellExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + remoteShellDir + fileName;
return command; return command;
} }
...@@ -480,22 +507,22 @@ public class FlowParseTool { ...@@ -480,22 +507,22 @@ public class FlowParseTool {
String fileName = flowNode.getNodeName() + ".sql"; String fileName = flowNode.getNodeName() + ".sql";
String scriptFileAbsolutePath = localTaskExecArgsPath + fileName; String scriptFileAbsolutePath = localTaskExecArgsPath + fileName;
Long publishedToProjectId = publishedToProjectSystemInfo.getProjectId(); Long publishedToProjectId = dmpProjectConfigInfoDto.getProjectId().longValue();
Long treeId = flowPro.getTreeId(); Long treeId = flowPro.getTreeId();
FileUtils.write(scriptFileAbsolutePath, flowNode.getScript()); FileUtils.write(scriptFileAbsolutePath, flowNode.getScript());
//上传sql文件 todo //上传sql文件 todo
//远程shell 路径 //远程shell 路径
String remoteSqlDir = publishedToProjectSystemInfo.getAzkabanExectorSqlPath() + "/" + publishedToProjectId + "/" + treeId + "/"; String remoteSqlDir = dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanExectorSqlPath() + "/" + publishedToProjectId + "/" + treeId + "/";
//上传shell文件 todo //上传shell文件 todo
SFTPUtils sftpUtils = new SFTPUtils(publishedToProjectSystemInfo.getShellCmdServer(), SFTPUtils sftpUtils = new SFTPUtils(dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdServer(),
publishedToProjectSystemInfo.getShellCmdUser(), dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdUser(),
publishedToProjectSystemInfo.getShellCmdPassword(), dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellCmdPassword(),
publishedToProjectSystemInfo.getShellSftpPort()); dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getShellSftpPort());
sftpUtils.singleUploadFile(localTaskExecArgsPath, fileName, remoteSqlDir); sftpUtils.singleUploadFile(localTaskExecArgsPath, fileName, remoteSqlDir);
String command = "command=" + publishedToProjectSystemInfo.getAzkabanExectorSqlExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + remoteSqlDir + fileName; String command = "command=" + dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanExectorSqlExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + remoteSqlDir + fileName;
return command; return command;
} }
...@@ -516,7 +543,7 @@ public class FlowParseTool { ...@@ -516,7 +543,7 @@ public class FlowParseTool {
String execXmlFileName = execXmlFileNameAndVersion.split("@")[1];*/ String execXmlFileName = execXmlFileNameAndVersion.split("@")[1];*/
//任务所在项目id //任务所在项目id
Long projectId = flowPro.getProjectId(); Long projectId = flowPro.getProjectId();
Long publishedToProjectId = publishedToProjectSystemInfo.getProjectId(); Long publishedToProjectId = dmpProjectConfigInfoDto.getProjectId().longValue();
//根据taskName获取treeId //根据taskName获取treeId
String taskName = flowNode.getScript(); String taskName = flowNode.getScript();
...@@ -533,7 +560,7 @@ public class FlowParseTool { ...@@ -533,7 +560,7 @@ public class FlowParseTool {
String execXmlFileNameAndVersion = getPublishSyncTaskFileNameAndLatestVersion(taskName, syncTaskTreeId); String execXmlFileNameAndVersion = getPublishSyncTaskFileNameAndLatestVersion(taskName, syncTaskTreeId);
String execXmlFileName = execXmlFileNameAndVersion.split("@")[1]; String execXmlFileName = execXmlFileNameAndVersion.split("@")[1];
//xml 执行xml的命令写到job文件中 //xml 执行xml的命令写到job文件中
String command = "command=" + publishedToProjectSystemInfo.getAzkabanExectorXmlExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + execXmlFileName; String command = "command=" + dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanExectorXmlExec() + " " + publishedToProjectId + " ${azkaban.flow.flowid} ${azkaban.job.id} " + execXmlFileName;
return command; return command;
} }
...@@ -563,7 +590,7 @@ public class FlowParseTool { ...@@ -563,7 +590,7 @@ public class FlowParseTool {
String subProcessFlowName = flowNode.getScript(); String subProcessFlowName = flowNode.getScript();
//检查子流程是否存在 todo //检查子流程是否存在 todo
String azkabanApiUrl = publishedToProjectSystemInfo.getAzkabanMonitorUrl(); String azkabanApiUrl = dmpProjectConfigInfoDto.getDmpPublicConfigInfoDto().getAzkabanMonitorUrl();
AzkabanApiUtils2 azkabanApiUtils = new AzkabanApiUtils2(azkabanApiUrl); AzkabanApiUtils2 azkabanApiUtils = new AzkabanApiUtils2(azkabanApiUrl);
boolean flowExists = azkabanApiUtils.checkFlowExists("jz_workflow_" + flowPro.getPublishedToProjectId(), subProcessFlowName); boolean flowExists = azkabanApiUtils.checkFlowExists("jz_workflow_" + flowPro.getPublishedToProjectId(), subProcessFlowName);
if (!flowExists) { if (!flowExists) {
......
package com.jz.common.utils;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import com.jz.dmp.modules.controller.bean.DmpNavigationTreeDto;
/**
* @ClassName: ReflectAssistUtils
* @Description: TODO(反射辅助工具类)
* @author ybz
* @date 2021年1月26日
*
*/
public class ReflectAssistUtils {

    /**
     * Reads a property value through its JavaBean getter.
     *
     * @param fieldName property name (without the "get" prefix)
     * @param obj       object to read from
     * @return the value returned by {@code getFieldName()}
     * @throws Exception if the getter does not exist or cannot be invoked
     */
    public static Object getFieldValueByFieldName(String fieldName, Object obj) throws Exception {
        Class<?> cls = obj.getClass();
        String getMethodName = "get" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);
        Method getMethod = cls.getMethod(getMethodName);
        return getMethod.invoke(obj);
    }

    /**
     * Writes a property value through its JavaBean setter.
     *
     * <p>If no setter takes the value's exact runtime class, the class's
     * public methods are scanned for a one-argument setter whose parameter
     * type is assignable from the value's class (e.g. {@code setChildren(List)}
     * for an {@code ArrayList} value). This generalizes the previous
     * ArrayList-only special case to any collection or interface subtype.
     *
     * @param fieldName  property name (without the "set" prefix)
     * @param obj        object to write to
     * @param fieldVaule value to assign; must not be null
     * @param <T>        value type
     * @throws Exception if no compatible setter exists or invocation fails
     */
    public static <T> void setFieldValueByFieldName(String fieldName, Object obj, T fieldVaule) throws Exception {
        Class<?> cls = obj.getClass();
        String setMethodName = "set" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);
        Class<?> fieldValueClass = fieldVaule.getClass();
        Method setMethod;
        try {
            setMethod = cls.getMethod(setMethodName, fieldValueClass);
        } catch (NoSuchMethodException notExact) {
            setMethod = findAssignableSetter(cls, setMethodName, fieldValueClass);
            if (setMethod == null) {
                throw notExact;
            }
        }
        setMethod.invoke(obj, fieldVaule);
    }

    /**
     * Finds a public one-argument method named {@code methodName} whose
     * parameter type accepts {@code valueClass}, or null when none exists.
     */
    private static Method findAssignableSetter(Class<?> cls, String methodName, Class<?> valueClass) {
        for (Method method : cls.getMethods()) {
            if (method.getName().equals(methodName)
                    && method.getParameterCount() == 1
                    && method.getParameterTypes()[0].isAssignableFrom(valueClass)) {
                return method;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        try {
            DmpNavigationTreeDto dmpNavigationTreeDto = new DmpNavigationTreeDto();
            List<DmpNavigationTreeDto> list = new ArrayList<DmpNavigationTreeDto>();
            setFieldValueByFieldName("children", dmpNavigationTreeDto, list);
            System.out.println(getFieldValueByFieldName("children", dmpNavigationTreeDto));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
...@@ -122,6 +122,7 @@ public class SFTPUtils { ...@@ -122,6 +122,7 @@ public class SFTPUtils {
* @param remoteFileDirPath 要上传到的远程文件路径 * @param remoteFileDirPath 要上传到的远程文件路径
*/ */
public void singleUploadFile(String localFileDirPath,String uploadFileName,String remoteFileDirPath) { public void singleUploadFile(String localFileDirPath,String uploadFileName,String remoteFileDirPath) {
String pathTeString = "C:\\opt\\dmp\\dmp_web\\35\\705\\execArgs\\";
//本地文件绝对路径 //本地文件绝对路径
String localFileAbsolutePath = localFileDirPath+uploadFileName; String localFileAbsolutePath = localFileDirPath+uploadFileName;
String remoteFileAbsolutePath = remoteFileDirPath+"/"+uploadFileName; String remoteFileAbsolutePath = remoteFileDirPath+"/"+uploadFileName;
...@@ -129,7 +130,8 @@ public class SFTPUtils { ...@@ -129,7 +130,8 @@ public class SFTPUtils {
createRemoteDirs(remoteFileDirPath); createRemoteDirs(remoteFileDirPath);
try { try {
sftp.put(localFileAbsolutePath, remoteFileAbsolutePath,ChannelSftp.OVERWRITE); //sftp.put(localFileAbsolutePath, remoteFileAbsolutePath,ChannelSftp.OVERWRITE);
sftp.put(pathTeString+uploadFileName, remoteFileAbsolutePath,ChannelSftp.OVERWRITE);
sftp.chmod(Integer.parseInt("775",8), remoteFileAbsolutePath); sftp.chmod(Integer.parseInt("775",8), remoteFileAbsolutePath);
LOGGER.info("上传"+localFileAbsolutePath+" 到 "+remoteFileAbsolutePath+" 成功"); LOGGER.info("上传"+localFileAbsolutePath+" 到 "+remoteFileAbsolutePath+" 成功");
} catch (SftpException e) { } catch (SftpException e) {
......
...@@ -159,5 +159,4 @@ public class DmpNavigationTreeController { ...@@ -159,5 +159,4 @@ public class DmpNavigationTreeController {
return baseBeanResponse; return baseBeanResponse;
} }
} }
\ No newline at end of file
package com.jz.dmp.modules.controller.bean; package com.jz.dmp.modules.controller.bean;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
...@@ -56,6 +58,8 @@ public class MyDmpDevelopTaskConverter { ...@@ -56,6 +58,8 @@ public class MyDmpDevelopTaskConverter {
flowPro.setPublishedToProjectId(dmpDevelopTask.getProjectId().longValue()); flowPro.setPublishedToProjectId(dmpDevelopTask.getProjectId().longValue());
//dmp里生成的任务id //dmp里生成的任务id
flowPro.setTaskId(dmpDevelopTask.getId().longValue()); flowPro.setTaskId(dmpDevelopTask.getId().longValue());
//dmp生成树ID
flowPro.setTreeId(dmpDevelopTask.getTreeId().longValue());
//是否带版本号进行节点变更查询? //是否带版本号进行节点变更查询?
//检查节点名称要用到的参数? //检查节点名称要用到的参数?
...@@ -71,5 +75,21 @@ public class MyDmpDevelopTaskConverter { ...@@ -71,5 +75,21 @@ public class MyDmpDevelopTaskConverter {
return flowPro; return flowPro;
} }
/**
 * Wraps a task script (itself a JSON document) under a top-level
 * {@code "flowPro"} key.
 *
 * @param script JSON text describing the flow
 * @return JSON string of the form {@code {"flowPro": <script>}}
 */
private String scriptToFlowProJson(String script) {
    JSONObject parsedScript = JSONObject.parseObject(script);
    Map<String, Object> wrapper = new HashMap<String, Object>();
    wrapper.put("flowPro", parsedScript);
    return JSONObject.toJSONString(wrapper);
}
} }
...@@ -2,6 +2,7 @@ package com.jz.dmp.modules.controller.projconfig.bean; ...@@ -2,6 +2,7 @@ package com.jz.dmp.modules.controller.projconfig.bean;
import com.jz.dmp.modules.model.DmpProjectConfigInfo; import com.jz.dmp.modules.model.DmpProjectConfigInfo;
import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**项目配置表Dto /**项目配置表Dto
* @author ybz * @author ybz
...@@ -9,5 +10,21 @@ import io.swagger.annotations.ApiModel; ...@@ -9,5 +10,21 @@ import io.swagger.annotations.ApiModel;
*/ */
@ApiModel(value = "项目配置表Dto", description = "项目配置表Dto") @ApiModel(value = "项目配置表Dto", description = "项目配置表Dto")
public class DmpProjectConfigInfoDto extends DmpProjectConfigInfo { public class DmpProjectConfigInfoDto extends DmpProjectConfigInfo {
/**
*
*/
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "公共配置")
private DmpPublicConfigInfoDto dmpPublicConfigInfoDto;
/** @return the shared platform configuration attached to this project config */
public DmpPublicConfigInfoDto getDmpPublicConfigInfoDto() {
return dmpPublicConfigInfoDto;
}
/** @param dmpPublicConfigInfoDto the shared platform configuration to attach */
public void setDmpPublicConfigInfoDto(DmpPublicConfigInfoDto dmpPublicConfigInfoDto) {
this.dmpPublicConfigInfoDto = dmpPublicConfigInfoDto;
}
} }
...@@ -154,6 +154,30 @@ public class DmpPublicConfigInfoRequest extends BasePageBean { ...@@ -154,6 +154,30 @@ public class DmpPublicConfigInfoRequest extends BasePageBean {
*/ */
@ApiModelProperty(value = "元数据服务web地址") @ApiModelProperty(value = "元数据服务web地址")
private String atlasMonitorUrl; private String atlasMonitorUrl;
/**
* 远程连接默认SERVER地址
*/
@ApiModelProperty(value = "远程连接默认SERVER地址")
private String shellCmdServer;
/**
* 远程连接默认用户
*/
@ApiModelProperty(value = "远程连接默认用户")
private String shellCmdUser;
/**
* 远程连接默认用户密码
*/
@ApiModelProperty(value = "远程连接默认用户密码")
private String shellCmdPassword;
/**
* 上传配置的SFTP端口
*/
@ApiModelProperty(value = "上传配置的SFTP端口")
private Integer shellSftpPort;
/** /**
* 备注 * 备注
...@@ -394,6 +418,34 @@ public class DmpPublicConfigInfoRequest extends BasePageBean { ...@@ -394,6 +418,34 @@ public class DmpPublicConfigInfoRequest extends BasePageBean {
public void setAtlasMonitorUrl(String atlasMonitorUrl) { public void setAtlasMonitorUrl(String atlasMonitorUrl) {
this.atlasMonitorUrl = atlasMonitorUrl; this.atlasMonitorUrl = atlasMonitorUrl;
} }
/** @param shellCmdServer default remote SSH/SFTP server address */
public void setShellCmdServer(String shellCmdServer) {
    this.shellCmdServer = shellCmdServer;
}

/**
 * Added for symmetry: every other shell* property here has a getter/setter
 * pair, but shellCmdServer only had a setter.
 * NOTE(review): confirm no getShellCmdServer() exists elsewhere in this class.
 *
 * @return default remote SSH/SFTP server address
 */
public String getShellCmdServer() {
    return shellCmdServer;
}

/** @return default remote SSH/SFTP user */
public String getShellCmdUser() {
    return shellCmdUser;
}

/** @param shellCmdUser default remote SSH/SFTP user */
public void setShellCmdUser(String shellCmdUser) {
    this.shellCmdUser = shellCmdUser;
}

/** @return default remote SSH/SFTP password */
public String getShellCmdPassword() {
    return shellCmdPassword;
}

/** @param shellCmdPassword default remote SSH/SFTP password */
public void setShellCmdPassword(String shellCmdPassword) {
    this.shellCmdPassword = shellCmdPassword;
}

/** @return SFTP port used for configured uploads */
public Integer getShellSftpPort() {
    return shellSftpPort;
}

/** @param shellSftpPort SFTP port used for configured uploads */
public void setShellSftpPort(Integer shellSftpPort) {
    this.shellSftpPort = shellSftpPort;
}
public String getRemark() { public String getRemark() {
return remark; return remark;
......
...@@ -88,7 +88,7 @@ public class DmpNavigationTree implements Serializable { ...@@ -88,7 +88,7 @@ public class DmpNavigationTree implements Serializable {
* 父节点ID * 父节点ID
*/ */
@ApiModelProperty(value = "父节点ID") @ApiModelProperty(value = "父节点ID")
private Integer parentId; public Integer parentId;
public Integer getId() { public Integer getId() {
......
...@@ -163,6 +163,30 @@ public class DmpPublicConfigInfo implements Serializable{ ...@@ -163,6 +163,30 @@ public class DmpPublicConfigInfo implements Serializable{
*/ */
@ApiModelProperty(value = "元数据服务web地址") @ApiModelProperty(value = "元数据服务web地址")
private String atlasMonitorUrl; private String atlasMonitorUrl;
/**
* 远程连接默认SERVER地址
*/
@ApiModelProperty(value = "远程连接默认SERVER地址")
private String shellCmdServer;
/**
* 远程连接默认用户
*/
@ApiModelProperty(value = "远程连接默认用户")
private String shellCmdUser;
/**
* 远程连接默认用户密码
*/
@ApiModelProperty(value = "远程连接默认用户密码")
private String shellCmdPassword;
/**
* 上传配置的SFTP端口
*/
@ApiModelProperty(value = "上传配置的SFTP端口")
private Integer shellSftpPort;
/** /**
* 备注 * 备注
...@@ -396,6 +420,38 @@ public class DmpPublicConfigInfo implements Serializable{ ...@@ -396,6 +420,38 @@ public class DmpPublicConfigInfo implements Serializable{
public void setAtlasMonitorUrl(String atlasMonitorUrl) { public void setAtlasMonitorUrl(String atlasMonitorUrl) {
this.atlasMonitorUrl = atlasMonitorUrl; this.atlasMonitorUrl = atlasMonitorUrl;
} }
/** @return default remote SSH/SFTP server address */
public String getShellCmdServer() {
return shellCmdServer;
}
/** @param shellCmdServer default remote SSH/SFTP server address */
public void setShellCmdServer(String shellCmdServer) {
this.shellCmdServer = shellCmdServer;
}
/** @return default remote SSH/SFTP user */
public String getShellCmdUser() {
return shellCmdUser;
}
/** @param shellCmdUser default remote SSH/SFTP user */
public void setShellCmdUser(String shellCmdUser) {
this.shellCmdUser = shellCmdUser;
}
/** @return default remote SSH/SFTP password */
public String getShellCmdPassword() {
return shellCmdPassword;
}
/** @param shellCmdPassword default remote SSH/SFTP password */
public void setShellCmdPassword(String shellCmdPassword) {
this.shellCmdPassword = shellCmdPassword;
}
/** @return SFTP port used for configured uploads */
public Integer getShellSftpPort() {
return shellSftpPort;
}
/** @param shellSftpPort SFTP port used for configured uploads */
public void setShellSftpPort(Integer shellSftpPort) {
this.shellSftpPort = shellSftpPort;
}
public String getRemark() { public String getRemark() {
return remark; return remark;
......
package com.jz.dmp.modules.model;
import java.io.Serializable;
import java.util.Map;
/**
 * User information persisted into Redis as a Spring session attribute so
 * that Azkaban can resolve the DMP user's name and role permissions.
 */
public class SSOUserInfo implements Serializable {

    // Explicit serialVersionUID: instances are serialized into Redis, so the
    // id must stay stable across class changes or old sessions become
    // undeserializable. The original class omitted it.
    private static final long serialVersionUID = 1L;

    // Login name of the user.
    private String userName;

    // Role type -> role remark (Azkaban permission string) for the user.
    private Map<String, String> azkabanRoleRefPermissions;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public Map<String, String> getAzkabanRoleRefPermissions() {
        return azkabanRoleRefPermissions;
    }

    public void setAzkabanRoleRefPermissions(Map<String, String> azkabanRoleRefPermissions) {
        this.azkabanRoleRefPermissions = azkabanRoleRefPermissions;
    }
}
\ No newline at end of file
...@@ -17,6 +17,7 @@ import org.springframework.util.StringUtils; ...@@ -17,6 +17,7 @@ import org.springframework.util.StringUtils;
import com.jz.common.bean.BaseBeanResponse; import com.jz.common.bean.BaseBeanResponse;
import com.jz.common.constant.StatuConstant; import com.jz.common.constant.StatuConstant;
import com.jz.common.persistence.BaseService; import com.jz.common.persistence.BaseService;
import com.jz.common.utils.CommonUtils;
import com.jz.dmp.modules.controller.bean.DmpNavigationTreeDto; import com.jz.dmp.modules.controller.bean.DmpNavigationTreeDto;
import com.jz.dmp.modules.controller.bean.DmpNavigationTreeRequest; import com.jz.dmp.modules.controller.bean.DmpNavigationTreeRequest;
import com.jz.dmp.modules.controller.bean.MyDmpNavigationTreeConverter; import com.jz.dmp.modules.controller.bean.MyDmpNavigationTreeConverter;
...@@ -169,7 +170,7 @@ public class DmpNavigationTreeServiceImpl extends BaseService implements DmpNavi ...@@ -169,7 +170,7 @@ public class DmpNavigationTreeServiceImpl extends BaseService implements DmpNavi
if (dmpNavigationTreeRequest.getParentId()!=null){ if (dmpNavigationTreeRequest.getParentId()!=null){
dtos = dmpArrangeTree(dmpNavigationTreeRequest.getParentId(), dtos); dtos = dmpArrangeTree(dmpNavigationTreeRequest.getParentId(), dtos);
} }
baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE); baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE);
baseBeanResponse.setMessage("查询成功"); baseBeanResponse.setMessage("查询成功");
baseBeanResponse.setDatas(dtos); baseBeanResponse.setDatas(dtos);
......
...@@ -7,6 +7,7 @@ import org.springframework.beans.factory.annotation.Autowired; ...@@ -7,6 +7,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import com.jz.common.bean.BaseBeanResponse;
import com.jz.common.bean.BaseResponse; import com.jz.common.bean.BaseResponse;
import com.jz.common.constant.StatuConstant; import com.jz.common.constant.StatuConstant;
import com.jz.common.utils.FlowParseTool; import com.jz.common.utils.FlowParseTool;
...@@ -16,6 +17,7 @@ import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro; ...@@ -16,6 +17,7 @@ import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowPro;
import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto; import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto;
import com.jz.dmp.modules.dao.DmpProjectDao; import com.jz.dmp.modules.dao.DmpProjectDao;
import com.jz.dmp.modules.dao.DmpWorkFlowSubmitDetailsDao; import com.jz.dmp.modules.dao.DmpWorkFlowSubmitDetailsDao;
import com.jz.dmp.modules.dao.projconfig.DmpProjectConfigInfoMapper;
import com.jz.dmp.modules.model.DmpProject; import com.jz.dmp.modules.model.DmpProject;
import com.jz.dmp.modules.model.DmpProjectSystemInfo; import com.jz.dmp.modules.model.DmpProjectSystemInfo;
import com.jz.dmp.modules.model.DmpWorkFlowSubmitDetails; import com.jz.dmp.modules.model.DmpWorkFlowSubmitDetails;
...@@ -24,6 +26,7 @@ import com.jz.dmp.modules.service.DmpNavigationTreeService; ...@@ -24,6 +26,7 @@ import com.jz.dmp.modules.service.DmpNavigationTreeService;
import com.jz.dmp.modules.service.DmpProjectService; import com.jz.dmp.modules.service.DmpProjectService;
import com.jz.dmp.modules.service.DmpWorkFlowSubmitDetailsService; import com.jz.dmp.modules.service.DmpWorkFlowSubmitDetailsService;
import com.jz.dmp.modules.service.FlowService; import com.jz.dmp.modules.service.FlowService;
import com.jz.dmp.modules.service.projconfig.DmpProjectConfigInfoService;
/** /**
* @ClassName: FlowServiceImpl * @ClassName: FlowServiceImpl
...@@ -51,6 +54,9 @@ public class FlowServiceImpl implements FlowService { ...@@ -51,6 +54,9 @@ public class FlowServiceImpl implements FlowService {
@Autowired @Autowired
private DmpWorkFlowSubmitDetailsDao dmpWorkFlowSubmitDetailsDao; private DmpWorkFlowSubmitDetailsDao dmpWorkFlowSubmitDetailsDao;
@Autowired
private DmpProjectConfigInfoService dmpProjectConfigInfoService;
/** /**
*工作流发布 *工作流发布
...@@ -63,9 +69,12 @@ public class FlowServiceImpl implements FlowService { ...@@ -63,9 +69,12 @@ public class FlowServiceImpl implements FlowService {
flowPro.setCheckVerion(true); flowPro.setCheckVerion(true);
Long publishedToProjectId = flowPro.getPublishedToProjectId(); Long publishedToProjectId = flowPro.getPublishedToProjectId();
DmpProject publishToProject = dmpProjectDao.get(publishedToProjectId); DmpProject publishToProject = dmpProjectDao.get(publishedToProjectId);
DmpProjectSystemInfo publishToProjectSystemInfo = dmpProjectService.getProjectSystemInfo(publishedToProjectId); //DmpProjectSystemInfo publishToProjectSystemInfo = dmpProjectService.getProjectSystemInfo(publishedToProjectId);
//DmpProjectConfigInfoDto dmpProjectConfigInfoDto = BaseBeanResponse<DmpProjectConfigInfoDto> baseBeanResponse = dmpProjectConfigInfoService.findByProjectId(publishedToProjectId.intValue(), null);
DmpProjectConfigInfoDto dmpProjectConfigInfoDto = baseBeanResponse.getData();
try { try {
/*
FlowParseTool flowParseTool = new FlowParseTool(flowPro, FlowParseTool flowParseTool = new FlowParseTool(flowPro,
publishToProject, publishToProject,
publishToProjectSystemInfo, publishToProjectSystemInfo,
...@@ -73,6 +82,15 @@ public class FlowServiceImpl implements FlowService { ...@@ -73,6 +82,15 @@ public class FlowServiceImpl implements FlowService {
dmpNavigationTreeService, dmpNavigationTreeService,
dmpWorkFlowSubmitDetailsService dmpWorkFlowSubmitDetailsService
); );
*/
FlowParseTool flowParseTool = new FlowParseTool(flowPro,
publishToProject,
dmpProjectConfigInfoDto,
dmpDevelopTaskService,
dmpNavigationTreeService,
dmpWorkFlowSubmitDetailsService
);
//保存发布信息 //保存发布信息
List<FlowNodeChangeInfo> flowNodeChangeList = flowParseTool.getChangedNodes(); List<FlowNodeChangeInfo> flowNodeChangeList = flowParseTool.getChangedNodes();
......
...@@ -29,6 +29,8 @@ import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoBatch; ...@@ -29,6 +29,8 @@ import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoBatch;
import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto; import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto;
import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoRequest; import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoRequest;
import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectEngineParamDto; import com.jz.dmp.modules.controller.projconfig.bean.DmpProjectEngineParamDto;
import com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoDto;
import com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoRequest;
import com.jz.dmp.modules.dao.projconfig.DmpProjectConfigEngineMapper; import com.jz.dmp.modules.dao.projconfig.DmpProjectConfigEngineMapper;
import com.jz.dmp.modules.dao.projconfig.DmpProjectConfigInfoMapper; import com.jz.dmp.modules.dao.projconfig.DmpProjectConfigInfoMapper;
import com.jz.dmp.modules.dao.projconfig.DmpProjectEngineParamMapper; import com.jz.dmp.modules.dao.projconfig.DmpProjectEngineParamMapper;
...@@ -36,6 +38,7 @@ import com.jz.dmp.modules.model.DmpProjectConfigEngine; ...@@ -36,6 +38,7 @@ import com.jz.dmp.modules.model.DmpProjectConfigEngine;
import com.jz.dmp.modules.model.DmpProjectConfigInfo; import com.jz.dmp.modules.model.DmpProjectConfigInfo;
import com.jz.dmp.modules.model.DmpProjectEngineParam; import com.jz.dmp.modules.model.DmpProjectEngineParam;
import com.jz.dmp.modules.service.projconfig.DmpProjectConfigInfoService; import com.jz.dmp.modules.service.projconfig.DmpProjectConfigInfoService;
import com.jz.dmp.modules.service.projconfig.DmpPublicConfigInfoService;
/** /**
* 项目配置表服务的实现? * 项目配置表服务的实现?
...@@ -54,6 +57,9 @@ public class DmpProjectConfigInfoServiceImpl extends BaseService implements DmpP ...@@ -54,6 +57,9 @@ public class DmpProjectConfigInfoServiceImpl extends BaseService implements DmpP
private DmpProjectConfigEngineMapper dmpProjectConfigEngineMapper; private DmpProjectConfigEngineMapper dmpProjectConfigEngineMapper;
@Autowired @Autowired
private DmpProjectEngineParamMapper dmpProjectEngineParamMapper; private DmpProjectEngineParamMapper dmpProjectEngineParamMapper;
@Autowired
private DmpPublicConfigInfoService dmpPublicConfigInfoService;
/* /*
* (non-Javadoc) * (non-Javadoc)
...@@ -668,6 +674,13 @@ public class DmpProjectConfigInfoServiceImpl extends BaseService implements DmpP ...@@ -668,6 +674,13 @@ public class DmpProjectConfigInfoServiceImpl extends BaseService implements DmpP
if (!CollectionUtils.isEmpty(list)) { if (!CollectionUtils.isEmpty(list)) {
dto = list.get(0); dto = list.get(0);
//设置公共属性
DmpPublicConfigInfoRequest request = new DmpPublicConfigInfoRequest();
BaseBeanResponse<DmpPublicConfigInfoDto> configInfoBeanResponse = dmpPublicConfigInfoService.findList(request, null);
List<DmpPublicConfigInfoDto> configInfoDtos = configInfoBeanResponse.getDatas();
if (!CollectionUtils.isEmpty(configInfoDtos)) {
dto.setDmpPublicConfigInfoDto(configInfoDtos.get(0));
}
} }
baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE); baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE);
......
...@@ -26,6 +26,10 @@ ...@@ -26,6 +26,10 @@
<result column="azkaban_exector_shell_export_data" property="azkabanExectorShellExportData" jdbcType="VARCHAR" /> <result column="azkaban_exector_shell_export_data" property="azkabanExectorShellExportData" jdbcType="VARCHAR" />
<result column="azkaban_monitor_url" property="azkabanMonitorUrl" jdbcType="VARCHAR" /> <result column="azkaban_monitor_url" property="azkabanMonitorUrl" jdbcType="VARCHAR" />
<result column="atlas_monitor_url" property="atlasMonitorUrl" jdbcType="VARCHAR" /> <result column="atlas_monitor_url" property="atlasMonitorUrl" jdbcType="VARCHAR" />
<result column="shell_cmd_server" property="shellCmdServer" jdbcType="VARCHAR" />
<result column="shell_cmd_user" property="shellCmdUser" jdbcType="VARCHAR" />
<result column="shell_cmd_password" property="shellCmdPassword" jdbcType="VARCHAR" />
<result column="shell_sftp_port" property="shellSftpPort" jdbcType="INTEGER" />
<result column="remark" property="remark" jdbcType="VARCHAR" /> <result column="remark" property="remark" jdbcType="VARCHAR" />
<result column="data_status" property="dataStatus" jdbcType="CHAR" /> <result column="data_status" property="dataStatus" jdbcType="CHAR" />
<result column="create_user_id" property="createUserId" jdbcType="INTEGER" /> <result column="create_user_id" property="createUserId" jdbcType="INTEGER" />
...@@ -43,8 +47,9 @@ ...@@ -43,8 +47,9 @@
kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path, kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path,
hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers, hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers,
azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path, azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path,
azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, remark, azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, shell_cmd_server,
data_status, create_user_id, create_time, update_user_id, update_time shell_cmd_user, shell_cmd_password, shell_sftp_port, remark, data_status,
create_user_id, create_time, update_user_id, update_time
</sql> </sql>
<sql id="BaseDto_Column_List"> <sql id="BaseDto_Column_List">
...@@ -152,6 +157,18 @@ ...@@ -152,6 +157,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR} AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR}
</if> </if>
<if test="shellCmdServer != null" >
AND shell_cmd_server = #{shellCmdServer,jdbcType=VARCHAR}
</if>
<if test="shellCmdUser != null" >
AND shell_cmd_user = #{shellCmdUser,jdbcType=VARCHAR}
</if>
<if test="shellCmdPassword != null" >
AND shell_cmd_password = #{shellCmdPassword,jdbcType=VARCHAR}
</if>
<if test="shellSftpPort != null" >
AND shell_sftp_port = #{shellSftpPort,jdbcType=INTEGER}
</if>
<if test="remark != null" > <if test="remark != null" >
AND remark = #{remark,jdbcType=VARCHAR} AND remark = #{remark,jdbcType=VARCHAR}
</if> </if>
...@@ -259,6 +276,18 @@ ...@@ -259,6 +276,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR} AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR}
</if> </if>
<if test="shellCmdServer != null" >
AND shell_cmd_server = #{shellCmdServer,jdbcType=VARCHAR}
</if>
<if test="shellCmdUser != null" >
AND shell_cmd_user = #{shellCmdUser,jdbcType=VARCHAR}
</if>
<if test="shellCmdPassword != null" >
AND shell_cmd_password = #{shellCmdPassword,jdbcType=VARCHAR}
</if>
<if test="shellSftpPort != null" >
AND shell_sftp_port = #{shellSftpPort,jdbcType=INTEGER}
</if>
<if test="remark != null" > <if test="remark != null" >
AND remark = #{remark,jdbcType=VARCHAR} AND remark = #{remark,jdbcType=VARCHAR}
</if> </if>
...@@ -294,16 +323,18 @@ ...@@ -294,16 +323,18 @@
kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path, kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path,
hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers, hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers,
azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path, azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path,
azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, remark, azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, shell_cmd_server,
data_status, create_user_id, create_time, update_user_id, update_time shell_cmd_user, shell_cmd_password, shell_sftp_port, remark, data_status,
create_user_id, create_time, update_user_id, update_time
) )
values ( values (
#{publicConfigId,jdbcType=INTEGER}, #{kerberosIsenable,jdbcType=CHAR}, #{kerberosJaasClientName,jdbcType=VARCHAR}, #{kerberosKrb5Conf,jdbcType=VARCHAR}, #{kerberosJaasConf,jdbcType=VARCHAR}, #{publicConfigId,jdbcType=INTEGER}, #{kerberosIsenable,jdbcType=CHAR}, #{kerberosJaasClientName,jdbcType=VARCHAR}, #{kerberosKrb5Conf,jdbcType=VARCHAR}, #{kerberosJaasConf,jdbcType=VARCHAR},
#{kerberosFqdn,jdbcType=VARCHAR}, #{kerberosKeytabConf,jdbcType=VARCHAR}, #{kerberosKeytabUser,jdbcType=VARCHAR}, #{kerberosSparkJaasConf,jdbcType=VARCHAR}, #{hdfsHttpPath,jdbcType=VARCHAR}, #{kerberosFqdn,jdbcType=VARCHAR}, #{kerberosKeytabConf,jdbcType=VARCHAR}, #{kerberosKeytabUser,jdbcType=VARCHAR}, #{kerberosSparkJaasConf,jdbcType=VARCHAR}, #{hdfsHttpPath,jdbcType=VARCHAR},
#{hdfsSyncingPath,jdbcType=VARCHAR}, #{hdfsUserName,jdbcType=VARCHAR}, #{kafkaConectorUrl,jdbcType=VARCHAR}, #{kafkaSchemaRegisterUrl,jdbcType=VARCHAR}, #{kafkaBootstrapServers,jdbcType=VARCHAR}, #{hdfsSyncingPath,jdbcType=VARCHAR}, #{hdfsUserName,jdbcType=VARCHAR}, #{kafkaConectorUrl,jdbcType=VARCHAR}, #{kafkaSchemaRegisterUrl,jdbcType=VARCHAR}, #{kafkaBootstrapServers,jdbcType=VARCHAR},
#{azkabanExectorShellExec,jdbcType=VARCHAR}, #{azkabanExectorSqlExec,jdbcType=VARCHAR}, #{azkabanExectorXmlExec,jdbcType=VARCHAR}, #{azkabanExectorSqlPath,jdbcType=VARCHAR}, #{azkabanExectorShellPath,jdbcType=VARCHAR}, #{azkabanExectorShellExec,jdbcType=VARCHAR}, #{azkabanExectorSqlExec,jdbcType=VARCHAR}, #{azkabanExectorXmlExec,jdbcType=VARCHAR}, #{azkabanExectorSqlPath,jdbcType=VARCHAR}, #{azkabanExectorShellPath,jdbcType=VARCHAR},
#{azkabanLocalTaskFilePath,jdbcType=VARCHAR}, #{azkabanExectorShellExportData,jdbcType=VARCHAR}, #{azkabanMonitorUrl,jdbcType=VARCHAR}, #{atlasMonitorUrl,jdbcType=VARCHAR}, #{remark,jdbcType=VARCHAR}, #{azkabanLocalTaskFilePath,jdbcType=VARCHAR}, #{azkabanExectorShellExportData,jdbcType=VARCHAR}, #{azkabanMonitorUrl,jdbcType=VARCHAR}, #{atlasMonitorUrl,jdbcType=VARCHAR}, #{shellCmdServer,jdbcType=VARCHAR},
#{dataStatus,jdbcType=CHAR}, #{createUserId,jdbcType=INTEGER}, #{createTime,jdbcType=TIMESTAMP}, #{updateUserId,jdbcType=INTEGER}, #{updateTime,jdbcType=TIMESTAMP} #{shellCmdUser,jdbcType=VARCHAR}, #{shellCmdPassword,jdbcType=VARCHAR}, #{shellSftpPort,jdbcType=INTEGER}, #{remark,jdbcType=VARCHAR}, #{dataStatus,jdbcType=CHAR},
#{createUserId,jdbcType=INTEGER}, #{createTime,jdbcType=TIMESTAMP}, #{updateUserId,jdbcType=INTEGER}, #{updateTime,jdbcType=TIMESTAMP}
) )
</insert> </insert>
...@@ -314,8 +345,9 @@ ...@@ -314,8 +345,9 @@
kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path, kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path,
hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers, hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers,
azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path, azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path,
azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, remark, azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, shell_cmd_server,
data_status, create_user_id, create_time, update_user_id, update_time shell_cmd_user, shell_cmd_password, shell_sftp_port, remark, data_status,
create_user_id, create_time, update_user_id, update_time
) )
values values
<foreach collection="list" item="item" separator=","> <foreach collection="list" item="item" separator=",">
...@@ -324,8 +356,9 @@ ...@@ -324,8 +356,9 @@
#{item.kerberosFqdn,jdbcType=VARCHAR}, #{item.kerberosKeytabConf,jdbcType=VARCHAR}, #{item.kerberosKeytabUser,jdbcType=VARCHAR}, #{item.kerberosSparkJaasConf,jdbcType=VARCHAR}, #{item.hdfsHttpPath,jdbcType=VARCHAR}, #{item.kerberosFqdn,jdbcType=VARCHAR}, #{item.kerberosKeytabConf,jdbcType=VARCHAR}, #{item.kerberosKeytabUser,jdbcType=VARCHAR}, #{item.kerberosSparkJaasConf,jdbcType=VARCHAR}, #{item.hdfsHttpPath,jdbcType=VARCHAR},
#{item.hdfsSyncingPath,jdbcType=VARCHAR}, #{item.hdfsUserName,jdbcType=VARCHAR}, #{item.kafkaConectorUrl,jdbcType=VARCHAR}, #{item.kafkaSchemaRegisterUrl,jdbcType=VARCHAR}, #{item.kafkaBootstrapServers,jdbcType=VARCHAR}, #{item.hdfsSyncingPath,jdbcType=VARCHAR}, #{item.hdfsUserName,jdbcType=VARCHAR}, #{item.kafkaConectorUrl,jdbcType=VARCHAR}, #{item.kafkaSchemaRegisterUrl,jdbcType=VARCHAR}, #{item.kafkaBootstrapServers,jdbcType=VARCHAR},
#{item.azkabanExectorShellExec,jdbcType=VARCHAR}, #{item.azkabanExectorSqlExec,jdbcType=VARCHAR}, #{item.azkabanExectorXmlExec,jdbcType=VARCHAR}, #{item.azkabanExectorSqlPath,jdbcType=VARCHAR}, #{item.azkabanExectorShellPath,jdbcType=VARCHAR}, #{item.azkabanExectorShellExec,jdbcType=VARCHAR}, #{item.azkabanExectorSqlExec,jdbcType=VARCHAR}, #{item.azkabanExectorXmlExec,jdbcType=VARCHAR}, #{item.azkabanExectorSqlPath,jdbcType=VARCHAR}, #{item.azkabanExectorShellPath,jdbcType=VARCHAR},
#{item.azkabanLocalTaskFilePath,jdbcType=VARCHAR}, #{item.azkabanExectorShellExportData,jdbcType=VARCHAR}, #{item.azkabanMonitorUrl,jdbcType=VARCHAR}, #{item.atlasMonitorUrl,jdbcType=VARCHAR}, #{item.remark,jdbcType=VARCHAR}, #{item.azkabanLocalTaskFilePath,jdbcType=VARCHAR}, #{item.azkabanExectorShellExportData,jdbcType=VARCHAR}, #{item.azkabanMonitorUrl,jdbcType=VARCHAR}, #{item.atlasMonitorUrl,jdbcType=VARCHAR}, #{item.shellCmdServer,jdbcType=VARCHAR},
#{item.dataStatus,jdbcType=CHAR}, #{item.createUserId,jdbcType=INTEGER}, #{item.createTime,jdbcType=TIMESTAMP}, #{item.updateUserId,jdbcType=INTEGER}, #{item.updateTime,jdbcType=TIMESTAMP} #{item.shellCmdUser,jdbcType=VARCHAR}, #{item.shellCmdPassword,jdbcType=VARCHAR}, #{item.shellSftpPort,jdbcType=INTEGER}, #{item.remark,jdbcType=VARCHAR}, #{item.dataStatus,jdbcType=CHAR},
#{item.createUserId,jdbcType=INTEGER}, #{item.createTime,jdbcType=TIMESTAMP}, #{item.updateUserId,jdbcType=INTEGER}, #{item.updateTime,jdbcType=TIMESTAMP}
) )
</foreach> </foreach>
</insert> </insert>
...@@ -406,6 +439,18 @@ ...@@ -406,6 +439,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
atlas_monitor_url, atlas_monitor_url,
</if> </if>
<if test="shellCmdServer != null" >
shell_cmd_server,
</if>
<if test="shellCmdUser != null" >
shell_cmd_user,
</if>
<if test="shellCmdPassword != null" >
shell_cmd_password,
</if>
<if test="shellSftpPort != null" >
shell_sftp_port,
</if>
<if test="remark != null" > <if test="remark != null" >
remark, remark,
</if> </if>
...@@ -498,6 +543,18 @@ ...@@ -498,6 +543,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
#{atlasMonitorUrl,jdbcType=VARCHAR}, #{atlasMonitorUrl,jdbcType=VARCHAR},
</if> </if>
<if test="shellCmdServer != null" >
#{shellCmdServer,jdbcType=VARCHAR},
</if>
<if test="shellCmdUser != null" >
#{shellCmdUser,jdbcType=VARCHAR},
</if>
<if test="shellCmdPassword != null" >
#{shellCmdPassword,jdbcType=VARCHAR},
</if>
<if test="shellSftpPort != null" >
#{shellSftpPort,jdbcType=INTEGER},
</if>
<if test="remark != null" > <if test="remark != null" >
#{remark,jdbcType=VARCHAR}, #{remark,jdbcType=VARCHAR},
</if> </if>
...@@ -546,6 +603,10 @@ ...@@ -546,6 +603,10 @@
azkaban_exector_shell_export_data = #{azkabanExectorShellExportData,jdbcType=VARCHAR}, azkaban_exector_shell_export_data = #{azkabanExectorShellExportData,jdbcType=VARCHAR},
azkaban_monitor_url = #{azkabanMonitorUrl,jdbcType=VARCHAR}, azkaban_monitor_url = #{azkabanMonitorUrl,jdbcType=VARCHAR},
atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR}, atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR},
shell_cmd_server = #{shellCmdServer,jdbcType=VARCHAR},
shell_cmd_user = #{shellCmdUser,jdbcType=VARCHAR},
shell_cmd_password = #{shellCmdPassword,jdbcType=VARCHAR},
shell_sftp_port = #{shellSftpPort,jdbcType=INTEGER},
remark = #{remark,jdbcType=VARCHAR}, remark = #{remark,jdbcType=VARCHAR},
data_status = #{dataStatus,jdbcType=CHAR}, data_status = #{dataStatus,jdbcType=CHAR},
create_user_id = #{createUserId,jdbcType=INTEGER}, create_user_id = #{createUserId,jdbcType=INTEGER},
...@@ -631,6 +692,18 @@ ...@@ -631,6 +692,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR}, atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR},
</if> </if>
<if test="shellCmdServer != null" >
shell_cmd_server = #{shellCmdServer,jdbcType=VARCHAR},
</if>
<if test="shellCmdUser != null" >
shell_cmd_user = #{shellCmdUser,jdbcType=VARCHAR},
</if>
<if test="shellCmdPassword != null" >
shell_cmd_password = #{shellCmdPassword,jdbcType=VARCHAR},
</if>
<if test="shellSftpPort != null" >
shell_sftp_port = #{shellSftpPort,jdbcType=INTEGER},
</if>
<if test="remark != null" > <if test="remark != null" >
remark = #{remark,jdbcType=VARCHAR}, remark = #{remark,jdbcType=VARCHAR},
</if> </if>
...@@ -731,6 +804,18 @@ ...@@ -731,6 +804,18 @@
<if test="atlasMonitorUrl != null" > <if test="atlasMonitorUrl != null" >
AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR} AND atlas_monitor_url = #{atlasMonitorUrl,jdbcType=VARCHAR}
</if> </if>
<if test="shellCmdServer != null" >
AND shell_cmd_server = #{shellCmdServer,jdbcType=VARCHAR}
</if>
<if test="shellCmdUser != null" >
AND shell_cmd_user = #{shellCmdUser,jdbcType=VARCHAR}
</if>
<if test="shellCmdPassword != null" >
AND shell_cmd_password = #{shellCmdPassword,jdbcType=VARCHAR}
</if>
<if test="shellSftpPort != null" >
AND shell_sftp_port = #{shellSftpPort,jdbcType=INTEGER}
</if>
<if test="remark != null" > <if test="remark != null" >
AND remark = #{remark,jdbcType=VARCHAR} AND remark = #{remark,jdbcType=VARCHAR}
</if> </if>
...@@ -755,7 +840,6 @@ ...@@ -755,7 +840,6 @@
<if test="updateTimeEnd != null" > <if test="updateTimeEnd != null" >
AND update_time <![CDATA[ <= ]]> #{updateTimeEnd,jdbcType=TIMESTAMP} AND update_time <![CDATA[ <= ]]> #{updateTimeEnd,jdbcType=TIMESTAMP}
</if> </if>
AND data_status='1'
</where> </where>
</select> </select>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment