Commit bfec0993 authored by sml's avatar sml

代码提交

parent 00dbac5e
package com.jz.common.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @ClassName: FieldAssist
 * @Description: Marks a request-bean field with the database column expression
 *               to use when that field is chosen as the sort key.
 *               (Previous header named "ResubmitCheck" — copied from another class.)
 * @author ybz
 * @date 2021-01-21
 *
 */
@Target({ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface FieldAssist {
String orderBy();//column expression (e.g. "task.create_time") placed into the ORDER BY clause
}
package com.jz.common.bean; package com.jz.common.bean;
import java.lang.reflect.Field;
import org.apache.commons.lang3.StringUtils;
import com.jz.common.annotation.FieldAssist;
import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty; import io.swagger.annotations.ApiModelProperty;
...@@ -16,6 +22,12 @@ public class BasePageBean { ...@@ -16,6 +22,12 @@ public class BasePageBean {
@ApiModelProperty(value = "每页显示记录数") @ApiModelProperty(value = "每页显示记录数")
private int pageSize = 10; private int pageSize = 10;
@ApiModelProperty(value = "排序字段")
private String orderColumn;
@ApiModelProperty(value = "排序方式(asc:升序;desc:降序)")
private String orderType;
public int getPageNum() { public int getPageNum() {
return pageNum; return pageNum;
} }
...@@ -32,6 +44,41 @@ public class BasePageBean { ...@@ -32,6 +44,41 @@ public class BasePageBean {
this.pageSize = pageSize; this.pageSize = pageSize;
} }
/** @return the request-supplied sort field name (a bean property name). */
public String getOrderColumn() {
return orderColumn;
}
/** Sets the sort field name (a bean property name, resolved via FieldAssist). */
public void setOrderColumn(String orderColumn) {
this.orderColumn = orderColumn;
}
/** @return the sort direction ("asc" or "desc"). */
public String getOrderType() {
return orderType;
}
/** Sets the sort direction ("asc" or "desc"). */
public void setOrderType(String orderType) {
this.orderType = orderType;
}
/**
 * Builds the ORDER BY fragment ("column direction") for the requested sort.
 *
 * <p>If the bean field named by {@code orderColumn} carries a {@link FieldAssist}
 * annotation, its {@code orderBy()} column expression is used in place of the raw
 * request value.</p>
 *
 * @param cls the request-bean class whose declared fields are inspected
 * @return "column asc|desc", or {@code null} when no sort was requested
 * @throws RuntimeException if {@code orderColumn} is not a declared field of
 *         {@code cls}, or {@code orderType} is neither asc nor desc
 */
public String takeOrderByStr(Class<?> cls) {
    if (StringUtils.isEmpty(orderColumn) || StringUtils.isEmpty(orderType)) {
        return null;
    }
    // The returned fragment is concatenated into SQL downstream (PageHelper orderBy),
    // so only accept the two documented directions — a crafted orderType must not inject SQL.
    if (!"asc".equalsIgnoreCase(orderType) && !"desc".equalsIgnoreCase(orderType)) {
        throw new RuntimeException("orderType must be asc or desc, got: " + orderType);
    }
    // Resolve into a local variable. The original wrote the mapped column (e.g.
    // "task.create_time") back into this.orderColumn, which made any second call fail:
    // no bean field exists under that mapped name.
    String column = orderColumn;
    try {
        Field field = cls.getDeclaredField(orderColumn);
        if (field.isAnnotationPresent(FieldAssist.class)) {
            column = field.getAnnotation(FieldAssist.class).orderBy();
        }
    } catch (Exception e) {
        // Keep the cause chain; the original threw a bare RuntimeException with a vague message.
        throw new RuntimeException("No sortable field named '" + orderColumn + "' on " + cls.getName(), e);
    }
    return column + " " + orderType;
}
} }
...@@ -723,5 +723,6 @@ public class AzkabanApiUtils2 { ...@@ -723,5 +723,6 @@ public class AzkabanApiUtils2 {
return list; return list;
} }
} }
...@@ -11,7 +11,7 @@ import java.util.UUID; ...@@ -11,7 +11,7 @@ import java.util.UUID;
*/ */
public class CodeGeneratorUtils { public class CodeGeneratorUtils {
//任务起始版本 //任务起始版本
private static final String TASKVERSION_START = "V1.0"; private static final String TASKVERSION_START = "1";
/** /**
* @Title: generatorNextTaskVesion * @Title: generatorNextTaskVesion
...@@ -27,19 +27,10 @@ public class CodeGeneratorUtils { ...@@ -27,19 +27,10 @@ public class CodeGeneratorUtils {
return TASKVERSION_START; return TASKVERSION_START;
} }
String[] strs = version.substring(1, version.length()).split("."); Long number = Long.parseLong(version);
number++;
Integer number_1 = Integer.parseInt(strs[0]); return number.toString();
Integer number_2 = Integer.parseInt(strs[1]);
if (number_2 + 1 > 9) {
number_2 = 0;
number_1 += 1;
}else {
number_2 += 1;
}
return "V"+number_1+"."+number_2;
} }
/** /**
......
...@@ -14,6 +14,7 @@ import org.springframework.data.redis.core.RedisTemplate; ...@@ -14,6 +14,7 @@ import org.springframework.data.redis.core.RedisTemplate;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.jz.common.constant.CommConstant;
import com.jz.common.enums.NodeChangeTypeEnum; import com.jz.common.enums.NodeChangeTypeEnum;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNode; import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNode;
import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNodeChangeInfo; import com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNodeChangeInfo;
...@@ -219,8 +220,15 @@ public class FlowParseTool { ...@@ -219,8 +220,15 @@ public class FlowParseTool {
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
tempMap = nodesList.get(i); tempMap = nodesList.get(i);
String nodeId = tempMap.get("id"); String nodeId = tempMap.get("id");
String nodeName = tempMap.get("label");
String nodeType = tempMap.get("taskType"); String nodeType = tempMap.get("taskType");
String nodeName = "";
if (nodeType.equals(CommConstant.WORK_TYPE_START)) {
nodeName = flowPro.getFlowName()+"_start";
}else if (nodeType.equals(CommConstant.WORK_TYPE_STOP)) {
nodeName = flowPro.getFlowName();
}else {
nodeName = tempMap.get("label")+"_"+flowPro.getFlowName();
}
String nodeLocation = tempMap.get("nodeLocation"); String nodeLocation = tempMap.get("nodeLocation");
String script = tempMap.get("script"); String script = tempMap.get("script");
String retryTimes = tempMap.get("retryTimes"); String retryTimes = tempMap.get("retryTimes");
...@@ -408,9 +416,9 @@ public class FlowParseTool { ...@@ -408,9 +416,9 @@ public class FlowParseTool {
List<String> contents; List<String> contents;
Iterator<Map.Entry<String, FlowNode>> iterator = flowNodeMap.entrySet().iterator(); Iterator<Map.Entry<String, FlowNode>> iterator = flowNodeMap.entrySet().iterator();
while (iterator.hasNext()) {
String azkabanJobType = ""; String azkabanJobType = "";
String azkabanJobCommand = ""; String azkabanJobCommand = "";
while (iterator.hasNext()) {
contents = new ArrayList<>(); contents = new ArrayList<>();
FlowNode flowNode = iterator.next().getValue(); FlowNode flowNode = iterator.next().getValue();
String nodeType = flowNode.getNodeType(); String nodeType = flowNode.getNodeType();
...@@ -419,11 +427,27 @@ public class FlowParseTool { ...@@ -419,11 +427,27 @@ public class FlowParseTool {
azkabanJobType = "command"; azkabanJobType = "command";
azkabanJobCommand = generateShellFile(flowNode, localTaskExecArgsPath); azkabanJobCommand = generateShellFile(flowNode, localTaskExecArgsPath);
//上传ftp下载相关参数
JSONObject scriptJsonObject = JSONObject.parseObject(flowNode.getNodeData());
//FTP链接
contents.add("ftpUrl=" + scriptJsonObject.getString("ftpUrl"));
} else if ("sql".equalsIgnoreCase(nodeType)) { } else if ("sql".equalsIgnoreCase(nodeType)) {
// sql 任务 // sql 任务
azkabanJobType = "command"; azkabanJobType = "command";
azkabanJobCommand = generateSqlFile(flowNode, localTaskExecArgsPath); azkabanJobCommand = generateSqlFile(flowNode, localTaskExecArgsPath);
//上传ftp下载相关参数
JSONObject scriptJsonObject = JSONObject.parseObject(flowNode.getNodeData());
//console
contents.add("console=" + scriptJsonObject.getString("console"));
//console
contents.add("hdfs=" + scriptJsonObject.getString("hdfs"));
//console
contents.add("table=" + scriptJsonObject.getString("table"));
//console
contents.add("topic=" + scriptJsonObject.getString("topic"));
} else if ("sync".equalsIgnoreCase(nodeType)) { } else if ("sync".equalsIgnoreCase(nodeType)) {
//同步任务 //同步任务
azkabanJobType = "command"; azkabanJobType = "command";
......
...@@ -122,7 +122,7 @@ public class SFTPUtils { ...@@ -122,7 +122,7 @@ public class SFTPUtils {
* @param remoteFileDirPath 要上传到的远程文件路径 * @param remoteFileDirPath 要上传到的远程文件路径
*/ */
public void singleUploadFile(String localFileDirPath,String uploadFileName,String remoteFileDirPath) { public void singleUploadFile(String localFileDirPath,String uploadFileName,String remoteFileDirPath) {
String pathTeString = "C:\\opt\\dmp\\dmp_web\\35\\705\\execArgs\\"; String pathTeString = "C:\\opt\\dmp\\dmp_web\\31\\705\\execArgs\\";
//本地文件绝对路径 //本地文件绝对路径
String localFileAbsolutePath = localFileDirPath+uploadFileName; String localFileAbsolutePath = localFileDirPath+uploadFileName;
String remoteFileAbsolutePath = remoteFileDirPath+"/"+uploadFileName; String remoteFileAbsolutePath = remoteFileDirPath+"/"+uploadFileName;
......
...@@ -2,6 +2,9 @@ package com.jz.dmp.modules.controller.bean; ...@@ -2,6 +2,9 @@ package com.jz.dmp.modules.controller.bean;
import java.util.Date; import java.util.Date;
import org.apache.commons.lang3.StringUtils;
import com.jz.common.annotation.FieldAssist;
import com.jz.common.bean.BasePageBean; import com.jz.common.bean.BasePageBean;
import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty; import io.swagger.annotations.ApiModelProperty;
...@@ -78,6 +81,13 @@ public class DmpDevelopTaskRequest extends BasePageBean { ...@@ -78,6 +81,13 @@ public class DmpDevelopTaskRequest extends BasePageBean {
@ApiModelProperty(value = "创建用户ID") @ApiModelProperty(value = "创建用户ID")
private String createUserId; private String createUserId;
/**
 * Data creation time; sortable via the task.create_time column (see FieldAssist).
 */
@ApiModelProperty(value = "数据创建时间")
@FieldAssist(orderBy = "task.create_time")
private Date createTime;
/** /**
* 数据创建时间起 * 数据创建时间起
*/ */
...@@ -96,6 +106,13 @@ public class DmpDevelopTaskRequest extends BasePageBean { ...@@ -96,6 +106,13 @@ public class DmpDevelopTaskRequest extends BasePageBean {
@ApiModelProperty(value = "创建用户ID") @ApiModelProperty(value = "创建用户ID")
private String updateUserId; private String updateUserId;
/**
 * Data update time; sortable via the task.update_time column (see FieldAssist).
 */
@ApiModelProperty(value = "数据更新时间")
@FieldAssist(orderBy = "task.update_time")
private Date updateTime;
/** /**
* 数据更新时间起 * 数据更新时间起
*/ */
...@@ -204,6 +221,20 @@ public class DmpDevelopTaskRequest extends BasePageBean { ...@@ -204,6 +221,20 @@ public class DmpDevelopTaskRequest extends BasePageBean {
@ApiModelProperty(value = "项目id") @ApiModelProperty(value = "项目id")
private Integer projectId; private Integer projectId;
/**
 * Creating user's display name; filtered with LIKE and sortable via user.user_name.
 */
@ApiModelProperty(value = "创建用户名称")
@FieldAssist(orderBy = "user.user_name")
private String createUserName;
/**
 * Task flow (navigation-tree node) name; filtered with LIKE and sortable via tree.name.
 */
@ApiModelProperty(value = "任务流程名称")
@FieldAssist(orderBy = "tree.name")
private String name;
public Integer getId() { public Integer getId() {
return id; return id;
} }
...@@ -292,6 +323,14 @@ public class DmpDevelopTaskRequest extends BasePageBean { ...@@ -292,6 +323,14 @@ public class DmpDevelopTaskRequest extends BasePageBean {
this.createUserId = createUserId; this.createUserId = createUserId;
} }
/** @return the creating user's display name used as a LIKE filter. */
public String getCreateUserName() {
return createUserName;
}
/** Sets the creating user's display name filter. */
public void setCreateUserName(String createUserName) {
this.createUserName = createUserName;
}
public Date getCreateTimeStart() { public Date getCreateTimeStart() {
return createTimeStart; return createTimeStart;
} }
...@@ -459,4 +498,29 @@ public class DmpDevelopTaskRequest extends BasePageBean { ...@@ -459,4 +498,29 @@ public class DmpDevelopTaskRequest extends BasePageBean {
public void setProjectId(Integer projectId) { public void setProjectId(Integer projectId) {
this.projectId = projectId; this.projectId = projectId;
} }
/**
 * @return the data creation time.
 * NOTE(review): exposes the internal mutable Date by reference; callers can mutate
 * bean state through it — consider defensive copies if that matters here.
 */
public Date getCreateTime() {
return createTime;
}
/** Sets the data creation time (stored by reference). */
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
/**
 * @return the data update time.
 * NOTE(review): exposes the internal mutable Date by reference — same caveat as createTime.
 */
public Date getUpdateTime() {
return updateTime;
}
/** Sets the data update time (stored by reference). */
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
/** @return the task flow (navigation-tree node) name used as a LIKE filter. */
public String getName() {
return name;
}
/** Sets the task flow name filter. */
public void setName(String name) {
this.name = name;
}
} }
...@@ -27,6 +27,7 @@ import org.springframework.util.CollectionUtils; ...@@ -27,6 +27,7 @@ import org.springframework.util.CollectionUtils;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.ObjectNode;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper; import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo; import com.github.pagehelper.PageInfo;
import com.jz.agent.service.DmpDsAgentService; import com.jz.agent.service.DmpDsAgentService;
...@@ -970,15 +971,31 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop ...@@ -970,15 +971,31 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
if (dmpDevelopTaskRequest.getProjectId() != null) { if (dmpDevelopTaskRequest.getProjectId() != null) {
param.put("projectId", dmpDevelopTaskRequest.getProjectId()); param.put("projectId", dmpDevelopTaskRequest.getProjectId());
} }
//创建用户名称
if (!StringUtils.isEmpty(dmpDevelopTaskRequest.getCreateUserName())) {
param.put("createUserName", dmpDevelopTaskRequest.getCreateUserName());
}
//任务名称
if (!StringUtils.isEmpty(dmpDevelopTaskRequest.getName())) {
param.put("name", dmpDevelopTaskRequest.getName());
}
//未删除数据 //未删除数据
param.put("dataStatus", "1"); param.put("dataStatus", "1");
PageHelper.startPage(dmpDevelopTaskRequest.getPageNum(), dmpDevelopTaskRequest.getPageSize()); String obderByStr = dmpDevelopTaskRequest.takeOrderByStr(DmpDevelopTaskRequest.class);
Page page = null;
if (obderByStr==null) {
page = PageHelper.startPage(dmpDevelopTaskRequest.getPageNum(), dmpDevelopTaskRequest.getPageSize());
} else {
page = PageHelper.startPage(dmpDevelopTaskRequest.getPageNum(), dmpDevelopTaskRequest.getPageSize(), obderByStr);
}
List<DmpDevelopTask> list = dmpDevelopTaskDao.findList(param); List<DmpDevelopTask> list = dmpDevelopTaskDao.findList(param);
List<DmpDevelopTaskDto> dtos = MyDmpDevelopTaskConverter.INSTANCE().domain2dto(list); List<DmpDevelopTaskDto> dtos = MyDmpDevelopTaskConverter.INSTANCE().domain2dto(list);
PageInfo<DmpDevelopTaskDto> pageInfo = new PageInfo<>(dtos); PageInfo<DmpDevelopTaskDto> pageInfo = new PageInfo<>(dtos);
pageInfo.setTotal(page.getTotal());
pageInfoResponse.setCode(StatuConstant.SUCCESS_CODE); pageInfoResponse.setCode(StatuConstant.SUCCESS_CODE);
pageInfoResponse.setMessage("查询成功"); pageInfoResponse.setMessage("查询成功");
...@@ -1253,7 +1270,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop ...@@ -1253,7 +1270,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
dmpDevelopTaskDao.update(dmpDevelopTask); dmpDevelopTaskDao.update(dmpDevelopTask);
DmpDevelopTask dmpDevelopTaskDb2 = dmpDevelopTaskDao.get(dmpDevelopTask.getTreeId().longValue()); DmpDevelopTask dmpDevelopTaskDb2 = dmpDevelopTaskDao.selectTaskById(dmpDevelopTask.getId().toString());
DmpDevelopTaskHistory dmpDevelopTaskHistory = MyDmpDevelopTaskHistoryConverter.INSTANCE().task2history(dmpDevelopTaskDb2); DmpDevelopTaskHistory dmpDevelopTaskHistory = MyDmpDevelopTaskHistoryConverter.INSTANCE().task2history(dmpDevelopTaskDb2);
...@@ -1262,7 +1279,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop ...@@ -1262,7 +1279,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE); baseBeanResponse.setCode(StatuConstant.SUCCESS_CODE);
baseBeanResponse.setMessage("修改成功"); baseBeanResponse.setMessage("修改成功");
baseBeanResponse.setData(dmpDevelopTaskDb); baseBeanResponse.setData(dmpDevelopTaskDb2);
return baseBeanResponse; return baseBeanResponse;
} }
......
...@@ -4,7 +4,7 @@ ...@@ -4,7 +4,7 @@
<springProperty scope="context" name="env" source="spring.profiles"/> <springProperty scope="context" name="env" source="spring.profiles"/>
<property name="service" value="databank_admin"/> <property name="service" value="jz_dmp_service"/>
<property name="logfile" value="${LOG_DIR:-/opt/log/stash}/${service}/${service}"/> <property name="logfile" value="${LOG_DIR:-/opt/log/stash}/${service}/${service}"/>
<appender name="console" class="ch.qos.logback.core.ConsoleAppender"> <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
......
...@@ -190,6 +190,7 @@ ...@@ -190,6 +190,7 @@
task.update_user_id,task.update_time,task.tree_id,task.flow_header, task.update_user_id,task.update_time,task.tree_id,task.flow_header,
task.flow_json, task.version, task.publish_version, task.is_gziped, tree.name task.flow_json, task.version, task.publish_version, task.is_gziped, tree.name
FROM dmp_develop_task task left join dmp_navigation_tree tree ON task.tree_id=tree.id FROM dmp_develop_task task left join dmp_navigation_tree tree ON task.tree_id=tree.id
LEFT JOIN dmp_member user ON task.CREATE_USER_ID=user.user_id
WHERE 1=1 AND tree.data_status = '1' WHERE 1=1 AND tree.data_status = '1'
<if test="taskType != null">AND task.task_type = #{taskType}</if> <if test="taskType != null">AND task.task_type = #{taskType}</if>
<if test="type != null">AND task.type = #{type}</if> <if test="type != null">AND task.type = #{type}</if>
...@@ -206,6 +207,8 @@ ...@@ -206,6 +207,8 @@
<if test="publishVersion != null">AND task.publish_version = #{publishVersion}</if> <if test="publishVersion != null">AND task.publish_version = #{publishVersion}</if>
<if test="gziped != null">AND task.is_gziped = #{gziped}</if> <if test="gziped != null">AND task.is_gziped = #{gziped}</if>
<if test="projectId != null">AND tree.project_id = #{projectId}</if> <if test="projectId != null">AND tree.project_id = #{projectId}</if>
<if test="createUserName != null">AND user.user_name like CONCAT('%',#{createUserName},'%')</if>
<if test="name != null">AND tree.name like CONCAT('%',#{name},'%')</if>
</select> </select>
<select id="queryTaskTreeInfo" resultType="com.jz.dmp.modules.controller.dataOperation.bean.DataDevTaskListDto"> <select id="queryTaskTreeInfo" resultType="com.jz.dmp.modules.controller.dataOperation.bean.DataDevTaskListDto">
......
...@@ -12,8 +12,49 @@ ...@@ -12,8 +12,49 @@
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP" /> <result column="update_time" property="updateTime" jdbcType="TIMESTAMP" />
</resultMap> </resultMap>
<!-- Public (platform-wide) configuration: maps dmp_public_config_info columns
     onto DmpPublicConfigInfoDto; consumed by the dmpPublicConfigInfoDto association. -->
<resultMap id="pubCfgInfoResultMap" type="com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoDto">
<result column="public_config_id" property="publicConfigId" jdbcType="INTEGER" />
<result column="kerberos_isenable" property="kerberosIsenable" jdbcType="CHAR" />
<result column="kerberos_jaas_client_name" property="kerberosJaasClientName" jdbcType="VARCHAR" />
<result column="kerberos_krb5_conf" property="kerberosKrb5Conf" jdbcType="VARCHAR" />
<result column="kerberos_jaas_conf" property="kerberosJaasConf" jdbcType="VARCHAR" />
<result column="kerberos_fqdn" property="kerberosFqdn" jdbcType="VARCHAR" />
<result column="kerberos_keytab_conf" property="kerberosKeytabConf" jdbcType="VARCHAR" />
<result column="kerberos_keytab_user" property="kerberosKeytabUser" jdbcType="VARCHAR" />
<result column="kerberos_spark_jaas_conf" property="kerberosSparkJaasConf" jdbcType="VARCHAR" />
<result column="hdfs_http_path" property="hdfsHttpPath" jdbcType="VARCHAR" />
<result column="hdfs_syncing_path" property="hdfsSyncingPath" jdbcType="VARCHAR" />
<result column="hdfs_user_name" property="hdfsUserName" jdbcType="VARCHAR" />
<result column="kafka_conector_url" property="kafkaConectorUrl" jdbcType="VARCHAR" />
<result column="kafka_schema_register_url" property="kafkaSchemaRegisterUrl" jdbcType="VARCHAR" />
<result column="kafka_bootstrap_servers" property="kafkaBootstrapServers" jdbcType="VARCHAR" />
<result column="azkaban_exector_shell_exec" property="azkabanExectorShellExec" jdbcType="VARCHAR" />
<result column="azkaban_exector_sql_exec" property="azkabanExectorSqlExec" jdbcType="VARCHAR" />
<result column="azkaban_exector_xml_exec" property="azkabanExectorXmlExec" jdbcType="VARCHAR" />
<result column="azkaban_exector_sql_path" property="azkabanExectorSqlPath" jdbcType="VARCHAR" />
<result column="azkaban_exector_shell_path" property="azkabanExectorShellPath" jdbcType="VARCHAR" />
<result column="azkaban_local_task_file_path" property="azkabanLocalTaskFilePath" jdbcType="VARCHAR" />
<result column="azkaban_exector_shell_export_data" property="azkabanExectorShellExportData" jdbcType="VARCHAR" />
<result column="azkaban_monitor_url" property="azkabanMonitorUrl" jdbcType="VARCHAR" />
<result column="atlas_monitor_url" property="atlasMonitorUrl" jdbcType="VARCHAR" />
<result column="shell_cmd_server" property="shellCmdServer" jdbcType="VARCHAR" />
<result column="shell_cmd_user" property="shellCmdUser" jdbcType="VARCHAR" />
<result column="shell_cmd_password" property="shellCmdPassword" jdbcType="VARCHAR" />
<result column="shell_sftp_port" property="shellSftpPort" jdbcType="INTEGER" />
<result column="remark" property="remark" jdbcType="VARCHAR" />
<result column="data_status" property="dataStatus" jdbcType="CHAR" />
<result column="create_user_id" property="createUserId" jdbcType="INTEGER" />
<result column="create_time" property="createTime" jdbcType="TIMESTAMP" />
<result column="update_user_id" property="updateUserId" jdbcType="INTEGER" />
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP" />
</resultMap>
<resultMap id="BaseDtoResultMap" type="com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto" extends="BaseResultMap"> <resultMap id="BaseDtoResultMap" type="com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto" extends="BaseResultMap">
<!-- /*$BaseDtoResultMapContent$*/ --> <!-- /*$BaseDtoResultMapContent$*/ -->
<association property="dmpPublicConfigInfoDto" javaType="com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoDto"
column="project_config_id" select="selectOne">
</association>
</resultMap> </resultMap>
<sql id="Base_Column_List"> <sql id="Base_Column_List">
...@@ -26,6 +67,20 @@ ...@@ -26,6 +67,20 @@
<!-- /*$BaseDtoColumnListContent$*/ --> <!-- /*$BaseDtoColumnListContent$*/ -->
</sql> </sql>
<!-- Fetch the active public-configuration row (backs the dmpPublicConfigInfoDto association).
     NOTE(review): no LIMIT and no key filter — this assumes dmp_public_config_info holds a
     single data_status='1' row; multiple active rows would break the single-valued
     association. TODO confirm. -->
<select id="selectOne" resultMap="pubCfgInfoResultMap">
select
public_config_id, kerberos_isenable, kerberos_jaas_client_name, kerberos_krb5_conf, kerberos_jaas_conf,
kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path,
hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers,
azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path,
azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, shell_cmd_server,
shell_cmd_user, shell_cmd_password, shell_sftp_port, remark, data_status,
create_user_id, create_time, update_user_id, update_time
from dmp_public_config_info
where data_status='1'
</select>
<!-- 根据主键查询项目配置表 --> <!-- 根据主键查询项目配置表 -->
<select id="selectByPrimaryKey" resultMap="BaseResultMap" parameterType="java.lang.String"> <select id="selectByPrimaryKey" resultMap="BaseResultMap" parameterType="java.lang.String">
select select
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment