Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
J
jz-dmp-service
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
姚本章
jz-dmp-service
Commits
16943735
Commit
16943735
authored
Feb 23, 2021
by
mcb
Browse files
Options
Browse Files
Download
Plain Diff
Merge branch 'dmp_dev' of
http://gitlab.ioubuy.cn/yaobenzhang/jz-dmp-service
into dmp_dev
parents
4ef746d4
bfec0993
Changes
11
Hide whitespace changes
Inline
Side-by-side
Showing
11 changed files
with
248 additions
and
25 deletions
+248
-25
FieldAssist.java
src/main/java/com/jz/common/annotation/FieldAssist.java
+21
-0
BasePageBean.java
src/main/java/com/jz/common/bean/BasePageBean.java
+48
-1
AzkabanApiUtils2.java
src/main/java/com/jz/common/utils/AzkabanApiUtils2.java
+1
-0
CodeGeneratorUtils.java
src/main/java/com/jz/common/utils/CodeGeneratorUtils.java
+4
-13
FlowParseTool.java
src/main/java/com/jz/common/utils/FlowParseTool.java
+27
-3
SFTPUtils.java
src/main/java/com/jz/common/utils/SFTPUtils.java
+1
-1
DmpDevelopTaskRequest.java
...jz/dmp/modules/controller/bean/DmpDevelopTaskRequest.java
+65
-1
DmpDevelopTaskServiceImpl.java
...z/dmp/modules/service/impl/DmpDevelopTaskServiceImpl.java
+22
-5
logback-spring.xml
src/main/resources/logback-spring.xml
+1
-1
DmpDevelopTaskMapper.xml
src/main/resources/mapper/dmp/DmpDevelopTaskMapper.xml
+3
-0
DmpProjectConfigInfoMapper.xml
...esources/mapper/projconfig/DmpProjectConfigInfoMapper.xml
+55
-0
No files found.
src/main/java/com/jz/common/annotation/FieldAssist.java
0 → 100644
View file @
16943735
package
com
.
jz
.
common
.
annotation
;
import
java.lang.annotation.ElementType
;
import
java.lang.annotation.Retention
;
import
java.lang.annotation.RetentionPolicy
;
import
java.lang.annotation.Target
;
/**
 * Field-level helper annotation that maps a request-bean sort field to the
 * database column expression used when building an ORDER BY clause.
 *
 * <p>Read via reflection (see {@code BasePageBean#takeOrderByStr}): when the
 * requested sort field carries this annotation, its {@link #orderBy()} value
 * replaces the raw field name in the generated ORDER BY string.</p>
 *
 * @author ybz
 * @date 2021-01-21
 */
@Target({ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface FieldAssist {

    /** Column expression to sort by, e.g. {@code "task.create_time"}. */
    String orderBy();
}
src/main/java/com/jz/common/bean/BasePageBean.java
View file @
16943735
package
com
.
jz
.
common
.
bean
;
import
java.lang.reflect.Field
;
import
org.apache.commons.lang3.StringUtils
;
import
com.jz.common.annotation.FieldAssist
;
import
io.swagger.annotations.ApiModel
;
import
io.swagger.annotations.ApiModelProperty
;
...
...
@@ -16,6 +22,12 @@ public class BasePageBean {
@ApiModelProperty
(
value
=
"每页显示记录数"
)
private
int
pageSize
=
10
;
@ApiModelProperty
(
value
=
"排序字段"
)
private
String
orderColumn
;
@ApiModelProperty
(
value
=
"排序方式(asc:升序;desc:降序)"
)
private
String
orderType
;
/** @return the requested page number. */
public int getPageNum() {
    return this.pageNum;
}
...
...
@@ -31,7 +43,42 @@ public class BasePageBean {
/** @param pageSize number of records per page. */
public void setPageSize(int pageSize) {
    this.pageSize = pageSize;
}
/** @return the requested sort field name (bean property name). */
public String getOrderColumn() {
    return this.orderColumn;
}
/** @param orderColumn sort field name (bean property name). */
public void setOrderColumn(String orderColumn) {
    this.orderColumn = orderColumn;
}
/** @return the requested sort direction ("asc" or "desc"). */
public String getOrderType() {
    return this.orderType;
}
/** @param orderType sort direction ("asc" or "desc"). */
public void setOrderType(String orderType) {
    this.orderType = orderType;
}
/**
 * Builds the ORDER BY fragment ("column direction") for the current request.
 *
 * <p>If the sort field declared on {@code cls} is annotated with
 * {@link FieldAssist}, its {@code orderBy()} column expression replaces the
 * raw bean-property name. The column is implicitly validated by
 * {@code getDeclaredField} (it must be a real field of {@code cls}); the
 * direction is whitelisted here because both values originate from the
 * request and are concatenated into SQL.</p>
 *
 * @param cls the request bean class whose declared fields are inspected
 * @return the ORDER BY string, or {@code null} when no sort was requested
 * @throws RuntimeException if the sort field does not exist on {@code cls},
 *         or the sort direction is not {@code asc}/{@code desc}
 */
public String takeOrderByStr(Class<?> cls) {
    if (StringUtils.isEmpty(orderColumn) || StringUtils.isEmpty(orderType)) {
        return null;
    }
    // SECURITY: reject any direction other than asc/desc — orderType is
    // user-supplied and is concatenated directly into the SQL ORDER BY.
    if (!"asc".equalsIgnoreCase(orderType) && !"desc".equalsIgnoreCase(orderType)) {
        throw new RuntimeException("该排序字段没有找到对应的排序字段");
    }
    try {
        Field field = cls.getDeclaredField(orderColumn);
        if (field.isAnnotationPresent(FieldAssist.class)) {
            // Swap the bean property name for the mapped column expression.
            FieldAssist fieldAssist = field.getAnnotation(FieldAssist.class);
            this.orderColumn = fieldAssist.orderBy();
        }
    } catch (Exception e) {
        // Preserve the cause so the missing-field error stays diagnosable.
        throw new RuntimeException("该排序字段没有找到对应的排序字段", e);
    }
    return orderColumn + " " + orderType;
}
}
src/main/java/com/jz/common/utils/AzkabanApiUtils2.java
View file @
16943735
...
...
@@ -723,5 +723,6 @@ public class AzkabanApiUtils2 {
return
list
;
}
}
src/main/java/com/jz/common/utils/CodeGeneratorUtils.java
View file @
16943735
...
...
@@ -11,7 +11,7 @@ import java.util.UUID;
*/
public
class
CodeGeneratorUtils
{
//任务起始版本
private
static
final
String
TASKVERSION_START
=
"
V1.0
"
;
private
static
final
String
TASKVERSION_START
=
"
1
"
;
/**
* @Title: generatorNextTaskVesion
...
...
@@ -27,19 +27,10 @@ public class CodeGeneratorUtils {
return
TASKVERSION_START
;
}
String
[]
strs
=
version
.
substring
(
1
,
version
.
length
()).
split
(
"."
);
Long
number
=
Long
.
parseLong
(
version
);
number
++;
Integer
number_1
=
Integer
.
parseInt
(
strs
[
0
]);
Integer
number_2
=
Integer
.
parseInt
(
strs
[
1
]);
if
(
number_2
+
1
>
9
)
{
number_2
=
0
;
number_1
+=
1
;
}
else
{
number_2
+=
1
;
}
return
"V"
+
number_1
+
"."
+
number_2
;
return
number
.
toString
();
}
/**
...
...
src/main/java/com/jz/common/utils/FlowParseTool.java
View file @
16943735
...
...
@@ -14,6 +14,7 @@ import org.springframework.data.redis.core.RedisTemplate;
import
com.alibaba.fastjson.JSON
;
import
com.alibaba.fastjson.JSONObject
;
import
com.jz.common.constant.CommConstant
;
import
com.jz.common.enums.NodeChangeTypeEnum
;
import
com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNode
;
import
com.jz.dmp.modules.controller.DataIntegration.bean.flow.FlowNodeChangeInfo
;
...
...
@@ -219,8 +220,15 @@ public class FlowParseTool {
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
tempMap
=
nodesList
.
get
(
i
);
String
nodeId
=
tempMap
.
get
(
"id"
);
String
nodeName
=
tempMap
.
get
(
"label"
);
String
nodeType
=
tempMap
.
get
(
"taskType"
);
String
nodeName
=
""
;
if
(
nodeType
.
equals
(
CommConstant
.
WORK_TYPE_START
))
{
nodeName
=
flowPro
.
getFlowName
()+
"_start"
;
}
else
if
(
nodeType
.
equals
(
CommConstant
.
WORK_TYPE_STOP
))
{
nodeName
=
flowPro
.
getFlowName
();
}
else
{
nodeName
=
tempMap
.
get
(
"label"
)+
"_"
+
flowPro
.
getFlowName
();
}
String
nodeLocation
=
tempMap
.
get
(
"nodeLocation"
);
String
script
=
tempMap
.
get
(
"script"
);
String
retryTimes
=
tempMap
.
get
(
"retryTimes"
);
...
...
@@ -408,9 +416,9 @@ public class FlowParseTool {
List
<
String
>
contents
;
Iterator
<
Map
.
Entry
<
String
,
FlowNode
>>
iterator
=
flowNodeMap
.
entrySet
().
iterator
();
String
azkabanJobType
=
""
;
String
azkabanJobCommand
=
""
;
while
(
iterator
.
hasNext
())
{
String
azkabanJobType
=
""
;
String
azkabanJobCommand
=
""
;
contents
=
new
ArrayList
<>();
FlowNode
flowNode
=
iterator
.
next
().
getValue
();
String
nodeType
=
flowNode
.
getNodeType
();
...
...
@@ -418,11 +426,27 @@ public class FlowParseTool {
// shell
azkabanJobType
=
"command"
;
azkabanJobCommand
=
generateShellFile
(
flowNode
,
localTaskExecArgsPath
);
//上传ftp下载相关参数
JSONObject
scriptJsonObject
=
JSONObject
.
parseObject
(
flowNode
.
getNodeData
());
//FTP链接
contents
.
add
(
"ftpUrl="
+
scriptJsonObject
.
getString
(
"ftpUrl"
));
}
else
if
(
"sql"
.
equalsIgnoreCase
(
nodeType
))
{
// sql 任务
azkabanJobType
=
"command"
;
azkabanJobCommand
=
generateSqlFile
(
flowNode
,
localTaskExecArgsPath
);
//上传ftp下载相关参数
JSONObject
scriptJsonObject
=
JSONObject
.
parseObject
(
flowNode
.
getNodeData
());
//console
contents
.
add
(
"console="
+
scriptJsonObject
.
getString
(
"console"
));
//console
contents
.
add
(
"hdfs="
+
scriptJsonObject
.
getString
(
"hdfs"
));
//console
contents
.
add
(
"table="
+
scriptJsonObject
.
getString
(
"table"
));
//console
contents
.
add
(
"topic="
+
scriptJsonObject
.
getString
(
"topic"
));
}
else
if
(
"sync"
.
equalsIgnoreCase
(
nodeType
))
{
//同步任务
...
...
src/main/java/com/jz/common/utils/SFTPUtils.java
View file @
16943735
...
...
@@ -122,7 +122,7 @@ public class SFTPUtils {
* @param remoteFileDirPath 要上传到的远程文件路径
*/
public
void
singleUploadFile
(
String
localFileDirPath
,
String
uploadFileName
,
String
remoteFileDirPath
)
{
String
pathTeString
=
"C:\\opt\\dmp\\dmp_web\\3
5
\\705\\execArgs\\"
;
String
pathTeString
=
"C:\\opt\\dmp\\dmp_web\\3
1
\\705\\execArgs\\"
;
//本地文件绝对路径
String
localFileAbsolutePath
=
localFileDirPath
+
uploadFileName
;
String
remoteFileAbsolutePath
=
remoteFileDirPath
+
"/"
+
uploadFileName
;
...
...
src/main/java/com/jz/dmp/modules/controller/bean/DmpDevelopTaskRequest.java
View file @
16943735
...
...
@@ -2,6 +2,9 @@ package com.jz.dmp.modules.controller.bean;
import
java.util.Date
;
import
org.apache.commons.lang3.StringUtils
;
import
com.jz.common.annotation.FieldAssist
;
import
com.jz.common.bean.BasePageBean
;
import
io.swagger.annotations.ApiModel
;
import
io.swagger.annotations.ApiModelProperty
;
...
...
@@ -77,7 +80,14 @@ public class DmpDevelopTaskRequest extends BasePageBean {
*/
@ApiModelProperty
(
value
=
"创建用户ID"
)
private
String
createUserId
;
/**
* 数据创建时间
*/
@ApiModelProperty
(
value
=
"数据创建时间"
)
@FieldAssist
(
orderBy
=
"task.create_time"
)
private
Date
createTime
;
/**
* 数据创建时间起
*/
...
...
@@ -95,6 +105,13 @@ public class DmpDevelopTaskRequest extends BasePageBean {
*/
@ApiModelProperty
(
value
=
"创建用户ID"
)
private
String
updateUserId
;
/**
* 数据更新时间
*/
@ApiModelProperty
(
value
=
"数据更新时间"
)
@FieldAssist
(
orderBy
=
"task.update_time"
)
private
Date
updateTime
;
/**
* 数据更新时间起
...
...
@@ -203,6 +220,20 @@ public class DmpDevelopTaskRequest extends BasePageBean {
*/
@ApiModelProperty
(
value
=
"项目id"
)
private
Integer
projectId
;
/**
* 创建用户名称
*/
@ApiModelProperty
(
value
=
"创建用户名称"
)
@FieldAssist
(
orderBy
=
"user.user_name"
)
private
String
createUserName
;
/**
* 任务流程名称
*/
@ApiModelProperty
(
value
=
"任务流程名称"
)
@FieldAssist
(
orderBy
=
"tree.name"
)
private
String
name
;
public
Integer
getId
()
{
return
id
;
...
...
@@ -292,6 +323,14 @@ public class DmpDevelopTaskRequest extends BasePageBean {
this
.
createUserId
=
createUserId
;
}
public
String
getCreateUserName
()
{
return
createUserName
;
}
public
void
setCreateUserName
(
String
createUserName
)
{
this
.
createUserName
=
createUserName
;
}
public
Date
getCreateTimeStart
()
{
return
createTimeStart
;
}
...
...
@@ -459,4 +498,29 @@ public class DmpDevelopTaskRequest extends BasePageBean {
public
void
setProjectId
(
Integer
projectId
)
{
this
.
projectId
=
projectId
;
}
public
Date
getCreateTime
()
{
return
createTime
;
}
public
void
setCreateTime
(
Date
createTime
)
{
this
.
createTime
=
createTime
;
}
public
Date
getUpdateTime
()
{
return
updateTime
;
}
public
void
setUpdateTime
(
Date
updateTime
)
{
this
.
updateTime
=
updateTime
;
}
public
String
getName
()
{
return
name
;
}
public
void
setName
(
String
name
)
{
this
.
name
=
name
;
}
}
src/main/java/com/jz/dmp/modules/service/impl/DmpDevelopTaskServiceImpl.java
View file @
16943735
...
...
@@ -27,6 +27,7 @@ import org.springframework.util.CollectionUtils;
import
com.fasterxml.jackson.databind.ObjectMapper
;
import
com.fasterxml.jackson.databind.node.ObjectNode
;
import
com.github.pagehelper.Page
;
import
com.github.pagehelper.PageHelper
;
import
com.github.pagehelper.PageInfo
;
import
com.jz.agent.service.DmpDsAgentService
;
...
...
@@ -970,15 +971,31 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
if
(
dmpDevelopTaskRequest
.
getProjectId
()
!=
null
)
{
param
.
put
(
"projectId"
,
dmpDevelopTaskRequest
.
getProjectId
());
}
//创建用户名称
if
(!
StringUtils
.
isEmpty
(
dmpDevelopTaskRequest
.
getCreateUserName
()))
{
param
.
put
(
"createUserName"
,
dmpDevelopTaskRequest
.
getCreateUserName
());
}
//任务名称
if
(!
StringUtils
.
isEmpty
(
dmpDevelopTaskRequest
.
getName
()))
{
param
.
put
(
"name"
,
dmpDevelopTaskRequest
.
getName
());
}
//未删除数据
param
.
put
(
"dataStatus"
,
"1"
);
PageHelper
.
startPage
(
dmpDevelopTaskRequest
.
getPageNum
(),
dmpDevelopTaskRequest
.
getPageSize
());
String
obderByStr
=
dmpDevelopTaskRequest
.
takeOrderByStr
(
DmpDevelopTaskRequest
.
class
);
Page
page
=
null
;
if
(
obderByStr
==
null
)
{
page
=
PageHelper
.
startPage
(
dmpDevelopTaskRequest
.
getPageNum
(),
dmpDevelopTaskRequest
.
getPageSize
());
}
else
{
page
=
PageHelper
.
startPage
(
dmpDevelopTaskRequest
.
getPageNum
(),
dmpDevelopTaskRequest
.
getPageSize
(),
obderByStr
);
}
List
<
DmpDevelopTask
>
list
=
dmpDevelopTaskDao
.
findList
(
param
);
List
<
DmpDevelopTaskDto
>
dtos
=
MyDmpDevelopTaskConverter
.
INSTANCE
().
domain2dto
(
list
);
PageInfo
<
DmpDevelopTaskDto
>
pageInfo
=
new
PageInfo
<>(
dtos
);
pageInfo
.
setTotal
(
page
.
getTotal
());
pageInfoResponse
.
setCode
(
StatuConstant
.
SUCCESS_CODE
);
pageInfoResponse
.
setMessage
(
"查询成功"
);
...
...
@@ -1253,7 +1270,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
dmpDevelopTaskDao
.
update
(
dmpDevelopTask
);
DmpDevelopTask
dmpDevelopTaskDb2
=
dmpDevelopTaskDao
.
get
(
dmpDevelopTask
.
getTreeId
().
longValue
());
DmpDevelopTask
dmpDevelopTaskDb2
=
dmpDevelopTaskDao
.
selectTaskById
(
dmpDevelopTask
.
getId
().
toString
());
DmpDevelopTaskHistory
dmpDevelopTaskHistory
=
MyDmpDevelopTaskHistoryConverter
.
INSTANCE
().
task2history
(
dmpDevelopTaskDb2
);
...
...
@@ -1262,7 +1279,7 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
baseBeanResponse
.
setCode
(
StatuConstant
.
SUCCESS_CODE
);
baseBeanResponse
.
setMessage
(
"修改成功"
);
baseBeanResponse
.
setData
(
dmpDevelopTaskDb
);
baseBeanResponse
.
setData
(
dmpDevelopTaskDb
2
);
return
baseBeanResponse
;
}
...
...
src/main/resources/logback-spring.xml
View file @
16943735
...
...
@@ -4,7 +4,7 @@
<springProperty
scope=
"context"
name=
"env"
source=
"spring.profiles"
/>
<property
name=
"service"
value=
"
databank_admin
"
/>
<property
name=
"service"
value=
"
jz_dmp_service
"
/>
<property
name=
"logfile"
value=
"${LOG_DIR:-/opt/log/stash}/${service}/${service}"
/>
<appender
name=
"console"
class=
"ch.qos.logback.core.ConsoleAppender"
>
...
...
src/main/resources/mapper/dmp/DmpDevelopTaskMapper.xml
View file @
16943735
...
...
@@ -190,6 +190,7 @@
task.update_user_id,task.update_time,task.tree_id,task.flow_header,
task.flow_json, task.version, task.publish_version, task.is_gziped, tree.name
FROM dmp_develop_task task left join dmp_navigation_tree tree ON task.tree_id=tree.id
LEFT JOIN dmp_member user ON task.CREATE_USER_ID=user.user_id
WHERE 1=1 AND tree.data_status = '1'
<if
test=
"taskType != null"
>
AND task.task_type = #{taskType}
</if>
<if
test=
"type != null"
>
AND task.type = #{type}
</if>
...
...
@@ -206,6 +207,8 @@
<if
test=
"publishVersion != null"
>
AND task.publish_version = #{publishVersion}
</if>
<if
test=
"gziped != null"
>
AND task.is_gziped = #{gziped}
</if>
<if
test=
"projectId != null"
>
AND tree.project_id = #{projectId}
</if>
<if
test=
"createUserName != null"
>
AND user.user_name like CONCAT('%',#{createUserName},'%')
</if>
<if
test=
"name != null"
>
AND tree.name like CONCAT('%',#{name},'%')
</if>
</select>
<select
id=
"queryTaskTreeInfo"
resultType=
"com.jz.dmp.modules.controller.dataOperation.bean.DataDevTaskListDto"
>
...
...
src/main/resources/mapper/projconfig/DmpProjectConfigInfoMapper.xml
View file @
16943735
...
...
@@ -12,8 +12,49 @@
<result
column=
"update_time"
property=
"updateTime"
jdbcType=
"TIMESTAMP"
/>
</resultMap>
<!-- 公共配置 -->
<resultMap
id=
"pubCfgInfoResultMap"
type=
"com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoDto"
>
<result
column=
"public_config_id"
property=
"publicConfigId"
jdbcType=
"INTEGER"
/>
<result
column=
"kerberos_isenable"
property=
"kerberosIsenable"
jdbcType=
"CHAR"
/>
<result
column=
"kerberos_jaas_client_name"
property=
"kerberosJaasClientName"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_krb5_conf"
property=
"kerberosKrb5Conf"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_jaas_conf"
property=
"kerberosJaasConf"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_fqdn"
property=
"kerberosFqdn"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_keytab_conf"
property=
"kerberosKeytabConf"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_keytab_user"
property=
"kerberosKeytabUser"
jdbcType=
"VARCHAR"
/>
<result
column=
"kerberos_spark_jaas_conf"
property=
"kerberosSparkJaasConf"
jdbcType=
"VARCHAR"
/>
<result
column=
"hdfs_http_path"
property=
"hdfsHttpPath"
jdbcType=
"VARCHAR"
/>
<result
column=
"hdfs_syncing_path"
property=
"hdfsSyncingPath"
jdbcType=
"VARCHAR"
/>
<result
column=
"hdfs_user_name"
property=
"hdfsUserName"
jdbcType=
"VARCHAR"
/>
<result
column=
"kafka_conector_url"
property=
"kafkaConectorUrl"
jdbcType=
"VARCHAR"
/>
<result
column=
"kafka_schema_register_url"
property=
"kafkaSchemaRegisterUrl"
jdbcType=
"VARCHAR"
/>
<result
column=
"kafka_bootstrap_servers"
property=
"kafkaBootstrapServers"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_shell_exec"
property=
"azkabanExectorShellExec"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_sql_exec"
property=
"azkabanExectorSqlExec"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_xml_exec"
property=
"azkabanExectorXmlExec"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_sql_path"
property=
"azkabanExectorSqlPath"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_shell_path"
property=
"azkabanExectorShellPath"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_local_task_file_path"
property=
"azkabanLocalTaskFilePath"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_exector_shell_export_data"
property=
"azkabanExectorShellExportData"
jdbcType=
"VARCHAR"
/>
<result
column=
"azkaban_monitor_url"
property=
"azkabanMonitorUrl"
jdbcType=
"VARCHAR"
/>
<result
column=
"atlas_monitor_url"
property=
"atlasMonitorUrl"
jdbcType=
"VARCHAR"
/>
<result
column=
"shell_cmd_server"
property=
"shellCmdServer"
jdbcType=
"VARCHAR"
/>
<result
column=
"shell_cmd_user"
property=
"shellCmdUser"
jdbcType=
"VARCHAR"
/>
<result
column=
"shell_cmd_password"
property=
"shellCmdPassword"
jdbcType=
"VARCHAR"
/>
<result
column=
"shell_sftp_port"
property=
"shellSftpPort"
jdbcType=
"INTEGER"
/>
<result
column=
"remark"
property=
"remark"
jdbcType=
"VARCHAR"
/>
<result
column=
"data_status"
property=
"dataStatus"
jdbcType=
"CHAR"
/>
<result
column=
"create_user_id"
property=
"createUserId"
jdbcType=
"INTEGER"
/>
<result
column=
"create_time"
property=
"createTime"
jdbcType=
"TIMESTAMP"
/>
<result
column=
"update_user_id"
property=
"updateUserId"
jdbcType=
"INTEGER"
/>
<result
column=
"update_time"
property=
"updateTime"
jdbcType=
"TIMESTAMP"
/>
</resultMap>
<resultMap
id=
"BaseDtoResultMap"
type=
"com.jz.dmp.modules.controller.projconfig.bean.DmpProjectConfigInfoDto"
extends=
"BaseResultMap"
>
<!-- /*$BaseDtoResultMapContent$*/ -->
<association
property=
"dmpPublicConfigInfoDto"
javaType=
"com.jz.dmp.modules.controller.projconfig.bean.DmpPublicConfigInfoDto"
column=
"project_config_id"
select=
"selectOne"
>
</association>
</resultMap>
<sql
id=
"Base_Column_List"
>
...
...
@@ -25,6 +66,20 @@
<include
refid=
"Base_Column_List"
/>
<!-- /*$BaseDtoColumnListContent$*/ -->
</sql>
<!-- 查询公共配置 -->
<select
id=
"selectOne"
resultMap=
"pubCfgInfoResultMap"
>
select
public_config_id, kerberos_isenable, kerberos_jaas_client_name, kerberos_krb5_conf, kerberos_jaas_conf,
kerberos_fqdn, kerberos_keytab_conf, kerberos_keytab_user, kerberos_spark_jaas_conf, hdfs_http_path,
hdfs_syncing_path, hdfs_user_name, kafka_conector_url, kafka_schema_register_url, kafka_bootstrap_servers,
azkaban_exector_shell_exec, azkaban_exector_sql_exec, azkaban_exector_xml_exec, azkaban_exector_sql_path, azkaban_exector_shell_path,
azkaban_local_task_file_path, azkaban_exector_shell_export_data, azkaban_monitor_url, atlas_monitor_url, shell_cmd_server,
shell_cmd_user, shell_cmd_password, shell_sftp_port, remark, data_status,
create_user_id, create_time, update_user_id, update_time
from dmp_public_config_info
where data_status='1'
</select>
<!-- 根据主键查询项目配置表 -->
<select
id=
"selectByPrimaryKey"
resultMap=
"BaseResultMap"
parameterType=
"java.lang.String"
>
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment