Project: 姚本章 / jz-dmp-service

Commit b8dc5470, authored Jan 15, 2021 by mcb
Commit message: no message

Parent commit: a02bf463
Showing 9 changed files with 426 additions and 72 deletions (+426, -72)
database/mcb/update.sql  (+10, -0)
...ules/controller/DataIntegration/DataSourceController.java  (+2, -0)
...troller/DataIntegration/bean/DmpSyncingDatasourceReq.java  (+142, -41)
.../java/com/jz/dmp/modules/dao/DmpSyncingDatasourceDao.java  (+12, -0)
...n/java/com/jz/dmp/modules/model/DmpSyncingDatasource.java  (+112, -0)
...m/jz/dmp/modules/service/DmpSyncingDatasourceService.java  (+48, -0)
...modules/service/impl/DmpSyncingDatasourceServiceImpl.java  (+52, -9)
src/main/resources/mapper/dmp/DmpSyncingDatasourceMapper.xml  (+28, -2)
src/main/resources/templates/datasource.json  (+20, -20)
database/mcb/update.sql
@@ -7,3 +7,13 @@ alter table dmp_syncing_datasource_type add DB_ATTRS json default null comment '
alter table dmp_syncing_datasource add NETWORK_CONNECTION_TYPE varchar(64) default null comment '网络连接类型';
alter table dmp_syncing_datasource add TEST_CONNECT_STATUS char(2) default '01' comment '测试连通状态:01未测试,02连通性正常,03连通性异常';
alter table dmp_realtime_sync_info add tree_id varchar(64) DEFAULT NULL COMMENT 'treeID';
alter table dmp_syncing_datasource
    add hdfs_namenode_principal varchar(64) DEFAULT NULL COMMENT 'hdfs_namenode_principal地址(HDFS)',
    add hive_metastore_uris_thrift varchar(64) DEFAULT NULL COMMENT 'hive_metastore_uris_thrift(HDFS)',
    add keytab_location varchar(64) DEFAULT NULL COMMENT 'keytab_location(HDFS)',
    add bootstrap_address varchar(128) DEFAULT NULL COMMENT 'bootstrap地址(Kafka)',
    add jaas_address varchar(128) DEFAULT NULL COMMENT 'jaas地址(Kafka)',
    add krb5_address varchar(128) DEFAULT NULL COMMENT 'kkudu_master(Kudu)' COMMENT 'krb5地址(Kafka)',
    add kudu_master varchar(128) DEFAULT NULL COMMENT 'kkudu_master(Kudu)',
    add impala_master_fqdn varchar(128) DEFAULT NULL COMMENT 'impala_master_fqdn(Kudu)';
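The TEST_CONNECT_STATUS column added above uses the codes 01 (未测试), 02 (连通性正常) and 03 (连通性异常), and the service layer below falls back to TestConnectStatusEnum.WCS from com.jz.common.enums when no status is supplied. A minimal sketch of what such an enum could look like, assuming the getValue() accessor seen in DmpSyncingDatasourceServiceImpl; the constant names other than WCS and the exact code mapping are assumptions, not taken from this commit:

    // Hypothetical sketch of com.jz.common.enums.TestConnectStatusEnum.
    public enum TestConnectStatusEnum {
        WCS("01"),  // 未测试 (not yet tested) -- default applied by the service layer
        ZC("02"),   // 连通性正常 (connection OK) -- constant name assumed
        YC("03");   // 连通性异常 (connection failed) -- constant name assumed

        private final String value;

        TestConnectStatusEnum(String value) {
            this.value = value;
        }

        public String getValue() {
            return value;
        }
    }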
src/main/java/com/jz/dmp/modules/controller/DataIntegration/DataSourceController.java
@@ -9,6 +9,7 @@ import com.jz.dmp.modules.controller.DataIntegration.bean.DmpSyncingDatasourceRe
import com.jz.dmp.modules.service.DmpSyncingDatasourceService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
@@ -135,6 +136,7 @@ public class DataSourceController {
     */
    @ApiOperation(value = "编辑数据源--根据id查询数据回显", notes = "编辑数据源--根据id查询数据回显")
    @GetMapping(value = "/selectDataSourceInfoById")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "datasourceId", value = "数据源id", required = true),
            @ApiImplicitParam(name = "projectId", value = "项目id")})
    public JsonResult getDataSourceInfoById(@RequestParam String datasourceId,
                                            @RequestParam(value = "projectId", required = false) String projectId) throws Exception {
        if (StringUtils.isEmpty(datasourceId)) {
            return new JsonResult(ResultCode.PARAMS_ERROR);
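The hunk above adds GET /selectDataSourceInfoById with a required datasourceId and an optional projectId. A minimal client-side sketch using Spring's RestTemplate, assuming the application listens on http://localhost:8080 and the controller declares no additional class-level path prefix (neither detail is confirmed by this diff); the sample ids are placeholders:

    import org.springframework.web.client.RestTemplate;

    public class SelectDataSourceInfoSketch {
        public static void main(String[] args) {
            // Host, port and the sample ids are illustrative only.
            String url = "http://localhost:8080/selectDataSourceInfoById?datasourceId={id}&projectId={pid}";
            // The endpoint wraps its payload in a JsonResult; read it here as a raw JSON string.
            String body = new RestTemplate().getForObject(url, String.class, "1", "31");
            System.out.println(body);
        }
    }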
src/main/java/com/jz/dmp/modules/controller/DataIntegration/bean/DmpSyncingDatasourceReq.java
@@ -16,6 +16,15 @@ import java.io.Serializable;
@ApiModel(value = "数据源配置", description = "数据源配置")
public class DmpSyncingDatasourceReq implements Serializable {

    private static final long serialVersionUID = 588846270711686919L;

    /**
     * 项目ID
     */
    @NotNull(message = "项目ID不能为空")
    @NotEmpty(message = "项目ID不能为空")
    @ApiModelProperty(value = "项目ID")
    private String projectId;

    /**
     * 数据源ID
     */
@@ -41,8 +50,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
    /**
     * JDBC URL
     */
    @NotNull(message = "JDBC URL不能为空")
    @NotEmpty(message = "JDBC URL不能为空")
    @ApiModelProperty(value = "JDBC URL")
    private String jdbcUrl;

    /**
@@ -53,8 +60,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
    /**
     * 账号
     */
    @NotNull(message = "账号不能为空")
    @NotEmpty(message = "账号不能为空")
    @ApiModelProperty(value = "账号")
    private String userName;
@@ -67,30 +72,9 @@ public class DmpSyncingDatasourceReq implements Serializable {
    /**
     * 密码
     */
    @NotNull(message = "密码不能为空")
    @NotEmpty(message = "密码不能为空")
    @ApiModelProperty(value = "密码")
    private String password;

    /**
     * 终端信息
     */
    @ApiModelProperty(value = "终端信息")
    private String endpoint;

    /**
     * Bucket信息
     */
    @ApiModelProperty(value = "Bucket信息")
    private String bucket;

    /**
     * accessId
     */
    @ApiModelProperty(value = "accessId")
    private String accessId;

    /**
     * accessKey
     */
    @ApiModelProperty(value = "accessKey")
    private String accessKey;

    /**
     * FTP协议
     */
@@ -99,17 +83,65 @@ public class DmpSyncingDatasourceReq implements Serializable {
    /**
     * IP
     */
    @ApiModelProperty(value = "Bucket信息")
    @ApiModelProperty(value = "IP")
    private String host;

    /**
     * 端口
     */
    @ApiModelProperty(value = "端口")
    private String port;

    /**
     * hdfsNamenodePrincipal
     */
    @ApiModelProperty(value = "hdfsNamenodePrincipal")
    private String hdfsNamenodePrincipal;

    /**
     * hiveMetastoreUrisThrift
     */
    @ApiModelProperty(value = "hiveMetastoreUrisThrift")
    private String hiveMetastoreUrisThrift;

    /**
     * keytabLocation
     */
    @ApiModelProperty(value = "keytabLocation")
    private String keytabLocation;

    /**
     * bootstrap地址
     */
    @ApiModelProperty(value = "bootstrap地址")
    private String bootstrapAddress;

    /**
     * jaas地址
     */
    @ApiModelProperty(value = "jaas地址")
    private String jaasAddress;

    /**
     * krb5地址
     */
    @ApiModelProperty(value = "krb5地址")
    private String krb5Address;

    /**
     * kudu Master
     */
    @ApiModelProperty(value = "kudu Master")
    private String kuduMaster;

    /**
     * impalaMasterFqdn
     */
    @ApiModelProperty(value = "impalaMasterFqdn")
    private String impalaMasterFqdn;

    /**
     * NameNode地址
     */
    @ApiModelProperty(value = "NameNode地址")
    private String defaultFs;

    /**
     * 表空间
@@ -118,17 +150,30 @@ public class DmpSyncingDatasourceReq implements Serializable {
    private String tableSchema;

    /**
     * 项目ID
     * 终端信息
     */
    @ApiModelProperty(value = "项目ID")
    private String projectId;
    @ApiModelProperty(value = "终端信息")
    private String endpoint;

    /**
     * Bucket信息
     */
    @ApiModelProperty(value = "Bucket信息")
    private String bucket;

    /**
     * accessId
     */
    @ApiModelProperty(value = "accessId")
    private String accessId;

    /**
     * accessKey
     */
    @ApiModelProperty(value = "accessKey")
    private String accessKey;

    private String delimiter;

    private String isHaveHeader;

    @ApiModelProperty(value = "网络连接类型")
    private String networkConnectionType;

    public String getDatasourceId() {
        return datasourceId;
@@ -290,14 +335,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
        this.isHaveHeader = isHaveHeader;
    }

    public String getNetworkConnectionType() {
        return networkConnectionType;
    }

    public void setNetworkConnectionType(String networkConnectionType) {
        this.networkConnectionType = networkConnectionType;
    }

    public String getTestConnectStatus() {
        return testConnectStatus;
    }
@@ -305,4 +342,68 @@ public class DmpSyncingDatasourceReq implements Serializable {
    public void setTestConnectStatus(String testConnectStatus) {
        this.testConnectStatus = testConnectStatus;
    }

    public String getHdfsNamenodePrincipal() {
        return hdfsNamenodePrincipal;
    }

    public void setHdfsNamenodePrincipal(String hdfsNamenodePrincipal) {
        this.hdfsNamenodePrincipal = hdfsNamenodePrincipal;
    }

    public String getHiveMetastoreUrisThrift() {
        return hiveMetastoreUrisThrift;
    }

    public void setHiveMetastoreUrisThrift(String hiveMetastoreUrisThrift) {
        this.hiveMetastoreUrisThrift = hiveMetastoreUrisThrift;
    }

    public String getKeytabLocation() {
        return keytabLocation;
    }

    public void setKeytabLocation(String keytabLocation) {
        this.keytabLocation = keytabLocation;
    }

    public String getBootstrapAddress() {
        return bootstrapAddress;
    }

    public void setBootstrapAddress(String bootstrapAddress) {
        this.bootstrapAddress = bootstrapAddress;
    }

    public String getJaasAddress() {
        return jaasAddress;
    }

    public void setJaasAddress(String jaasAddress) {
        this.jaasAddress = jaasAddress;
    }

    public String getKrb5Address() {
        return krb5Address;
    }

    public void setKrb5Address(String krb5Address) {
        this.krb5Address = krb5Address;
    }

    public String getKuduMaster() {
        return kuduMaster;
    }

    public void setKuduMaster(String kuduMaster) {
        this.kuduMaster = kuduMaster;
    }

    public String getImpalaMasterFqdn() {
        return impalaMasterFqdn;
    }

    public void setImpalaMasterFqdn(String impalaMasterFqdn) {
        this.impalaMasterFqdn = impalaMasterFqdn;
    }
}
\ No newline at end of file
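The hunk headers above (8→6, 8→6 and 30→9 lines) indicate that the @NotNull/@NotEmpty pairs on jdbcUrl, userName and password were removed in this commit, leaving projectId as the only bean-validated field of DmpSyncingDatasourceReq. A small sketch of exercising those constraints directly with the standard Bean Validation API (in Spring MVC they would normally fire via @Validated/@Valid on the controller parameter); assumes Hibernate Validator or another javax.validation provider is on the classpath:

    import javax.validation.ConstraintViolation;
    import javax.validation.Validation;
    import javax.validation.Validator;
    import java.util.Set;

    public class ReqValidationSketch {
        public static void main(String[] args) {
            Validator validator = Validation.buildDefaultValidatorFactory().getValidator();

            DmpSyncingDatasourceReq req = new DmpSyncingDatasourceReq();
            // projectId is left unset, so its @NotNull/@NotEmpty constraints should be the only violations.
            Set<ConstraintViolation<DmpSyncingDatasourceReq>> violations = validator.validate(req);
            violations.forEach(v -> System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
        }
    }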
src/main/java/com/jz/dmp/modules/dao/DmpSyncingDatasourceDao.java
@@ -91,8 +91,20 @@ public interface DmpSyncingDatasourceDao {
     */
    List<DataSourceListDto> queryDataSourceListPage(DataSourceListReq req) throws Exception;

    /**
     * 批量删除数据源
     *
     * @return
     * @author Bellamy
     */
    void delDataSourceById(@Param("ids") String[] ids) throws Exception;

    /**
     * 获取数据源类型-下拉框
     *
     * @return
     * @author Bellamy
     */
    List<Map> queryDatasourceType() throws Exception;

    /**
src/main/java/com/jz/dmp/modules/model/DmpSyncingDatasource.java
@@ -148,6 +148,54 @@ public class DmpSyncingDatasource implements Serializable {
    @ApiModelProperty(value = "测试连通状态:01未测试,02连通性正常,03连通性异常")
    private String testConnectStatus;

    /**
     * hdfsNamenodePrincipal
     */
    @ApiModelProperty(value = "hdfsNamenodePrincipal")
    private String hdfsNamenodePrincipal;

    /**
     * hiveMetastoreUrisThrift
     */
    @ApiModelProperty(value = "hiveMetastoreUrisThrift")
    private String hiveMetastoreUrisThrift;

    /**
     * keytabLocation
     */
    @ApiModelProperty(value = "keytabLocation")
    private String keytabLocation;

    /**
     * bootstrap地址
     */
    @ApiModelProperty(value = "bootstrap地址")
    private String bootstrapAddress;

    /**
     * jaas地址
     */
    @ApiModelProperty(value = "jaas地址")
    private String jaasAddress;

    /**
     * krb5地址
     */
    @ApiModelProperty(value = "krb5地址")
    private String krb5Address;

    /**
     * kudu Master
     */
    @ApiModelProperty(value = "kudu Master")
    private String kuduMaster;

    /**
     * impalaMasterFqdn
     */
    @ApiModelProperty(value = "impalaMasterFqdn")
    private String impalaMasterFqdn;

    public Integer getId() {
        return id;
    }
@@ -379,4 +427,68 @@ public class DmpSyncingDatasource implements Serializable {
    public void setTestConnectStatus(String testConnectStatus) {
        this.testConnectStatus = testConnectStatus;
    }

    public String getHdfsNamenodePrincipal() {
        return hdfsNamenodePrincipal;
    }

    public void setHdfsNamenodePrincipal(String hdfsNamenodePrincipal) {
        this.hdfsNamenodePrincipal = hdfsNamenodePrincipal;
    }

    public String getHiveMetastoreUrisThrift() {
        return hiveMetastoreUrisThrift;
    }

    public void setHiveMetastoreUrisThrift(String hiveMetastoreUrisThrift) {
        this.hiveMetastoreUrisThrift = hiveMetastoreUrisThrift;
    }

    public String getKeytabLocation() {
        return keytabLocation;
    }

    public void setKeytabLocation(String keytabLocation) {
        this.keytabLocation = keytabLocation;
    }

    public String getBootstrapAddress() {
        return bootstrapAddress;
    }

    public void setBootstrapAddress(String bootstrapAddress) {
        this.bootstrapAddress = bootstrapAddress;
    }

    public String getJaasAddress() {
        return jaasAddress;
    }

    public void setJaasAddress(String jaasAddress) {
        this.jaasAddress = jaasAddress;
    }

    public String getKrb5Address() {
        return krb5Address;
    }

    public void setKrb5Address(String krb5Address) {
        this.krb5Address = krb5Address;
    }

    public String getKuduMaster() {
        return kuduMaster;
    }

    public void setKuduMaster(String kuduMaster) {
        this.kuduMaster = kuduMaster;
    }

    public String getImpalaMasterFqdn() {
        return impalaMasterFqdn;
    }

    public void setImpalaMasterFqdn(String impalaMasterFqdn) {
        this.impalaMasterFqdn = impalaMasterFqdn;
    }
}
\ No newline at end of file
src/main/java/com/jz/dmp/modules/service/DmpSyncingDatasourceService.java
@@ -63,21 +63,69 @@ public interface DmpSyncingDatasourceService {
    PageInfoResponse<DataSourceListDto> queryDataSourceListPage(DataSourceListReq req, HttpServletRequest httpRequest) throws Exception;

    /**
     * 批量删除数据源
     *
     * @return
     * @author Bellamy
     */
    JsonResult delDataSourceById(String datasourceId) throws Exception;

    /**
     * 获取数据源类型-下拉框
     *
     * @return
     * @author Bellamy
     */
    JsonResult queryDatasourceType() throws Exception;

    /**
     * 新增获取数据源类型
     *
     * @return
     * @author Bellamy
     */
    JsonResult queryGroupDatasourceType() throws Exception;

    /**
     * 保存数据源
     *
     * @return
     * @author Bellamy
     */
    JsonResult addDatasourceInfo(DmpSyncingDatasourceReq saveBody) throws Exception;

    /**
     * 测试连通性
     *
     * @return
     * @author Bellamy
     */
    JsonResult testConnection(DmpSyncingDatasourceReq saveBody) throws Exception;

    /**
     * 编辑数据源,根据id查询数据回显
     *
     * @return
     * @author Bellamy
     */
    JsonResult selectDataSourceInfoById(Map map) throws Exception;

    /**
     * 编辑数据源
     *
     * @return
     * @author Bellamy
     */
    JsonResult updateDatasourceById(DmpSyncingDatasourceReq saveBody) throws Exception;

    List<DmpSyncingDatasource> findListByParams(DmpSyncingDatasource ds) throws Exception;

    /**
     * 获取数据源类型输入框属性
     *
     * @return
     * @author Bellamy
     */
    JsonResult selectDatasourceTypeAttrById(String datasourceTypeId) throws Exception;
}
\ No newline at end of file
src/main/java/com/jz/dmp/modules/service/impl/DmpSyncingDatasourceServiceImpl.java
@@ -10,6 +10,7 @@ import com.jz.common.enums.TestConnectStatusEnum;
import com.jz.common.page.PageInfoResponse;
import com.jz.common.persistence.BaseService;
import com.jz.common.utils.JsonMapper;
import com.jz.common.utils.web.SessionUtils;
import com.jz.dmp.agent.DmpAgentResult;
import com.jz.dmp.modules.controller.DataIntegration.bean.DataSourceListDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.DataSourceListReq;
@@ -155,6 +156,12 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
        return new JsonResult();
    }

    /**
     * 获取数据源类型-下拉框
     *
     * @return
     * @author Bellamy
     */
    @Override
    public JsonResult queryDatasourceType() throws Exception {
        List<Map> list = dmpSyncingDatasourceDao.queryDatasourceType();
@@ -203,32 +210,45 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
        if (StringUtils.isEmpty(saveBody.getDatasourceName())) {
            return new JsonResult(ResultCode.PARAMS_ERROR, "数据源名称不能为空");
        }
        if (StringUtils.isEmpty(saveBody.getDatasourceType())) {
            return new JsonResult(ResultCode.PARAMS_ERROR, "数据源类型ID不能为空");
        }
        if (StringUtils.isEmpty(saveBody.getProjectId())) {
            return new JsonResult(ResultCode.PARAMS_ERROR, "项目ID不能为空");
        }
        //通过名称查询数据源是否存在
        int len = dmpSyncingDatasourceDao.countDatasourceByName(saveBody.getDatasourceName(), saveBody.getProjectId());
        if (len > 0) {
            return new JsonResult(ResultCode.PARAMS_ERROR, "数据源名称已存在");
        }
        //解析要保存的数据源信息
        DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
        DmpSyncingDatasource dsd = new DmpSyncingDatasource();
        BeanUtils.copyProperties(saveBody, dsd);
        dsd.setCreateTime(new Date());
        dsd.setCreateUserId(SessionUtils.getCurrentUserId());
        dsd.setDatasourceType(Integer.valueOf(saveBody.getDatasourceType())); //数据源类型ID
        dsd.setProjectId(Integer.valueOf(saveBody.getProjectId()));
        dsd.setDataStatus("1");
        dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
        if (StringUtils.isEmpty(saveBody.getTestConnectStatus())) {
            //默认未测试
            dsd.setTestConnectStatus(TestConnectStatusEnum.WCS.getValue());
        }
        dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
        dsd.setDbName(dmpSyncingDatasource.getDbName());
        dsd.setHost(dmpSyncingDatasource.getHost());
        dsd.setPort(dmpSyncingDatasource.getPort());
        dsd.setPassword(dmpSyncingDatasource.getPassword());
        //解析要保存的数据源信息
        DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
        dsd.setPassword(dmpSyncingDatasource.getPassword()); //密码
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getDbName())) {
            dsd.setDbName(dmpSyncingDatasource.getDbName()); //数据库名称
        }
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getHost())) {
            dsd.setHost(dmpSyncingDatasource.getHost()); //IP
        }
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getPort())) {
            dsd.setPort(dmpSyncingDatasource.getPort()); //端口
        }
        dmpSyncingDatasourceDao.insert(dsd);
        return new JsonResult(dsd);
        return new JsonResult();
    }

    /**
@@ -291,6 +311,12 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
        }
    }

    /**
     * 编辑数据源,根据id查询数据回显
     *
     * @return
     * @author Bellamy
     */
    @Override
    public JsonResult selectDataSourceInfoById(Map map) throws Exception {
        DataSourceListDto asd = dmpSyncingDatasourceDao.selectDataSourceInfoById(map);
@@ -310,6 +336,8 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
        BeanUtils.copyProperties(saveBody, dsd);
        dsd.setUpdateTime(new Date());
        dsd.setUpdateUserId(SessionUtils.getCurrentUserId());
        dsd.setDataStatus("1");
        if (StringUtils.isNotEmpty(saveBody.getDatasourceType())) {
            dsd.setDatasourceType(Integer.valueOf(saveBody.getDatasourceType())); //数据源类型ID
        }
@@ -319,7 +347,22 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
        if (StringUtils.isNotEmpty(saveBody.getDatasourceId())) { //数据源id
            dsd.setId(Integer.valueOf(saveBody.getDatasourceId()));
        }
        dsd.setDataStatus("1");
        dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
        if (StringUtils.isEmpty(saveBody.getTestConnectStatus())) {
            //默认未测试
            dsd.setTestConnectStatus(TestConnectStatusEnum.WCS.getValue());
        }
        //解析要保存的数据源信息
        DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
        dsd.setPassword(dmpSyncingDatasource.getPassword()); //密码
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getDbName())) {
            dsd.setDbName(dmpSyncingDatasource.getDbName()); //数据库名称
        }
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getHost())) {
            dsd.setHost(dmpSyncingDatasource.getHost()); //IP
        }
        if (StringUtils.isNotEmpty(dmpSyncingDatasource.getPort())) {
            dsd.setPort(dmpSyncingDatasource.getPort()); //端口
        }
        int len = dmpSyncingDatasourceDao.update(dsd);
        if (len > 0) {
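Both the save and the update path above rely on BeanUtils.copyProperties(saveBody, dsd) to move the same-named properties from the request bean into the entity, and then overwrite the fields that need parsing or type conversion (datasourceType, projectId, password, dbName, host, port) from getDataSourceInfo(saveBody). A minimal sketch of that copy behaviour, assuming Spring's org.springframework.beans.BeanUtils, which matches the (source, target) argument order used here; the two demo classes are illustrative only:

    import org.springframework.beans.BeanUtils;

    public class CopyPropertiesSketch {
        public static class Source {
            private String host = "10.0.0.1";
            public String getHost() { return host; }
            public void setHost(String host) { this.host = host; }
        }

        public static class Target {
            private String host;
            public String getHost() { return host; }
            public void setHost(String host) { this.host = host; }
        }

        public static void main(String[] args) {
            Source src = new Source();
            Target dst = new Target();
            // Only properties with matching names and types are copied; everything else
            // must be set by hand, which is why the service re-sets the converted fields afterwards.
            BeanUtils.copyProperties(src, dst);
            System.out.println(dst.getHost()); // prints 10.0.0.1
        }
    }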
src/main/resources/mapper/dmp/DmpSyncingDatasourceMapper.xml
@@ -127,9 +127,11 @@
    <!--新增所有列-->
    <insert id="insert" keyProperty="id" useGeneratedKeys="true">
        insert into dmp_syncing_datasource(DATASOURCE_TYPE, DATASOURCE_NAME, DATASOURCE_DESC, JDBC_URL, DB_NAME, USER_NAME, PASSWORD, ENDPOINT, BUCKET, ACCESS_ID
        , ACCESS_KEY, PROTOCOL, HOST, PORT, DEFAULT_FS, TABLE_SCHEMA, DATA_STATUS, CREATE_USER_ID, CREATE_TIME, UPDATE_USER_ID, UPDATE_TIME, PROJECT_ID, NETWORK_CONNECTION_TYPE ,TEST_CONNECT_STATUS)
        , ACCESS_KEY, PROTOCOL, HOST, PORT, DEFAULT_FS, TABLE_SCHEMA, DATA_STATUS, CREATE_USER_ID, CREATE_TIME, UPDATE_USER_ID, UPDATE_TIME, PROJECT_ID
        , NETWORK_CONNECTION_TYPE ,TEST_CONNECT_STATUS,hdfs_namenode_principal,hive_metastore_uris_thrift,keytab_location,bootstrap_address,jaas_address,krb5_address,kudu_master,impala_master_fqdn)
        values (#{datasourceType}, #{datasourceName}, #{datasourceDesc}, #{jdbcUrl}, #{dbName}, #{userName}, #{password}, #{endpoint}, #{bucket}, #{accessId}, #{accessKey}
        , #{protocol}, #{host}, #{port}, #{defaultFs}, #{tableSchema}, #{dataStatus}, #{createUserId}, #{createTime}, #{updateUserId}, #{updateTime}, #{projectId}, #{networkConnectionType} ,#{testConnectStatus})
        , #{protocol}, #{host}, #{port}, #{defaultFs}, #{tableSchema}, #{dataStatus}, #{createUserId}, #{createTime}, #{updateUserId}, #{updateTime}, #{projectId}
        , #{networkConnectionType} ,#{testConnectStatus},#{hdfsNamenodePrincipal},#{hiveMetastoreUrisThrift},#{keytabLocation},#{bootstrapAddress},#{jaasAddress},#{krb5Address},#{kuduMaster},#{impalaMasterFqdn})
    </insert>

    <insert id="insertBatch" keyProperty="id" useGeneratedKeys="true">
@@ -241,6 +243,30 @@
            <if test="networkConnectionType != null">
                NETWORK_CONNECTION_TYPE = #{networkConnectionType},
            </if>
            <if test="hdfsNamenodePrincipal != null">
                hdfs_namenode_principal = #{hdfsNamenodePrincipal},
            </if>
            <if test="hiveMetastoreUrisThrift != null">
                hive_metastore_uris_thrift = #{hiveMetastoreUrisThrift},
            </if>
            <if test="keytabLocation != null">
                keytab_location = #{keytabLocation},
            </if>
            <if test="bootstrapAddress != null">
                bootstrap_address = #{bootstrapAddress},
            </if>
            <if test="jaasAddress != null">
                jaas_address = #{jaasAddress},
            </if>
            <if test="krb5Address != null">
                krb5_address = #{krb5Address},
            </if>
            <if test="kuduMaster != null">
                kudu_master = #{kuduMaster},
            </if>
            <if test="impalaMasterFqdn != null">
                impala_master_fqdn = #{impalaMasterFqdn},
            </if>
        </set>
        where ID = #{id}
    </update>
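The <if test="... != null"> guards added to the <set> block above mean the generated UPDATE only touches columns whose corresponding bean property is non-null. A hedged usage sketch, assuming the DmpSyncingDatasourceDao mapper is injectable as a Spring bean the same way DmpSyncingDatasourceServiceImpl uses it; the id and broker list are placeholders:

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Component;

    @Component
    public class DatasourcePatchSketch {

        @Autowired
        private DmpSyncingDatasourceDao dmpSyncingDatasourceDao;

        public void patchBootstrapAddress() throws Exception {
            DmpSyncingDatasource patch = new DmpSyncingDatasource();
            patch.setId(42);                                        // WHERE ID = #{id}
            patch.setBootstrapAddress("broker1:9092,broker2:9092"); // only bootstrap_address lands in SET
            // Every other property stays null, so its <if> guard is skipped and the column is left untouched.
            dmpSyncingDatasourceDao.update(patch);
        }
    }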
src/main/resources/templates/datasource.json
@@ -41,24 +41,24 @@
"databaseType"
:
"Hive"
},
{
"jdbcUrl"
:
"Y"
,
"password"
:
"Y"
,
//密码
"userName"
:
"Y"
,
//用户名
"
datasourceDesc"
:
"Y"
,
//数据源描述
"datasource
Name"
:
"Y"
,
//数据源名称
"
kudu Master"
:
"Y"
,
//
"impala
Master fqdn"
:
"Y"
,
//
"jdbcUrl"
:
"Y"
,
"password"
:
"N"
,
"userName"
:
"N"
,
"
kudu_Master"
:
"Y"
,
"datasource
Desc"
:
"N"
,
"
datasourceName"
:
"Y"
,
"impala
_Master_fqdn"
:
"Y"
,
"databaseType"
:
"Kudu"
},
{
"jdbcUrl"
:
"Y"
,
"password"
:
"Y"
,
//密码
"userName"
:
"Y"
,
//用户名
"datasourceDesc"
:
"Y"
,
//数据源描述
"datasourceName"
:
"Y"
,
//数据源名称
"
hdfs.namenode.principal"
:
"Y"
,
//
"h
ive.metastore.uris:thrift"
:
"Y"
,
//
"
keytab.location
"
:
"Y"
,
"jdbcUrl"
:
"Y"
,
"password"
:
"N"
,
"userName"
:
"N"
,
"datasourceDesc"
:
"N"
,
"datasourceName"
:
"Y"
,
"
keytab_location"
:
"Y"
,
"h
dfs_namenode_principal"
:
"Y"
,
"
hive_metastore_uris_thrift
"
:
"Y"
,
"databaseType"
:
"HDFS"
},
{
@@ -80,11 +80,11 @@
"databaseType"
:
"Elasticsearch"
},
{
"
datasourceDesc"
:
"Y"
,
//数据源描述
"
datasourceName"
:
"Y"
,
//数据源名称
"
bootstrap地址"
:
"Y
"
,
"
jaas地址
"
:
"Y"
,
"
krb5地址
"
:
"Y"
,
"
jaas_address"
:
"N"
,
"
krb5_address"
:
"N"
,
"
datasourceDesc"
:
"N
"
,
"
datasourceName
"
:
"Y"
,
"
bootstrap_address
"
:
"Y"
,
"databaseType"
:
"Kafka"
}
]
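Note that src/main/resources/templates/datasource.json keeps //-style comments, which a strict JSON parser rejects. If the file is loaded with plain Jackson (the project ships its own com.jz.common.utils.JsonMapper wrapper, whose behaviour is not shown in this diff), comment support has to be enabled explicitly. A minimal sketch under that assumption:

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.InputStream;

    public class DatasourceTemplateReader {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // Tolerate the //-style comments used in datasource.json.
            mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);

            try (InputStream in = DatasourceTemplateReader.class
                    .getResourceAsStream("/templates/datasource.json")) {
                JsonNode templates = mapper.readTree(in);
                // Each entry flags which form fields are required ("Y") or optional ("N") per databaseType.
                templates.forEach(node -> System.out.println(node.get("databaseType")));
            }
        }
    }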