Commit b8dc5470 authored by mcb's avatar mcb

no message

parent a02bf463
......@@ -7,3 +7,13 @@ alter table dmp_syncing_datasource_type add DB_ATTRS json default null comment '
-- Migration: datasource connectivity metadata + Kerberos/Kafka/Kudu connection attributes.
-- All statements are additive (ADD COLUMN, NULL-able defaults) and therefore backward-compatible.
alter table dmp_syncing_datasource add NETWORK_CONNECTION_TYPE varchar(64) DEFAULT NULL COMMENT '网络连接类型';
alter table dmp_syncing_datasource add TEST_CONNECT_STATUS char(2) DEFAULT '01' COMMENT '测试连通状态:01未测试,02连通性正常,03连通性异常';
alter table dmp_realtime_sync_info add tree_id varchar(64) DEFAULT NULL COMMENT 'treeID';
-- New connection attributes for HDFS (Kerberos), Kafka and Kudu datasource types.
alter table dmp_syncing_datasource
add hdfs_namenode_principal varchar(64) DEFAULT NULL COMMENT 'hdfs_namenode_principal地址(HDFS)',
add hive_metastore_uris_thrift varchar(64) DEFAULT NULL COMMENT 'hive_metastore_uris_thrift(HDFS)',
add keytab_location varchar(64) DEFAULT NULL COMMENT 'keytab_location(HDFS)',
add bootstrap_address varchar(128) DEFAULT NULL COMMENT 'bootstrap地址(Kafka)',
add jaas_address varchar(128) DEFAULT NULL COMMENT 'jaas地址(Kafka)',
add krb5_address varchar(128) DEFAULT NULL COMMENT 'krb5地址(Kafka)',
-- fixed typo: comment previously read 'kkudu_master(Kudu)'
add kudu_master varchar(128) DEFAULT NULL COMMENT 'kudu_master(Kudu)',
add impala_master_fqdn varchar(128) DEFAULT NULL COMMENT 'impala_master_fqdn(Kudu)';
......@@ -9,6 +9,7 @@ import com.jz.dmp.modules.controller.DataIntegration.bean.DmpSyncingDatasourceRe
import com.jz.dmp.modules.service.DmpSyncingDatasourceService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
......@@ -135,6 +136,7 @@ public class DataSourceController {
*/
@ApiOperation(value = "编辑数据源--根据id查询数据回显", notes = "编辑数据源--根据id查询数据回显")
@GetMapping(value = "/selectDataSourceInfoById")
@ApiImplicitParams({@ApiImplicitParam(name = "datasourceId", value = "数据源id" ,required = true),@ApiImplicitParam(name = "projectId", value = "项目id")})
public JsonResult getDataSourceInfoById(@RequestParam String datasourceId, @RequestParam(value = "projectId", required = false) String projectId) throws Exception {
if (StringUtils.isEmpty(datasourceId)) {
return new JsonResult(ResultCode.PARAMS_ERROR);
......
......@@ -16,6 +16,15 @@ import java.io.Serializable;
@ApiModel(value = "数据源配置", description = "数据源配置")
public class DmpSyncingDatasourceReq implements Serializable {
private static final long serialVersionUID = 588846270711686919L;
/**
* 项目ID
*/
@NotNull(message = "项目ID不能为空")
@NotEmpty(message = "项目ID不能为空")
@ApiModelProperty(value = "项目ID")
private String projectId;
/**
* 数据源ID
*/
......@@ -41,8 +50,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
/**
* JDBC URL
*/
@NotNull(message = "JDBC URL不能为空")
@NotEmpty(message = "JDBC URL不能为空")
@ApiModelProperty(value = "JDBC URL")
private String jdbcUrl;
/**
......@@ -53,8 +60,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
/**
* 账号
*/
@NotNull(message = "账号不能为空")
@NotEmpty(message = "账号不能为空")
@ApiModelProperty(value = "账号")
private String userName;
......@@ -67,30 +72,9 @@ public class DmpSyncingDatasourceReq implements Serializable {
/**
* 密码
*/
@NotNull(message = "密码不能为空")
@NotEmpty(message = "密码不能为空")
@ApiModelProperty(value = "密码")
private String password;
/**
* 终端信息
*/
@ApiModelProperty(value = "终端信息")
private String endpoint;
/**
* Bucket信息
*/
@ApiModelProperty(value = "Bucket信息")
private String bucket;
/**
* accessId
*/
@ApiModelProperty(value = "accessId")
private String accessId;
/**
* accessKey
*/
@ApiModelProperty(value = "accessKey")
private String accessKey;
/**
* FTP协议
*/
......@@ -99,17 +83,65 @@ public class DmpSyncingDatasourceReq implements Serializable {
/**
* IP
*/
@ApiModelProperty(value = "Bucket信息")
@ApiModelProperty(value = "IP")
private String host;
/**
* 端口
*/
@ApiModelProperty(value = "端口")
private String port;
/**
* hdfsNamenodePrincipal
*/
@ApiModelProperty(value = "hdfsNamenodePrincipal")
private String hdfsNamenodePrincipal;
/**
* hiveMetastoreUrisThrift
*/
@ApiModelProperty(value = "hiveMetastoreUrisThrift")
private String hiveMetastoreUrisThrift;
/**
* keytabLocation
*/
@ApiModelProperty(value = "keytabLocation")
private String keytabLocation;
/**
* bootstrap地址
*/
@ApiModelProperty(value = "bootstrap地址")
private String bootstrapAddress;
/**
* jaas地址
*/
@ApiModelProperty(value = "jaas地址")
private String jaasAddress;
/**
* krb5地址
*/
@ApiModelProperty(value = "krb5地址")
private String krb5Address;
/**
* kudu Master
*/
@ApiModelProperty(value = "kudu Master")
private String kuduMaster;
/**
* impalaMasterFqdn
*/
@ApiModelProperty(value = "impalaMasterFqdn")
private String impalaMasterFqdn;
/**
* NameNode地址
*/
@ApiModelProperty(value = "NameNode地址")
private String defaultFs;
/**
* 表空间
......@@ -118,17 +150,30 @@ public class DmpSyncingDatasourceReq implements Serializable {
private String tableSchema;
/**
* 项目ID
* 终端信息
*/
@ApiModelProperty(value = "项目ID")
private String projectId;
@ApiModelProperty(value = "终端信息")
private String endpoint;
/**
* Bucket信息
*/
@ApiModelProperty(value = "Bucket信息")
private String bucket;
/**
* accessId
*/
@ApiModelProperty(value = "accessId")
private String accessId;
/**
* accessKey
*/
@ApiModelProperty(value = "accessKey")
private String accessKey;
private String delimiter;
private String isHaveHeader;
@ApiModelProperty(value = "网络连接类型")
private String networkConnectionType;
public String getDatasourceId() {
return datasourceId;
......@@ -290,14 +335,6 @@ public class DmpSyncingDatasourceReq implements Serializable {
this.isHaveHeader = isHaveHeader;
}
public String getNetworkConnectionType() {
return networkConnectionType;
}
public void setNetworkConnectionType(String networkConnectionType) {
this.networkConnectionType = networkConnectionType;
}
public String getTestConnectStatus() {
return testConnectStatus;
}
......@@ -305,4 +342,68 @@ public class DmpSyncingDatasourceReq implements Serializable {
public void setTestConnectStatus(String testConnectStatus) {
this.testConnectStatus = testConnectStatus;
}
// Accessors for the HDFS-with-Kerberos connection attributes.
public String getHdfsNamenodePrincipal() {
return hdfsNamenodePrincipal;
}
public void setHdfsNamenodePrincipal(String hdfsNamenodePrincipal) {
this.hdfsNamenodePrincipal = hdfsNamenodePrincipal;
}
public String getHiveMetastoreUrisThrift() {
return hiveMetastoreUrisThrift;
}
public void setHiveMetastoreUrisThrift(String hiveMetastoreUrisThrift) {
this.hiveMetastoreUrisThrift = hiveMetastoreUrisThrift;
}
public String getKeytabLocation() {
return keytabLocation;
}
public void setKeytabLocation(String keytabLocation) {
this.keytabLocation = keytabLocation;
}
// Accessors for the Kafka connection attributes (bootstrap / JAAS / krb5 addresses).
public String getBootstrapAddress() {
return bootstrapAddress;
}
public void setBootstrapAddress(String bootstrapAddress) {
this.bootstrapAddress = bootstrapAddress;
}
public String getJaasAddress() {
return jaasAddress;
}
public void setJaasAddress(String jaasAddress) {
this.jaasAddress = jaasAddress;
}
public String getKrb5Address() {
return krb5Address;
}
public void setKrb5Address(String krb5Address) {
this.krb5Address = krb5Address;
}
// Accessors for the Kudu connection attributes (master address / Impala master FQDN).
public String getKuduMaster() {
return kuduMaster;
}
public void setKuduMaster(String kuduMaster) {
this.kuduMaster = kuduMaster;
}
public String getImpalaMasterFqdn() {
return impalaMasterFqdn;
}
public void setImpalaMasterFqdn(String impalaMasterFqdn) {
this.impalaMasterFqdn = impalaMasterFqdn;
}
}
\ No newline at end of file
......@@ -91,8 +91,20 @@ public interface DmpSyncingDatasourceDao {
*/
List<DataSourceListDto> queryDataSourceListPage(DataSourceListReq req) throws Exception;
/**
* 批量删除数据源
*
* @return
* @author Bellamy
*/
void delDataSourceById(@Param("ids") String[] ids) throws Exception;
/**
* 获取数据源类型-下拉框
*
* @return
* @author Bellamy
*/
List<Map> queryDatasourceType() throws Exception;
/**
......
......@@ -148,6 +148,54 @@ public class DmpSyncingDatasource implements Serializable {
@ApiModelProperty(value = "测试连通状态:01未测试,02连通性正常,03连通性异常")
private String testConnectStatus;
/**
* hdfsNamenodePrincipal
*/
@ApiModelProperty(value = "hdfsNamenodePrincipal")
private String hdfsNamenodePrincipal;
/**
* hiveMetastoreUrisThrift
*/
@ApiModelProperty(value = "hiveMetastoreUrisThrift")
private String hiveMetastoreUrisThrift;
/**
* keytabLocation
*/
@ApiModelProperty(value = "keytabLocation")
private String keytabLocation;
/**
* bootstrap地址
*/
@ApiModelProperty(value = "bootstrap地址")
private String bootstrapAddress;
/**
* jaas地址
*/
@ApiModelProperty(value = "jaas地址")
private String jaasAddress;
/**
* krb5地址
*/
@ApiModelProperty(value = "krb5地址")
private String krb5Address;
/**
* kudu Master
*/
@ApiModelProperty(value = "kudu Master")
private String kuduMaster;
/**
* impalaMasterFqdn
*/
@ApiModelProperty(value = "impalaMasterFqdn")
private String impalaMasterFqdn;
public Integer getId() {
return id;
}
......@@ -379,4 +427,68 @@ public class DmpSyncingDatasource implements Serializable {
public void setTestConnectStatus(String testConnectStatus) {
this.testConnectStatus = testConnectStatus;
}
// Accessors for the HDFS-with-Kerberos connection columns (hdfs_namenode_principal, hive_metastore_uris_thrift, keytab_location).
public String getHdfsNamenodePrincipal() {
return hdfsNamenodePrincipal;
}
public void setHdfsNamenodePrincipal(String hdfsNamenodePrincipal) {
this.hdfsNamenodePrincipal = hdfsNamenodePrincipal;
}
public String getHiveMetastoreUrisThrift() {
return hiveMetastoreUrisThrift;
}
public void setHiveMetastoreUrisThrift(String hiveMetastoreUrisThrift) {
this.hiveMetastoreUrisThrift = hiveMetastoreUrisThrift;
}
public String getKeytabLocation() {
return keytabLocation;
}
public void setKeytabLocation(String keytabLocation) {
this.keytabLocation = keytabLocation;
}
// Accessors for the Kafka connection columns (bootstrap_address, jaas_address, krb5_address).
public String getBootstrapAddress() {
return bootstrapAddress;
}
public void setBootstrapAddress(String bootstrapAddress) {
this.bootstrapAddress = bootstrapAddress;
}
public String getJaasAddress() {
return jaasAddress;
}
public void setJaasAddress(String jaasAddress) {
this.jaasAddress = jaasAddress;
}
public String getKrb5Address() {
return krb5Address;
}
public void setKrb5Address(String krb5Address) {
this.krb5Address = krb5Address;
}
// Accessors for the Kudu connection columns (kudu_master, impala_master_fqdn).
public String getKuduMaster() {
return kuduMaster;
}
public void setKuduMaster(String kuduMaster) {
this.kuduMaster = kuduMaster;
}
public String getImpalaMasterFqdn() {
return impalaMasterFqdn;
}
public void setImpalaMasterFqdn(String impalaMasterFqdn) {
this.impalaMasterFqdn = impalaMasterFqdn;
}
}
\ No newline at end of file
......@@ -63,21 +63,69 @@ public interface DmpSyncingDatasourceService {
PageInfoResponse<DataSourceListDto> queryDataSourceListPage(DataSourceListReq req, HttpServletRequest httpRequest) throws Exception;
/**
* 批量删除数据源
*
* @return
* @author Bellamy
*/
JsonResult delDataSourceById(String datasourceId) throws Exception;
/**
* 获取数据源类型-下拉框
*
* @return
* @author Bellamy
*/
JsonResult queryDatasourceType() throws Exception;
/**
* 新增获取数据源类型
*
* @return
* @author Bellamy
*/
JsonResult queryGroupDatasourceType() throws Exception;
/**
* 保存数据源
*
* @return
* @author Bellamy
*/
JsonResult addDatasourceInfo(DmpSyncingDatasourceReq saveBody) throws Exception;
/**
* 测试连通性
*
* @return
* @author Bellamy
*/
JsonResult testConnection(DmpSyncingDatasourceReq saveBody) throws Exception;
/**
* 编辑数据源,根据id查询数据回显
*
* @return
* @author Bellamy
*/
JsonResult selectDataSourceInfoById(Map map) throws Exception;
/**
* 编辑数据源
*
* @return
* @author Bellamy
*/
JsonResult updateDatasourceById(DmpSyncingDatasourceReq saveBody) throws Exception;
List<DmpSyncingDatasource> findListByParams(DmpSyncingDatasource ds) throws Exception;
/**
* 获取数据源类型输入框属性
*
* @return
* @author Bellamy
*/
JsonResult selectDatasourceTypeAttrById(String datasourceTypeId) throws Exception;
}
\ No newline at end of file
......@@ -10,6 +10,7 @@ import com.jz.common.enums.TestConnectStatusEnum;
import com.jz.common.page.PageInfoResponse;
import com.jz.common.persistence.BaseService;
import com.jz.common.utils.JsonMapper;
import com.jz.common.utils.web.SessionUtils;
import com.jz.dmp.agent.DmpAgentResult;
import com.jz.dmp.modules.controller.DataIntegration.bean.DataSourceListDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.DataSourceListReq;
......@@ -155,6 +156,12 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
return new JsonResult();
}
/**
* 获取数据源类型-下拉框
*
* @return
* @author Bellamy
*/
@Override
public JsonResult queryDatasourceType() throws Exception {
List<Map> list = dmpSyncingDatasourceDao.queryDatasourceType();
......@@ -203,32 +210,45 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
if (StringUtils.isEmpty(saveBody.getDatasourceName())) {
return new JsonResult(ResultCode.PARAMS_ERROR, "数据源名称不能为空");
}
if (StringUtils.isEmpty(saveBody.getDatasourceType())) {
return new JsonResult(ResultCode.PARAMS_ERROR, "数据源类型ID不能为空");
}
if (StringUtils.isEmpty(saveBody.getProjectId())) {
return new JsonResult(ResultCode.PARAMS_ERROR, "项目ID不能为空");
}
//通过名称查询数据源是否存在
int len = dmpSyncingDatasourceDao.countDatasourceByName(saveBody.getDatasourceName(), saveBody.getProjectId());
if (len > 0) {
return new JsonResult(ResultCode.PARAMS_ERROR, "数据源名称已存在");
}
//解析要保存的数据源信息
DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
DmpSyncingDatasource dsd = new DmpSyncingDatasource();
BeanUtils.copyProperties(saveBody, dsd);
dsd.setCreateTime(new Date());
dsd.setCreateUserId(SessionUtils.getCurrentUserId());
dsd.setDatasourceType(Integer.valueOf(saveBody.getDatasourceType())); //数据源类型ID
dsd.setProjectId(Integer.valueOf(saveBody.getProjectId()));
dsd.setDataStatus("1");
dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
if (StringUtils.isEmpty(saveBody.getTestConnectStatus())) { //默认未测试
dsd.setTestConnectStatus(TestConnectStatusEnum.WCS.getValue());
}
dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
dsd.setDbName(dmpSyncingDatasource.getDbName());
dsd.setHost(dmpSyncingDatasource.getHost());
dsd.setPort(dmpSyncingDatasource.getPort());
dsd.setPassword(dmpSyncingDatasource.getPassword());
//解析要保存的数据源信息
DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
dsd.setPassword(dmpSyncingDatasource.getPassword()); //密码
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getDbName())) {
dsd.setDbName(dmpSyncingDatasource.getDbName()); //数据库名称
}
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getHost())) {
dsd.setHost(dmpSyncingDatasource.getHost()); //IP
}
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getPort())) {
dsd.setPort(dmpSyncingDatasource.getPort()); //端口
}
dmpSyncingDatasourceDao.insert(dsd);
return new JsonResult(dsd);
return new JsonResult();
}
/**
......@@ -291,6 +311,12 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
}
}
/**
* 编辑数据源,根据id查询数据回显
*
* @return
* @author Bellamy
*/
@Override
public JsonResult selectDataSourceInfoById(Map map) throws Exception {
DataSourceListDto asd = dmpSyncingDatasourceDao.selectDataSourceInfoById(map);
......@@ -310,6 +336,8 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
BeanUtils.copyProperties(saveBody, dsd);
dsd.setUpdateTime(new Date());
dsd.setUpdateUserId(SessionUtils.getCurrentUserId());
dsd.setDataStatus("1");
if (StringUtils.isNotEmpty(saveBody.getDatasourceType())) {
dsd.setDatasourceType(Integer.valueOf(saveBody.getDatasourceType())); //数据源类型ID
}
......@@ -319,7 +347,22 @@ public class DmpSyncingDatasourceServiceImpl implements DmpSyncingDatasourceServ
if (StringUtils.isNotEmpty(saveBody.getDatasourceId())) { //数据源id
dsd.setId(Integer.valueOf(saveBody.getDatasourceId()));
}
dsd.setDataStatus("1");
dsd.setTestConnectStatus(saveBody.getTestConnectStatus());
if (StringUtils.isEmpty(saveBody.getTestConnectStatus())) { //默认未测试
dsd.setTestConnectStatus(TestConnectStatusEnum.WCS.getValue());
}
//解析要保存的数据源信息
DmpSyncingDatasource dmpSyncingDatasource = getDataSourceInfo(saveBody);
dsd.setPassword(dmpSyncingDatasource.getPassword()); //密码
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getDbName())) {
dsd.setDbName(dmpSyncingDatasource.getDbName()); //数据库名称
}
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getHost())) {
dsd.setHost(dmpSyncingDatasource.getHost()); //IP
}
if (StringUtils.isNotEmpty(dmpSyncingDatasource.getPort())) {
dsd.setPort(dmpSyncingDatasource.getPort()); //端口
}
int len = dmpSyncingDatasourceDao.update(dsd);
if (len > 0) {
......
......@@ -127,9 +127,11 @@
<!--新增所有列-->
<insert id="insert" keyProperty="id" useGeneratedKeys="true">
insert into dmp_syncing_datasource(DATASOURCE_TYPE, DATASOURCE_NAME, DATASOURCE_DESC, JDBC_URL, DB_NAME, USER_NAME, PASSWORD, ENDPOINT, BUCKET, ACCESS_ID
, ACCESS_KEY, PROTOCOL, HOST, PORT, DEFAULT_FS, TABLE_SCHEMA, DATA_STATUS, CREATE_USER_ID, CREATE_TIME, UPDATE_USER_ID, UPDATE_TIME, PROJECT_ID, NETWORK_CONNECTION_TYPE ,TEST_CONNECT_STATUS)
, ACCESS_KEY, PROTOCOL, HOST, PORT, DEFAULT_FS, TABLE_SCHEMA, DATA_STATUS, CREATE_USER_ID, CREATE_TIME, UPDATE_USER_ID, UPDATE_TIME, PROJECT_ID
, NETWORK_CONNECTION_TYPE ,TEST_CONNECT_STATUS,hdfs_namenode_principal,hive_metastore_uris_thrift,keytab_location,bootstrap_address,jaas_address,krb5_address,kudu_master,impala_master_fqdn)
values (#{datasourceType}, #{datasourceName}, #{datasourceDesc}, #{jdbcUrl}, #{dbName}, #{userName}, #{password}, #{endpoint}, #{bucket}, #{accessId}, #{accessKey}
, #{protocol}, #{host}, #{port}, #{defaultFs}, #{tableSchema}, #{dataStatus}, #{createUserId}, #{createTime}, #{updateUserId}, #{updateTime}, #{projectId}, #{networkConnectionType} ,#{testConnectStatus})
, #{protocol}, #{host}, #{port}, #{defaultFs}, #{tableSchema}, #{dataStatus}, #{createUserId}, #{createTime}, #{updateUserId}, #{updateTime}, #{projectId}
, #{networkConnectionType} ,#{testConnectStatus},#{hdfsNamenodePrincipal},#{hiveMetastoreUrisThrift},#{keytabLocation},#{bootstrapAddress},#{jaasAddress},#{krb5Address},#{kuduMaster},#{impalaMasterFqdn})
</insert>
<insert id="insertBatch" keyProperty="id" useGeneratedKeys="true">
......@@ -241,6 +243,30 @@
<if test="networkConnectionType != null">
NETWORK_CONNECTION_TYPE = #{networkConnectionType},
</if>
<if test="hdfsNamenodePrincipal != null">
hdfs_namenode_principal = #{hdfsNamenodePrincipal},
</if>
<if test="hiveMetastoreUrisThrift != null">
hive_metastore_uris_thrift = #{hiveMetastoreUrisThrift},
</if>
<if test="keytabLocation != null">
keytab_location = #{keytabLocation},
</if>
<if test="bootstrapAddress != null">
bootstrap_address = #{bootstrapAddress},
</if>
<if test="jaasAddress != null">
jaas_address = #{jaasAddress},
</if>
<if test="krb5Address != null">
krb5_address = #{krb5Address},
</if>
<if test="kuduMaster != null">
kudu_master = #{kuduMaster},
</if>
<if test="impalaMasterFqdn != null">
impala_master_fqdn = #{impalaMasterFqdn},
</if>
</set>
where ID = #{id}
</update>
......
......@@ -41,24 +41,24 @@
"databaseType": "Hive"
},
{
"jdbcUrl": "Y",
"password": "Y", //密码
"userName": "Y", //用户名
"datasourceDesc": "Y", //数据源描述
"datasourceName": "Y", //数据源名称
"kudu Master": "Y", //
"impala Master fqdn": "Y", //
"jdbcUrl":"Y",
"password":"N",
"userName":"N",
"kudu_Master":"Y",
"datasourceDesc":"N",
"datasourceName":"Y",
"impala_Master_fqdn":"Y",
"databaseType": "Kudu"
},
{
"jdbcUrl": "Y",
"password": "Y", //密码
"userName": "Y", //用户名
"datasourceDesc": "Y", //数据源描述
"datasourceName": "Y", //数据源名称
"hdfs.namenode.principal": "Y", //
"hive.metastore.uris:thrift": "Y", //
"keytab.location":"Y",
"jdbcUrl":"Y",
"password":"N",
"userName":"N",
"datasourceDesc":"N",
"datasourceName":"Y",
"keytab_location":"Y",
"hdfs_namenode_principal":"Y",
"hive_metastore_uris_thrift":"Y",
"databaseType": "HDFS"
},
{
......@@ -80,11 +80,11 @@
"databaseType": "Elasticsearch"
},
{
"datasourceDesc": "Y", //数据源描述
"datasourceName": "Y", //数据源名称
"bootstrap地址":"Y",
"jaas地址":"Y",
"krb5地址":"Y",
"jaas_address":"N",
"krb5_address":"N",
"datasourceDesc":"N",
"datasourceName":"Y",
"bootstrap_address":"Y",
"databaseType": "Kafka"
}
]
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment