Commit b7cfadf6 authored by mcb's avatar mcb

no message

parent 88e4a4f9
package com.jz.common.persistence;
/**
 * Marker interface that the DAO interfaces in this package extend;
 * it declares no methods itself.
 */
public interface BaseDao {
}
\ No newline at end of file
package com.jz.common.persistence;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.transaction.annotation.Transactional;

import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;

import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.security.Key;
import java.util.Base64;
@SuppressWarnings("restriction")
@Transactional(readOnly = true)
public class BaseService {

    protected Logger logger = LoggerFactory.getLogger(getClass());

    /** Base64 (MIME) encoded Triple-DES (DESede) key material, injected from configuration. */
    @Value("${spring.public-key}")
    private String publicKey;

    /**
     * Normalizes an arbitrary value to a {@link BigDecimal}.
     *
     * <p>Null and blank-String input map to zero. Double and Float values go
     * through their decimal string form, so callers see e.g. 0.1 rather than
     * the binary-expansion noise that {@code new BigDecimal(double)} produces.
     *
     * @param obj value to convert (String, Integer, Double, Float, or anything
     *            whose {@code toString()} is a parsable number); may be null
     * @return the converted value, or {@link BigDecimal#ZERO} for null/blank input
     */
    public BigDecimal format(Object obj) {
        BigDecimal bak = BigDecimal.ZERO;
        if (obj != null) {
            if (obj instanceof String) {
                if (StringUtils.isNotBlank((String) obj)) {
                    bak = new BigDecimal((String) obj);
                }
            } else if (obj instanceof Integer) {
                bak = BigDecimal.valueOf((Integer) obj);
            } else if (obj instanceof Double) {
                // valueOf uses Double.toString, avoiding new BigDecimal(double) artifacts.
                bak = BigDecimal.valueOf((Double) obj);
            } else if (obj instanceof Float) {
                // Float.toString keeps the shortest decimal form of the float.
                bak = new BigDecimal(Float.toString((Float) obj));
            } else {
                bak = new BigDecimal(String.valueOf(obj));
            }
        }
        return bak;
    }

    /**
     * Encrypts the given plain text with the configured DESede key and returns
     * it Base64 (MIME) encoded. Returns "" when encryption fails.
     *
     * <p>NOTE(review): "DESede" with no explicit mode defaults to ECB/PKCS5Padding,
     * which is cryptographically weak; changing the transformation would break
     * previously stored values, so it is kept as-is and only flagged here.
     *
     * @param pass plain text to encrypt
     * @return Base64-encoded cipher text, or "" on failure
     */
    public String encode(String pass) {
        String str = "";
        try {
            Key key = new SecretKeySpec(Base64.getMimeDecoder().decode(publicKey), "DESede");
            Cipher cipher = Cipher.getInstance("DESede");
            cipher.init(Cipher.ENCRYPT_MODE, key);
            // Explicit UTF-8 so the result does not depend on the platform charset.
            str = Base64.getMimeEncoder().encodeToString(cipher.doFinal(pass.getBytes(StandardCharsets.UTF_8)));
        } catch (Exception e) {
            logger.error("Failed to encode value with DESede key", e);
        }
        return str;
    }

    /**
     * Decrypts a Base64 (MIME) encoded cipher text produced by {@link #encode}.
     * Returns "" when decryption fails.
     *
     * @param pass Base64-encoded cipher text
     * @return decrypted plain text, or "" on failure
     */
    public String decode(String pass) {
        String str = "";
        try {
            Key key = new SecretKeySpec(Base64.getMimeDecoder().decode(publicKey), "DESede");
            Cipher cipher = Cipher.getInstance("DESede");
            cipher.init(Cipher.DECRYPT_MODE, key);
            str = new String(cipher.doFinal(Base64.getMimeDecoder().decode(pass)), StandardCharsets.UTF_8);
        } catch (Exception e) {
            logger.error("Failed to decode value with DESede key", e);
        }
        return str;
    }
}
package com.jz.common.persistence;
import java.util.List;
/**
 * Generic CRUD contract shared by the MyBatis DAO interfaces.
 *
 * @param <T> entity type handled by the concrete mapper
 */
public interface CrudDao<T> extends BaseDao {
/**
 * Fetches a single record by primary key.
 *
 * @param id primary key
 * @return the matching entity, or null when absent -- TODO confirm the null
 *         contract with the concrete mappers
 */
public T get(Long id);
/**
 * Fetches a single record using the fields populated on the given entity
 * (query-by-example).
 *
 * @param entity example entity carrying the lookup criteria
 * @return the matching entity
 */
public T get(T entity);
/**
 * Queries a list of records; configure the paging object first when paging
 * is required.
 *
 * @param entity filter entity
 * @return matching records
 */
public List<T> findList(T entity);
/**
 * Queries all records.
 *
 * @return every record of this entity type
 */
public List<T> findAllList();
/**
 * Inserts a record.
 *
 * @param entity record to insert
 * @return affected row count
 */
public int insert(T entity);
/**
 * Deletes a record (usually a logical delete that sets data_state to 0).
 *
 * <p>NOTE(review): the parameter is named "entity" but is actually the
 * primary key -- consider renaming to "id" in a follow-up.
 *
 * @param entity primary key of the record to delete
 * @return affected row count
 */
public int delete(long entity);
/**
 * Updates a record.
 *
 * @param entity record carrying the new values
 * @return affected row count
 */
public int update(T entity);
}
\ No newline at end of file
......@@ -58,9 +58,22 @@ public class OfflineSynchController {
*/
@ApiOperation(value = "获取源数据库名称-下拉框", notes = "获取源数据库名称")
@GetMapping(value = "/sourceDbList")
@ApiImplicitParam(name = "projectId",value = "项目id")
@ApiImplicitParam(name = "projectId", value = "项目id")
public JsonResult<List<SourceDbNameListDto>> getSourceDbList(@RequestParam Integer projectId) throws Exception {
    // Delegate straight to the service layer; it already wraps the list in a JsonResult.
    return offlineSynchService.querygSourceDbList(projectId);
}
/**
 * Dropdown support: lists the source tables of the given source database.
 *
 * @param sourceDbId source database id
 * @param targetName optional table-name filter
 * @return the table list wrapped in a success JsonResult
 * @throws Exception propagated from the service layer
 */
@ApiOperation(value = "根据源数据库id,获取源数据表-下拉框", notes = "根据源数据库id,获取源数据表")
@GetMapping(value = "/sourceTableList")
@ApiImplicitParam(name = "sourceDbId", value = "源数据库id")
public JsonResult getSourceTableList(@RequestParam Integer sourceDbId,@RequestParam(value = "targetName", required = false) String targetName) throws Exception {
    final List<Map> tables = offlineSynchService.querygSourceTableList(sourceDbId, targetName);
    return new JsonResult(ResultCode.SUCCESS, tables);
}
}
......@@ -16,7 +16,7 @@ public class SourceDbNameListDto {
/*
* 源数据id
* */
@ApiModelProperty(value = "源数据id")
@ApiModelProperty(value = "源数据id")
String id;
/*
......
package com.jz.dmp.modules.dao;
import com.jz.common.persistence.CrudDao;
import com.jz.dmp.modules.model.DmpProject;
import com.jz.dmp.modules.model.DmpProjectSystemInfo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
/**
 * MyBatis DAO for dmp_project and its related project-system-info records.
 *
 * <p>NOTE(review): the mapper XML in this change set declares namespace
 * com.jz.dmp.web.ui.modules.dmp.repository.DmpProjectDao, which does not match
 * this interface's package -- verify which mapper file actually backs it.
 */
public interface DmpProjectDao extends CrudDao<DmpProject> {

    /** Condition query over projects using an example-style parameter map. */
    public List<DmpProject> getExampleQuery(Map<String, Object> params);

    /** Pageable project listing. */
    public List<Map<String, ?>> getProjectPage(DmpProject params);

    /** Project list shown in the page header. */
    public List<Map<String, ?>> getHeaderProjectList(DmpProject params);

    /** Disables a project (mapper sets data_status = '2'). */
    public void disable(Long id);

    /** Enables a project (mapper sets data_status = '1'). */
    public void enable(Long id);

    /** Project/organization permission rows. */
    public List<Map<String, ?>> getProjectOrgPermission();

    /** Detailed info of a single project. */
    public List<Map<String, ?>> getProjectInfo(Long id);

    /** Inserts a project-system-info record. */
    public void insertProjectSystemInfo(DmpProjectSystemInfo systemInfo);

    /** Updates a project-system-info record. */
    public void updateProjectSystemInfo(DmpProjectSystemInfo systemInfo);

    /** All system-info records of the given project. */
    public List<DmpProjectSystemInfo> getProjectSystemInfo(Long projectId);

    /** Single system-info record of the given project. */
    DmpProjectSystemInfo queryProjectSystemInfo(@Param("projectId") Integer projectId);
}
......@@ -3,6 +3,8 @@ package com.jz.dmp.modules.dao;
import com.jz.dmp.modules.controller.DataIntegration.bean.SourceDbNameListDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageReq;
import com.jz.dmp.modules.model.DmpSyncingDatasource;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
......@@ -16,4 +18,6 @@ public interface OfflineSynchDao {
List<TaskListPageDto> queryTaskListPage(TaskListPageReq taskListPageReq) throws Exception;
List<SourceDbNameListDto> querygSourceDbList(Map map) throws Exception;
DmpSyncingDatasource queryDmpSyncingDatasource(@Param("sourceDbId") Integer sourceDbId) throws Exception;
}
\ No newline at end of file
package com.jz.dmp.modules.model;
import java.io.Serializable;
import java.util.Date;
/**
 * Flat transport object describing a datasource configuration (JDBC, FTP,
 * object storage, HDFS/Kerberos, ...) as handed to the DMP agent.
 *
 * <p>All fields are Strings -- including ids, ports and on/off flags --
 * presumably because the agent protocol is text based; TODO confirm.
 *
 * @since 2020/12/21
 * @version 1.0
 */
public class DmpAgentDatasourceInfo implements Serializable {
private static final long serialVersionUID = -6919790420219629982L;
/** ID */
private String id;
/** Datasource type ID */
private String datasourceType;
/** Datasource name */
private String datasourceName;
/** Datasource description */
private String datasourceDesc;
/** JDBC URL */
private String jdbcUrl;
/** Database name */
private String dbName;
/** Account (user name) */
private String userName;
/** Password */
private String password;
/** Endpoint info */
private String endpoint;
/** Bucket info */
private String bucket;
/** accessId */
private String accessId;
/** accessKey */
private String accessKey;
/** FTP protocol */
private String protocol;
/** IP */
private String host;
/** Port */
private String port;
/** NameNode address */
private String defaultFs;
/** Table schema (tablespace) */
private String tableSchema;
/** Data status */
private String dataStatus;
/** Creating user ID */
private String createUserId;
/** Record creation time */
private String createTime;
/** Updating user ID (original comment said "creating user" -- likely a copy/paste slip) */
private String updateUserId;
/** Record update time */
private String updateTime;
/** Project ID */
private String projectId;
// The fields below carry agent-specific extras (file handling, Kerberos, HDFS);
// semantics inferred from names only -- TODO confirm against the agent protocol.
private String datasource;
private String datasourceCatecode;
private String datasourceCatename;
private String isEnabled;
private String datasourceCatetype;
private String driverClassName;
private String isEnableTest;
private String defaultSourceScript;
private String defaultTargetScript;
private String isEnableSource;
private String isEnableTarget;
private String delimiter;
private String isHaveHeader;
private String fileType;
private String targetDbName;
private String targetFileName;
private String kerberosIsenable;
private String kerberosKrb5Conf;
private String kerberosJaasConf;
private String kerberosFqdn;
private String hdfsAuthPath;
private String hdfsDefaultConf;
private String hdfsConfPath;
private String hdfsUserName;
private String hdfsSyncingPath;
// ---- plain accessors, no logic ----
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDatasourceType() {
return datasourceType;
}
public void setDatasourceType(String datasourceType) {
this.datasourceType = datasourceType;
}
public String getDatasourceName() {
return datasourceName;
}
public void setDatasourceName(String datasourceName) {
this.datasourceName = datasourceName;
}
public String getDatasourceDesc() {
return datasourceDesc;
}
public void setDatasourceDesc(String datasourceDesc) {
this.datasourceDesc = datasourceDesc;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
}
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getEndpoint() {
return endpoint;
}
public void setEndpoint(String endpoint) {
this.endpoint = endpoint;
}
public String getBucket() {
return bucket;
}
public void setBucket(String bucket) {
this.bucket = bucket;
}
public String getAccessId() {
return accessId;
}
public void setAccessId(String accessId) {
this.accessId = accessId;
}
public String getAccessKey() {
return accessKey;
}
public void setAccessKey(String accessKey) {
this.accessKey = accessKey;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public String getPort() {
return port;
}
public void setPort(String port) {
this.port = port;
}
public String getDefaultFs() {
return defaultFs;
}
public void setDefaultFs(String defaultFs) {
this.defaultFs = defaultFs;
}
public String getTableSchema() {
return tableSchema;
}
public void setTableSchema(String tableSchema) {
this.tableSchema = tableSchema;
}
public String getDataStatus() {
return dataStatus;
}
public void setDataStatus(String dataStatus) {
this.dataStatus = dataStatus;
}
public String getCreateUserId() {
return createUserId;
}
public void setCreateUserId(String createUserId) {
this.createUserId = createUserId;
}
public String getCreateTime() {
return createTime;
}
public void setCreateTime(String createTime) {
this.createTime = createTime;
}
public String getUpdateUserId() {
return updateUserId;
}
public void setUpdateUserId(String updateUserId) {
this.updateUserId = updateUserId;
}
public String getUpdateTime() {
return updateTime;
}
public void setUpdateTime(String updateTime) {
this.updateTime = updateTime;
}
public String getProjectId() {
return projectId;
}
public void setProjectId(String projectId) {
this.projectId = projectId;
}
public String getDatasource() {
return datasource;
}
public void setDatasource(String datasource) {
this.datasource = datasource;
}
public String getDatasourceCatecode() {
return datasourceCatecode;
}
public void setDatasourceCatecode(String datasourceCatecode) {
this.datasourceCatecode = datasourceCatecode;
}
public String getDatasourceCatename() {
return datasourceCatename;
}
public void setDatasourceCatename(String datasourceCatename) {
this.datasourceCatename = datasourceCatename;
}
public String getIsEnabled() {
return isEnabled;
}
public void setIsEnabled(String isEnabled) {
this.isEnabled = isEnabled;
}
public String getDatasourceCatetype() {
return datasourceCatetype;
}
public void setDatasourceCatetype(String datasourceCatetype) {
this.datasourceCatetype = datasourceCatetype;
}
public String getDriverClassName() {
return driverClassName;
}
public void setDriverClassName(String driverClassName) {
this.driverClassName = driverClassName;
}
public String getIsEnableTest() {
return isEnableTest;
}
public void setIsEnableTest(String isEnableTest) {
this.isEnableTest = isEnableTest;
}
public String getDefaultSourceScript() {
return defaultSourceScript;
}
public void setDefaultSourceScript(String defaultSourceScript) {
this.defaultSourceScript = defaultSourceScript;
}
public String getDefaultTargetScript() {
return defaultTargetScript;
}
public void setDefaultTargetScript(String defaultTargetScript) {
this.defaultTargetScript = defaultTargetScript;
}
public String getIsEnableSource() {
return isEnableSource;
}
public void setIsEnableSource(String isEnableSource) {
this.isEnableSource = isEnableSource;
}
public String getIsEnableTarget() {
return isEnableTarget;
}
public void setIsEnableTarget(String isEnableTarget) {
this.isEnableTarget = isEnableTarget;
}
public String getDelimiter() {
return delimiter;
}
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
public String getIsHaveHeader() {
return isHaveHeader;
}
public void setIsHaveHeader(String isHaveHeader) {
this.isHaveHeader = isHaveHeader;
}
public String getFileType() {
return fileType;
}
public void setFileType(String fileType) {
this.fileType = fileType;
}
public String getTargetDbName() {
return targetDbName;
}
public void setTargetDbName(String targetDbName) {
this.targetDbName = targetDbName;
}
public String getTargetFileName() {
return targetFileName;
}
public void setTargetFileName(String targetFileName) {
this.targetFileName = targetFileName;
}
public String getKerberosIsenable() {
return kerberosIsenable;
}
public void setKerberosIsenable(String kerberosIsenable) {
this.kerberosIsenable = kerberosIsenable;
}
public String getKerberosKrb5Conf() {
return kerberosKrb5Conf;
}
public void setKerberosKrb5Conf(String kerberosKrb5Conf) {
this.kerberosKrb5Conf = kerberosKrb5Conf;
}
public String getKerberosJaasConf() {
return kerberosJaasConf;
}
public void setKerberosJaasConf(String kerberosJaasConf) {
this.kerberosJaasConf = kerberosJaasConf;
}
public String getKerberosFqdn() {
return kerberosFqdn;
}
public void setKerberosFqdn(String kerberosFqdn) {
this.kerberosFqdn = kerberosFqdn;
}
public String getHdfsAuthPath() {
return hdfsAuthPath;
}
public void setHdfsAuthPath(String hdfsAuthPath) {
this.hdfsAuthPath = hdfsAuthPath;
}
public String getHdfsDefaultConf() {
return hdfsDefaultConf;
}
public void setHdfsDefaultConf(String hdfsDefaultConf) {
this.hdfsDefaultConf = hdfsDefaultConf;
}
public String getHdfsConfPath() {
return hdfsConfPath;
}
public void setHdfsConfPath(String hdfsConfPath) {
this.hdfsConfPath = hdfsConfPath;
}
public String getHdfsUserName() {
return hdfsUserName;
}
public void setHdfsUserName(String hdfsUserName) {
this.hdfsUserName = hdfsUserName;
}
public String getHdfsSyncingPath() {
return hdfsSyncingPath;
}
public void setHdfsSyncingPath(String hdfsSyncingPath) {
this.hdfsSyncingPath = hdfsSyncingPath;
}
}
package com.jz.dmp.modules.model;
import java.io.Serializable;
import java.util.Date;
/**
 * Datasource configuration entity (table dmp_syncing_datasource).
 *
 * @author Bellamy
 * @since 2020-12-21 17:47:18
 */
public class DmpSyncingDatasource implements Serializable {
private static final long serialVersionUID = 588846270711686919L;
/** ID */
private Integer id;
/** Datasource type ID */
private Integer datasourceType;
/** Datasource name */
private String datasourceName;
/** Datasource description */
private String datasourceDesc;
/** JDBC URL */
private String jdbcUrl;
/** Database name */
private String dbName;
/** Account (user name) */
private String userName;
/** Password (apparently stored encoded; OfflineSynchServiceImpl decodes it before use) */
private String password;
/** Endpoint info */
private String endpoint;
/** Bucket info */
private String bucket;
/** accessId */
private String accessId;
/** accessKey */
private String accessKey;
/** FTP protocol */
private String protocol;
/** IP */
private String host;
/** Port */
private String port;
/** NameNode address */
private String defaultFs;
/** Table schema (tablespace) */
private String tableSchema;
/** Data status */
private String dataStatus;
/** Creating user ID */
private String createUserId;
/** Record creation time */
private Date createTime;
/** Updating user ID (original comment said "creating user" -- likely a copy/paste slip) */
private String updateUserId;
/** Record update time */
private Date updateTime;
/** Project ID */
private Integer projectId;
// File-oriented extras; semantics inferred from names -- TODO confirm.
private String delimiter;
private String isHaveHeader;
private String targetDbName;
private String targetFileName;
// ---- plain accessors, no logic ----
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Integer getDatasourceType() {
return datasourceType;
}
public void setDatasourceType(Integer datasourceType) {
this.datasourceType = datasourceType;
}
public String getDatasourceName() {
return datasourceName;
}
public void setDatasourceName(String datasourceName) {
this.datasourceName = datasourceName;
}
public String getDatasourceDesc() {
return datasourceDesc;
}
public void setDatasourceDesc(String datasourceDesc) {
this.datasourceDesc = datasourceDesc;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
}
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getEndpoint() {
return endpoint;
}
public void setEndpoint(String endpoint) {
this.endpoint = endpoint;
}
public String getBucket() {
return bucket;
}
public void setBucket(String bucket) {
this.bucket = bucket;
}
public String getAccessId() {
return accessId;
}
public void setAccessId(String accessId) {
this.accessId = accessId;
}
public String getAccessKey() {
return accessKey;
}
public void setAccessKey(String accessKey) {
this.accessKey = accessKey;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public String getPort() {
return port;
}
public void setPort(String port) {
this.port = port;
}
public String getDefaultFs() {
return defaultFs;
}
public void setDefaultFs(String defaultFs) {
this.defaultFs = defaultFs;
}
public String getTableSchema() {
return tableSchema;
}
public void setTableSchema(String tableSchema) {
this.tableSchema = tableSchema;
}
public String getDataStatus() {
return dataStatus;
}
public void setDataStatus(String dataStatus) {
this.dataStatus = dataStatus;
}
public String getCreateUserId() {
return createUserId;
}
public void setCreateUserId(String createUserId) {
this.createUserId = createUserId;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public String getUpdateUserId() {
return updateUserId;
}
public void setUpdateUserId(String updateUserId) {
this.updateUserId = updateUserId;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
public Integer getProjectId() {
return projectId;
}
public void setProjectId(Integer projectId) {
this.projectId = projectId;
}
public String getDelimiter() {
return delimiter;
}
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
public String getIsHaveHeader() {
return isHaveHeader;
}
public void setIsHaveHeader(String isHaveHeader) {
this.isHaveHeader = isHaveHeader;
}
public String getTargetDbName() {
return targetDbName;
}
public void setTargetDbName(String targetDbName) {
this.targetDbName = targetDbName;
}
public String getTargetFileName() {
return targetFileName;
}
public void setTargetFileName(String targetFileName) {
this.targetFileName = targetFileName;
}
}
\ No newline at end of file
package com.jz.dmp.modules.model;
import java.io.Serializable;
/**
 * Datasource type entity (table dmp_syncing_datasource_type).
 *
 * @author Bellamy
 * @since 2020-12-21 18:39:06
 */
public class DmpSyncingDatasourceType implements Serializable {
private static final long serialVersionUID = 526021146272437267L;
/** ID */
private Integer id;
/** Datasource name */
private String datasource;
/** Datasource category (code) */
private String datasourceCatecode;
/** Datasource category (display name) */
private String datasourceCatename;
/** Datasource type */
private String datasourceType;
/** Datasource icon URL */
private String imgUrl;
// Flag/metadata columns; semantics inferred from names -- TODO confirm.
private String dataStatus;
private String isEnabled;
private String datasourceCatetype;
private String driverClassName;
private String isEnableTest;
private Object defaultSourceScript;
private Object defaultTargetScript;
private String isEnableSource;
private String isEnableTarget;
// ---- plain accessors, no logic ----
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getDatasource() {
return datasource;
}
public void setDatasource(String datasource) {
this.datasource = datasource;
}
public String getDatasourceCatecode() {
return datasourceCatecode;
}
public void setDatasourceCatecode(String datasourceCatecode) {
this.datasourceCatecode = datasourceCatecode;
}
public String getDatasourceCatename() {
return datasourceCatename;
}
public void setDatasourceCatename(String datasourceCatename) {
this.datasourceCatename = datasourceCatename;
}
public String getDatasourceType() {
return datasourceType;
}
public void setDatasourceType(String datasourceType) {
this.datasourceType = datasourceType;
}
public String getImgUrl() {
return imgUrl;
}
public void setImgUrl(String imgUrl) {
this.imgUrl = imgUrl;
}
public String getDataStatus() {
return dataStatus;
}
public void setDataStatus(String dataStatus) {
this.dataStatus = dataStatus;
}
public String getIsEnabled() {
return isEnabled;
}
public void setIsEnabled(String isEnabled) {
this.isEnabled = isEnabled;
}
public String getDatasourceCatetype() {
return datasourceCatetype;
}
public void setDatasourceCatetype(String datasourceCatetype) {
this.datasourceCatetype = datasourceCatetype;
}
public String getDriverClassName() {
return driverClassName;
}
public void setDriverClassName(String driverClassName) {
this.driverClassName = driverClassName;
}
public String getIsEnableTest() {
return isEnableTest;
}
public void setIsEnableTest(String isEnableTest) {
this.isEnableTest = isEnableTest;
}
public Object getDefaultSourceScript() {
return defaultSourceScript;
}
public void setDefaultSourceScript(Object defaultSourceScript) {
this.defaultSourceScript = defaultSourceScript;
}
public Object getDefaultTargetScript() {
return defaultTargetScript;
}
public void setDefaultTargetScript(Object defaultTargetScript) {
this.defaultTargetScript = defaultTargetScript;
}
public String getIsEnableSource() {
return isEnableSource;
}
public void setIsEnableSource(String isEnableSource) {
this.isEnableSource = isEnableSource;
}
public String getIsEnableTarget() {
return isEnableTarget;
}
public void setIsEnableTarget(String isEnableTarget) {
this.isEnableTarget = isEnableTarget;
}
}
\ No newline at end of file
......@@ -5,6 +5,9 @@ import com.jz.common.page.PageInfoResponse;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageReq;
import java.util.List;
import java.util.Map;
/**
* @ClassName: OfflineSynchService
* @Description:
......@@ -17,4 +20,6 @@ public interface OfflineSynchService {
PageInfoResponse<TaskListPageDto> queryTaskListPage(TaskListPageReq taskListPageReq) throws Exception;
JsonResult querygSourceDbList(Integer projectId) throws Exception;
List<Map> querygSourceTableList(Integer sourceDbId,String targetName) throws Exception;
}
package com.jz.dmp.modules.service.impl;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.jz.common.constant.Constants;
import com.jz.common.constant.JsonResult;
import com.jz.common.constant.ResultCode;
import com.jz.common.page.PageInfoResponse;
import com.jz.common.persistence.BaseService;
import com.jz.dmp.agent.DmpAgentResult;
import com.jz.dmp.modules.controller.DataIntegration.bean.SourceDbNameListDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageDto;
import com.jz.dmp.modules.controller.DataIntegration.bean.TaskListPageReq;
import com.jz.dmp.modules.dao.DmpProjectDao;
import com.jz.dmp.modules.dao.OfflineSynchDao;
import com.jz.dmp.modules.model.DmpAgentDatasourceInfo;
import com.jz.dmp.modules.model.DmpProjectSystemInfo;
import com.jz.dmp.modules.model.DmpSyncingDatasource;
import com.jz.dmp.modules.model.DmpSyncingDatasourceType;
import com.jz.dmp.modules.service.OfflineSynchService;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
......@@ -33,6 +41,9 @@ public class OfflineSynchServiceImpl implements OfflineSynchService {
@Autowired
private OfflineSynchDao offlineSynchDao;
@Autowired
private DmpProjectDao dmpProjectDao;
@Override
public PageInfoResponse<TaskListPageDto> queryTaskListPage(TaskListPageReq taskListPageReq) throws Exception {
PageInfoResponse<TaskListPageDto> pageInfoResponse = new PageInfoResponse<>();
......@@ -59,4 +70,60 @@ public class OfflineSynchServiceImpl implements OfflineSynchService {
List<SourceDbNameListDto> list = offlineSynchDao.querygSourceDbList(map);
return new JsonResult(ResultCode.SUCCESS, list);
}
/**
 * Dropdown support: lists the tables of the given source database.
 *
 * <p>NOTE(review): the agent call that would actually fetch the table names is
 * commented out, so this method currently always returns null -- callers must
 * tolerate that until dmpDsAgentServiceImp is wired in.
 *
 * @param sourceDbId source database (datasource) id
 * @param targetName optional table-name filter forwarded to the agent
 * @return currently always null (see note above)
 * @throws Exception propagated from the DAO layer
 */
@Override
public List<Map> querygSourceTableList(Integer sourceDbId,String targetName) throws Exception {
// Look up the datasource configuration by source-database id.
DmpSyncingDatasource dmpSyncingDatasource = offlineSynchDao.queryDmpSyncingDatasource(sourceDbId);
if (StringUtils.isNotBlank(dmpSyncingDatasource.getPassword())) {
// NOTE(review): this BaseService is created with `new`, not Spring-managed, so its
// @Value-injected publicKey is null here and decode() will fail (and swallow the
// exception, returning "") -- the decode should go through a managed bean; verify.
dmpSyncingDatasource.setPassword(new BaseService().decode(dmpSyncingDatasource.getPassword()));
}
DmpAgentDatasourceInfo dsInfo = this.dsInfoDTO(dmpSyncingDatasource);
if (StringUtils.isNotBlank(targetName))
dsInfo.setTargetFileName(targetName);
/*DmpAgentResult rst = dmpDsAgentServiceImp.getTableNameList(dsInfo);
if (!rst.getCode().val().equals("200")) {
return new JsonResult(rst.getCode(), rst.getMessage());
} else {
rst.setResult(JsonMapper.fromJsonString(rst.getMessage(), List.class));
return new JsonResult(ResultCode.SUCCESS, rst);
}*/
return null;
}
/**
 * Maps a DmpSyncingDatasource entity (plus the project's system info) onto the
 * DmpAgentDatasourceInfo transport object.
 *
 * <p>NOTE(review): the entire mapping is commented out -- it depends on a
 * dmpSyncingDatasourceTypeService that is not wired in yet -- so this method
 * currently always returns null.
 *
 * @param body datasource entity to convert
 * @return currently always null (see note above)
 */
private DmpAgentDatasourceInfo dsInfoDTO(DmpSyncingDatasource body) {
/*DmpSyncingDatasourceType type = dmpSyncingDatasourceTypeService.get(body.getDatasourceType());
DmpAgentDatasourceInfo ds = new DmpAgentDatasourceInfo();
ds.setDatasourceType(type.getDatasourceType());
ds.setDatasourceName(body.getDatasourceName());
ds.setDbName(body.getDbName());
ds.setJdbcUrl(body.getJdbcUrl());
ds.setUserName(body.getUserName());
ds.setPassword(body.getPassword());
ds.setDriverClassName(type.getDriverClassName());
ds.setHost(body.getHost());
ds.setPort(body.getPort());
ds.setDelimiter(body.getDelimiter());
ds.setProtocol(body.getProtocol());
ds.setProjectId(body.getProjectId() + "");
ds.setAccessId(body.getAccessId());
ds.setAccessKey(body.getAccessKey());
ds.setIsHaveHeader(body.getIsHaveHeader());
ds.setEndpoint(body.getEndpoint());
ds.setDefaultFs(body.getDefaultFs());
DmpProjectSystemInfo info = dmpProjectDao.queryProjectSystemInfo(body.getProjectId());
ds.setKerberosIsenable(info.getKerberosIsenable());
ds.setKerberosJaasConf(info.getKerberosJaasConf());
ds.setKerberosKrb5Conf(info.getKerberosKrb5Conf());
ds.setKerberosFqdn(info.getKerberosFqdn());
ds.setHdfsDefaultConf(""); // info.getHdfsDefaultConf());
ds.setHdfsConfPath(""); // info.getHdfsConfPath());
ds.setHdfsAuthPath(body.getAccessKey());
ds.setHdfsUserName(info.getHdfsUserName());
ds.setHdfsSyncingPath(info.getHdfsSyncingPath());
return ds;*/
return null;
}
}
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="com.jz.dmp.web.ui.modules.dmp.repository.DmpProjectDao" >
<sql id="FIND_ALL_COLUMN" >
id,owner_id,production_id,data_warehouse_id,name,display_name,project_desc,publish_target_id,is_enable_scheduler,is_edit_task_code,is_enable_select,dw_name,dw_access_type,dw_is_enable_acl,dw_is_enable_access_obj,dw_is_enable_auth_obj,dw_is_protect_data,data_status,create_user_id,create_time,update_user_id,update_time,impala_jdbc_url,impala_master_fqdn
</sql>
<!-- 模块共有接口实现 -->
<insert id="insert" useGeneratedKeys="true" keyProperty="id" parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
INSERT INTO dmp_project( owner_id, production_id, data_warehouse_id, name, display_name, project_desc, publish_target_id, is_enable_scheduler, is_edit_task_code, is_enable_select, dw_name, dw_access_type, dw_is_enable_acl, dw_is_enable_access_obj, dw_is_enable_auth_obj, dw_is_protect_data, data_status, create_user_id, create_time, update_user_id, update_time)
VALUES( #{ownerId}, #{productionId}, #{dataWarehouseId}, #{name}, #{displayName}, #{projectDesc}, #{publishTargetId}, #{isEnableScheduler}, #{isEditTaskCode}, #{isEnableSelect}, #{dwName}, #{dwAccessType}, #{dwIsEnableAcl}, #{dwIsEnableAccessObj}, #{dwIsEnableAuthObj}, #{dwIsProtectData}, #{dataStatus}, #{createUserId}, #{createTime}, #{updateUserId}, #{updateTime})
</insert>
<!-- DELETE FROM dmp_project WHERE id =#{id} -->
<delete id="delete" parameterType="java.lang.Long" >
UPDATE dmp_project SET data_status = '0' WHERE id =#{id}
</delete>
<update id="disable" parameterType="java.lang.Long" >
UPDATE dmp_project SET data_status = '2' WHERE id =#{id}
</update>
<update id="enable" parameterType="java.lang.Long" >
UPDATE dmp_project SET data_status = '1' WHERE id =#{id}
</update>
<!-- Dynamic column update. The <trim> strips a leading comma so the statement
     stays valid when dataStatus is null (the original template concatenated
     "SET ,owner_id = ..." in that case, which is a SQL syntax error). -->
<update id="update" parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
UPDATE dmp_project
<trim prefix="SET" prefixOverrides=",">
<if test="dataStatus != null">data_status = #{dataStatus}</if><if test="ownerId != null">,owner_id = #{ownerId}</if><if test="productionId != null">,production_id = #{productionId}</if><if test="dataWarehouseId != null">,data_warehouse_id = #{dataWarehouseId}</if><if test="name != null">,name = #{name}</if><if test="displayName != null">,display_name = #{displayName}</if><if test="projectDesc != null">,project_desc = #{projectDesc}</if><if test="publishTargetId != null">,publish_target_id = #{publishTargetId}</if><if test="isEnableScheduler != null">,is_enable_scheduler = #{isEnableScheduler}</if><if test="isEditTaskCode != null">,is_edit_task_code = #{isEditTaskCode}</if><if test="isEnableSelect != null">,is_enable_select = #{isEnableSelect}</if><if test="dwName != null">,dw_name = #{dwName}</if><if test="dwAccessType != null">,dw_access_type = #{dwAccessType}</if><if test="dwIsEnableAcl != null">,dw_is_enable_acl = #{dwIsEnableAcl}</if><if test="dwIsEnableAccessObj != null">,dw_is_enable_access_obj = #{dwIsEnableAccessObj}</if><if test="dwIsEnableAuthObj != null">,dw_is_enable_auth_obj = #{dwIsEnableAuthObj}</if><if test="dwIsProtectData != null">,dw_is_protect_data = #{dwIsProtectData}</if><if test="createUserId != null">,create_user_id = #{createUserId}</if><if test="createTime != null">,create_time = #{createTime}</if><if test="updateUserId != null">,update_user_id = #{updateUserId}</if><if test="updateTime != null">,update_time = #{updateTime}</if>
</trim>
WHERE id = #{id}
</update>
<select id="get" parameterType="java.lang.Long" resultType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
SELECT <include refid="FIND_ALL_COLUMN" /> FROM dmp_project WHERE id = #{id}
</select>
<select id="getProjectInfo" parameterType="java.lang.Long" resultType="java.util.Map">
SELECT
ID as id ,
OWNER_ID as ownerId ,
PRODUCTION_ID as productionId ,
DATA_WAREHOUSE_ID as dataWarehouseId ,
NAME as name ,
DISPLAY_NAME as displayName ,
PROJECT_DESC as projectDesc ,
PUBLISH_TARGET_ID as publishTargetId ,
IS_ENABLE_SCHEDULER as isEnableScheduler ,
IS_EDIT_TASK_CODE as isEditTaskCode ,
IS_ENABLE_SELECT as isEnableSelect ,
DW_NAME as dwName ,
DW_ACCESS_TYPE as dwAccessType ,
DW_IS_ENABLE_ACL as dwIsEnableAcl ,
DW_IS_ENABLE_ACCESS_OBJ as dwIsEnableAccessObj,
DW_IS_ENABLE_AUTH_OBJ as dwIsEnableAuthObj ,
DW_IS_PROTECT_DATA as dwIsProtectData ,
case when data_status = '1' then '正常' when data_status = '2' then '禁用' else '删除' end as dataStatus,
CREATE_USER_ID as createUserId ,
ifnull(date_format(create_time, '%Y-%m-%d %H:%i:%S'), '-') as createTime,
UPDATE_USER_ID as updateUserId ,
UPDATE_TIME as updateTime
FROM dmp_project WHERE data_status = '1' and id = #{id}
</select>
<select id="findList" parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject" resultType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
SELECT <include refid="FIND_ALL_COLUMN" /> FROM dmp_project WHERE 1=1
<if test="ownerId != null">AND owner_id = #{ownerId}</if><if test="productionId != null">AND production_id = #{productionId}</if><if test="dataWarehouseId != null">AND data_warehouse_id = #{dataWarehouseId}</if><if test="name != null">AND name = #{name}</if><if test="displayName != null">AND display_name = #{displayName}</if><if test="projectDesc != null">AND project_desc = #{projectDesc}</if><if test="publishTargetId != null">AND publish_target_id = #{publishTargetId}</if><if test="isEnableScheduler != null">AND is_enable_scheduler = #{isEnableScheduler}</if><if test="isEditTaskCode != null">AND is_edit_task_code = #{isEditTaskCode}</if><if test="isEnableSelect != null">AND is_enable_select = #{isEnableSelect}</if><if test="dwName != null">AND dw_name = #{dwName}</if><if test="dwAccessType != null">AND dw_access_type = #{dwAccessType}</if><if test="dwIsEnableAcl != null">AND dw_is_enable_acl = #{dwIsEnableAcl}</if><if test="dwIsEnableAccessObj != null">AND dw_is_enable_access_obj = #{dwIsEnableAccessObj}</if><if test="dwIsEnableAuthObj != null">AND dw_is_enable_auth_obj = #{dwIsEnableAuthObj}</if><if test="dwIsProtectData != null">AND dw_is_protect_data = #{dwIsProtectData}</if><if test="dataStatus != null">AND data_status = #{dataStatus}</if>
</select>
<select id="findAllList" resultType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
SELECT <include refid="FIND_ALL_COLUMN" /> FROM dmp_project WHERE data_status = '1'
</select>
<!-- 模块内新增添加在下面 -->
<select id="getExampleQuery" parameterType="java.util.Map"
resultType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
SELECT
<include refid="FIND_ALL_COLUMN" />
FROM dmp_project WHERE 1=1
<if test="ownerId != null">AND owner_id = #{ownerId}</if>
<if test="productionId != null">AND production_id = #{productionId}</if>
<if test="dataWarehouseId != null">AND data_warehouse_id = #{dataWarehouseId}</if>
<if test="name != null">AND name = #{name}</if>
<if test="displayName != null">AND display_name = #{displayName}</if>
<if test="projectDesc != null">AND project_desc = #{projectDesc}</if>
<if test="publishTargetId != null">AND publish_target_id = #{publishTargetId}</if>
<if test="isEnableScheduler != null">AND is_enable_scheduler = #{isEnableScheduler}</if>
<if test="isEditTaskCode != null">AND is_edit_task_code = #{isEditTaskCode}</if>
<if test="isEnableSelect != null">AND is_enable_select = #{isEnableSelect}</if>
<if test="dwName != null">AND dw_name = #{dwName}</if>
<if test="dwAccessType != null">AND dw_access_type = #{dwAccessType}</if>
<if test="dwIsEnableAcl != null">AND dw_is_enable_acl = #{dwIsEnableAcl}</if>
<if test="dwIsEnableAccessObj != null">AND dw_is_enable_access_obj = #{dwIsEnableAccessObj}</if>
<if test="dwIsEnableAuthObj != null">AND dw_is_enable_auth_obj = #{dwIsEnableAuthObj}</if>
<if test="dwIsProtectData != null">AND dw_is_protect_data = #{dwIsProtectData}</if>
<if test="dataStatus != null">AND data_status = #{dataStatus}</if>
</select>
<!-- Legacy project list for member #{ownerId} (apparently superseded by
     getProjectPage, which also reports permission flags). Uses an implicit
     comma join between dmp_project and dmp_project_member.
     NOTE(review): the name filter here is case-sensitive (no upper()), unlike
     the sibling getProjectPage/getProjectPageBak queries — confirm whether
     that difference is intentional. -->
<select id="getProjectPage1" resultType="java.util.Map"
parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
select
p.id as id,
p.name as name,
p.display_name as displayName,
ifnull(date_format(p.create_time, '%Y-%m-%d %H:%i:%S'), '-') as createTime,
p.owner_id as ownerId,
p.project_desc as projectDesc,
p.is_enable_scheduler as isEnableScheduler,
p.is_enable_select as isEnableSelect,
case when p.data_status = '1' then '正常' when p.data_status = '2' then '禁用' else '删除' end as dataStatus
from dmp_project p, dmp_project_member pm
where p.data_status != '0'
and pm.data_status = '1'
and p.id = pm.project_id
<if test="name != null">
and (p.name like CONCAT('%',#{name},'%') or p.display_name like CONCAT('%',#{name},'%'))
</if>
and pm.member_id = #{ownerId}
order by p.update_time desc
</select>
<!-- Backup ("Bak") variant of getProjectPage: lists projects for member
     #{ownerId} and flags whether each project carries the data-integration
     permission (isHaveIntegration). Name filter is case-insensitive. -->
<select id="getProjectPageBak" resultType="java.util.Map"
parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
select
p.id as id,
p.name as name,
p.display_name as displayName,
ifnull(date_format(p.create_time, '%Y-%m-%d %H:%i:%S'), '-') as createTime,
p.owner_id as ownerId,
p.project_desc as projectDesc,
p.is_enable_scheduler as isEnableScheduler,
p.is_enable_select as isEnableSelect,
case when t.permission_id is not null then '1' else '0' end as isHaveIntegration,
case when p.data_status = '1' then '正常' when p.data_status = '2' then '禁用' else '删除' end as dataStatus
from dmp_project p
join dmp_project_member pm
on pm.member_id = #{ownerId}
and pm.data_status = '1'
left join
<!-- derived table: projects that have the "数据集成" permission enabled -->
(select dp.id as permission_id, pp.project_id as project_id
from dmp_project_permission pp, dmp_permission dp
where pp.data_status = '1'
and dp.data_status = '1'
and pp.permission_id = dp.id
and dp.name = '数据集成'
) t on p.id = t.project_id
where p.data_status != '0'
and p.id = pm.project_id
<if test="name != null">
and (upper(p.name) like CONCAT('%',upper(#{name}),'%') or upper(p.display_name) like CONCAT('%',upper(#{name}),'%'))
</if>
order by p.update_time desc
<if test="limitNum != null">
<!-- #{limitNum} binds LIMIT as a prepared-statement parameter; the previous
     ${limitNum} text substitution was an SQL-injection vector -->
limit #{limitNum}
</if>
</select>
<!-- Main project list for member #{ownerId}. For each project it also
     reports: isHaveIntegration (project has the "数据集成" permission),
     isHaveDataService (project has the "数据服务" permission), and roleId
     ('1' when the member holds role 1 on the project, else '0'). -->
<select id="getProjectPage" resultType="java.util.Map"
parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
select
p.id as id, p.name as name, p.display_name as displayName, ifnull(date_format(p.create_time, '%Y-%m-%d %H:%i:%S'), '-') as
createTime, p.owner_id as ownerId, p.project_desc as projectDesc, p.is_enable_scheduler as isEnableScheduler, p.is_enable_select as
isEnableSelect, case when t.permission_id is not null then '1' else '0' end as isHaveIntegration,
case when s.permission_id is not null then '1' else '0' end as isHaveDataService,
case when r.role_id ='1' then '1' ELSE '0' end as roleId,
case when p.data_status = '1' then '正常'
when p.data_status = '2' then '禁用' else '删除' end as dataStatus
from dmp_project p join dmp_project_member pm on pm.member_id = #{ownerId} and pm.data_status = '1'
left join (select dp.id as permission_id, pp.project_id as project_id from dmp_project_permission pp, dmp_permission dp where
pp.data_status = '1' and dp.data_status = '1' and pp.permission_id = dp.id and dp.name = '数据集成' ) t
on p.id = t.project_id
LEFT JOIN (select dp2.id as permission_id, pp2.project_id as project_id from dmp_project_permission pp2, dmp_permission dp2 where
pp2.data_status = '1' and dp2.data_status = '1' and pp2.permission_id = dp2.id and dp2.name = '数据服务' ) s ON p.id = s.project_id
LEFT JOIN (SELECT role_id,project_id FROM dmp_project_member_role where role_id=1 and member_id = #{ownerId} and data_status='1') r ON r.project_id = p.id
where p.data_status != '0' and p.id = pm.project_id
<if test="name != null">
and (upper(p.name) like CONCAT('%',upper(#{name}),'%') or upper(p.display_name) like CONCAT('%',upper(#{name}),'%'))
</if>
order by p.update_time desc
<if test="limitNum != null">
<!-- #{limitNum} binds LIMIT as a prepared-statement parameter; the previous
     ${limitNum} text substitution was an SQL-injection vector -->
limit #{limitNum}
</if>
</select>
<!-- Top-10 most recently updated, active (data_status = '1') projects for
     member #{ownerId}, shown in the page header. Falls back to p.name when
     display_name is empty; flags the data-integration permission the same
     way getProjectPage does. -->
<select id="getHeaderProjectList" resultType="java.util.Map"
parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProject">
select
p.id as id,
p.name as name,
case when p.display_name = '' then p.name else p.display_name end as displayName,
ifnull(date_format(p.create_time, '%Y-%m-%d %H:%i:%S'), '-') as createTime,
p.owner_id as ownerId,
p.project_desc as projectDesc,
p.is_enable_scheduler as isEnableScheduler,
p.is_enable_select as isEnableSelect,
case when t.permission_id is not null then '1' else '0' end as isHaveIntegration,
case when p.data_status = '1' then '正常' when p.data_status = '2' then '禁用' else '删除' end as dataStatus
from dmp_project p
join dmp_project_member pm
on pm.member_id = #{ownerId}
and pm.data_status = '1'
left join
(select dp.id as permission_id, pp.project_id as project_id
from dmp_project_permission pp, dmp_permission dp
where pp.data_status = '1'
and dp.data_status = '1'
and pp.permission_id = dp.id
and dp.name = '数据集成'
<!-- NOTE(review): dp.data_status = '1' below repeats the predicate three
     lines up; redundant but harmless -->
and dp.data_status = '1'
) t on p.id = t.project_id
where p.data_status = '1'
<if test="name != null">
and (upper(p.name) like CONCAT('%',upper(#{name}),'%') or upper(p.display_name) like CONCAT('%',upper(#{name}),'%'))
</if>
and p.id = pm.project_id
order by p.update_time desc
limit 10
</select>
<!-- All active permissions of type '1' from dmp_permission (no parameters;
     the statement declares no parameterType). -->
<select id="getProjectOrgPermission" resultType="java.util.Map">
select
id as id,
name as name,
pre_url as preUrl,
permission_url as permissionUrl,
method as method,
description as description
from dmp_permission
where data_status = '1'
and type = '1'
</select>
<!-- Inserts one dmp_project_system_info row; the generated key is written
     back into the entity's "id" property. The six audit/link columns are
     always written; every other column appears only when its property is
     non-null. The <if> list in the column section and the <if> list in the
     VALUES section MUST stay in lockstep (same properties, same order) or
     the generated SQL will misalign columns and values. -->
<insert id="insertProjectSystemInfo" useGeneratedKeys="true" keyProperty="id" parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProjectSystemInfo">
INSERT INTO dmp_project_system_info (
data_status
, create_user_id
, create_time
, update_user_id
, update_time
, project_id
<if test="kerberosIsenable != null">, kerberos_isenable</if>
<if test="kerberosKrb5Conf != null">, kerberos_krb5_conf</if>
<if test="kerberosJaasConf != null">, kerberos_jaas_conf</if>
<if test="kerberosKeytabConf != null">, kerberos_keytab_conf</if>
<if test="kerberosKeytabUser != null">, kerberos_keytab_user</if>
<if test="kerberosFqdn != null">, kerberos_fqdn</if>
<if test="kerberosSparkJaasConf != null">, kerberos_spark_jaas_conf</if>
<if test="kerberosJaasClientName != null">, kerberos_jaas_client_name</if>
<if test="shellCmdServer != null">, shell_cmd_server</if>
<if test="shellCmdUser != null">, shell_cmd_user</if>
<if test="shellCmdPassword != null">, shell_cmd_password</if>
<if test="shellCmdSubmitSycning != null">, shell_cmd_submit_sycning</if>
<if test="shellCmdQueryStatus != null">, shell_cmd_query_status</if>
<if test="shellCmdStopSycning != null">, shell_cmd_stop_sycning</if>
<if test="shellCmdCatLog != null">, shell_cmd_cat_log</if>
<if test="shellFtpDownloadDir != null">, shell_ftp_download_dir</if>
<if test="hdfsHttpPath != null">, hdfs_http_path</if>
<if test="hdfsUserName != null">, hdfs_user_name</if>
<if test="hdfsSyncingPath != null">, hdfs_syncing_path</if>
<if test="impalaJdbcUrl != null">, impala_jdbc_url</if>
<if test="impalaShellUrl != null">, impala_shell_url</if>
<if test="azkabanMonitorUrl != null">, azkaban_monitor_url</if>
<if test="azkabanExectorShellExec != null">, azkaban_exector_shell_exec</if>
<if test="azkabanExectorSqlExec != null">, azkaban_exector_sql_exec</if>
<if test="azkabanExectorXmlExec != null">, azkaban_exector_xml_exec</if>
<if test="azkabanExectorSqlPath != null">, azkaban_exector_sql_path</if>
<if test="azkabanExectorShellPath != null">, azkaban_exector_shell_path</if>
<if test="azkabanExectorShellExportData != null">, azkaban_exector_shell_export_data</if>
<if test="azkabanLocalTaskFilePath != null">, azkaban_local_task_file_path</if>
<if test="kafkaConnectorUrl != null">, kafka_connector_url</if>
<if test="kafkaBootstrapServers != null">, kafka_bootstrap_servers</if>
<if test="kafkaMonitorUrl != null">, kafka_monitor_url</if>
<if test="kafkaOffsetUpdateShell != null">, kafka_offset_update_shell</if>
<if test="kafkaSchemaRegisterUrl != null">, kafka_schema_register_url</if>
<if test="kafkaInfluxUrl != null">, kafka_influx_url</if>
<if test="kafkaInfluxUserName != null">, kafka_influx_user_name</if>
<if test="kafkaInfluxPassword != null">, kafka_influx_password</if>
<if test="shellSftpPort != null">, shell_sftp_port</if>
<if test="sparkHiveMetastoreUris != null">, spark_hive_metastore_uris</if>
<if test="sparkYarnQueue != null">, spark_yarn_queue</if>
<if test="sparkDefaultExecutorMemory != null">, spark_default_executor_memory</if>
<if test="sparkDefaultExecutorCores != null">, spark_default_executor_cores</if>
<if test="sparkDefaultTotalExecutorCores != null">, spark_default_total_executor_cores</if>
<if test="sparkStatisticsSourceDataSwitch != null">, spark_statistics_source_data_switch</if>
<if test="atlasMonitorUrl != null">, atlas_monitor_url</if>
<if test="openapiInterfaceUrl != null">, openapi_interface_url</if>
) VALUES (
#{dataStatus}
, #{createUserId}
, #{createTime}
, #{updateUserId}
, #{updateTime}
, #{projectId}
<if test="kerberosIsenable != null">, #{kerberosIsenable}</if>
<if test="kerberosKrb5Conf != null">, #{kerberosKrb5Conf}</if>
<if test="kerberosJaasConf != null">, #{kerberosJaasConf}</if>
<if test="kerberosKeytabConf != null">, #{kerberosKeytabConf}</if>
<if test="kerberosKeytabUser != null">, #{kerberosKeytabUser}</if>
<if test="kerberosFqdn != null">, #{kerberosFqdn}</if>
<if test="kerberosSparkJaasConf != null">, #{kerberosSparkJaasConf}</if>
<if test="kerberosJaasClientName != null">, #{kerberosJaasClientName}</if>
<if test="shellCmdServer != null">, #{shellCmdServer}</if>
<if test="shellCmdUser != null">, #{shellCmdUser}</if>
<if test="shellCmdPassword != null">, #{shellCmdPassword}</if>
<if test="shellCmdSubmitSycning != null">, #{shellCmdSubmitSycning}</if>
<if test="shellCmdQueryStatus != null">, #{shellCmdQueryStatus}</if>
<if test="shellCmdStopSycning != null">, #{shellCmdStopSycning}</if>
<if test="shellCmdCatLog != null">, #{shellCmdCatLog}</if>
<if test="shellFtpDownloadDir != null">, #{shellFtpDownloadDir}</if>
<if test="hdfsHttpPath != null">, #{hdfsHttpPath}</if>
<if test="hdfsUserName != null">, #{hdfsUserName}</if>
<if test="hdfsSyncingPath != null">, #{hdfsSyncingPath}</if>
<if test="impalaJdbcUrl != null">, #{impalaJdbcUrl}</if>
<if test="impalaShellUrl != null">, #{impalaShellUrl}</if>
<if test="azkabanMonitorUrl != null">, #{azkabanMonitorUrl}</if>
<if test="azkabanExectorShellExec != null">, #{azkabanExectorShellExec}</if>
<if test="azkabanExectorSqlExec != null">, #{azkabanExectorSqlExec}</if>
<if test="azkabanExectorXmlExec != null">, #{azkabanExectorXmlExec}</if>
<if test="azkabanExectorSqlPath != null">, #{azkabanExectorSqlPath}</if>
<if test="azkabanExectorShellPath != null">, #{azkabanExectorShellPath}</if>
<if test="azkabanExectorShellExportData != null">, #{azkabanExectorShellExportData}</if>
<if test="azkabanLocalTaskFilePath != null">, #{azkabanLocalTaskFilePath}</if>
<if test="kafkaConnectorUrl != null">, #{kafkaConnectorUrl}</if>
<if test="kafkaBootstrapServers != null">, #{kafkaBootstrapServers}</if>
<if test="kafkaMonitorUrl != null">, #{kafkaMonitorUrl}</if>
<if test="kafkaOffsetUpdateShell != null">, #{kafkaOffsetUpdateShell}</if>
<if test="kafkaSchemaRegisterUrl != null">, #{kafkaSchemaRegisterUrl}</if>
<if test="kafkaInfluxUrl != null">, #{kafkaInfluxUrl}</if>
<if test="kafkaInfluxUserName != null">, #{kafkaInfluxUserName}</if>
<if test="kafkaInfluxPassword != null">, #{kafkaInfluxPassword}</if>
<if test="shellSftpPort != null">, #{shellSftpPort}</if>
<if test="sparkHiveMetastoreUris != null">, #{sparkHiveMetastoreUris}</if>
<if test="sparkYarnQueue != null">, #{sparkYarnQueue}</if>
<if test="sparkDefaultExecutorMemory != null">, #{sparkDefaultExecutorMemory}</if>
<if test="sparkDefaultExecutorCores != null">, #{sparkDefaultExecutorCores}</if>
<if test="sparkDefaultTotalExecutorCores != null">, #{sparkDefaultTotalExecutorCores}</if>
<if test="sparkStatisticsSourceDataSwitch != null">, #{sparkStatisticsSourceDataSwitch}</if>
<if test="atlasMonitorUrl != null">, #{atlasMonitorUrl}</if>
<if test="openapiInterfaceUrl != null">, #{openapiInterfaceUrl}</if>
)
</insert>
<!-- Updates a dmp_project_system_info row located by project_id and/or id;
     non-null properties overwrite their columns, null properties are left
     untouched.
     NOTE(review): both WHERE <if> conditions are optional — if the caller
     passes an entity with projectId AND id both null, "WHERE 1=1" matches
     and EVERY row in the table is updated. Confirm callers always supply at
     least one key, or add a guard.
     NOTE(review): create_user_id/create_time are overwritten unconditionally
     on every update — verify that is intended for audit columns. -->
<update id="updateProjectSystemInfo" parameterType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProjectSystemInfo">
UPDATE dmp_project_system_info SET
data_status = #{dataStatus}
,create_user_id = #{createUserId}
,create_time = #{createTime}
,update_user_id = #{updateUserId}
,update_time = #{updateTime}
<if test="kerberosIsenable != null">,kerberos_isenable = #{kerberosIsenable}</if>
<if test="kerberosKrb5Conf != null">,kerberos_krb5_conf = #{kerberosKrb5Conf}</if>
<if test="kerberosJaasConf != null">,kerberos_jaas_conf = #{kerberosJaasConf}</if>
<if test="kerberosKeytabConf != null">, kerberos_keytab_conf = #{kerberosKeytabConf}</if>
<if test="kerberosKeytabUser != null">, kerberos_keytab_user = #{kerberosKeytabUser}</if>
<if test="kerberosFqdn != null">,kerberos_fqdn = #{kerberosFqdn}</if>
<if test="kerberosSparkJaasConf != null">,kerberos_spark_jaas_conf = #{kerberosSparkJaasConf}</if>
<if test="kerberosJaasClientName != null">,kerberos_jaas_client_name = #{kerberosJaasClientName}</if>
<if test="shellCmdServer != null">,shell_cmd_server = #{shellCmdServer}</if>
<if test="shellCmdUser != null">,shell_cmd_user = #{shellCmdUser}</if>
<if test="shellCmdPassword != null">,shell_cmd_password = #{shellCmdPassword}</if>
<if test="shellCmdSubmitSycning != null">,shell_cmd_submit_sycning = #{shellCmdSubmitSycning}</if>
<if test="shellCmdQueryStatus != null">,shell_cmd_query_status = #{shellCmdQueryStatus}</if>
<if test="shellCmdStopSycning != null">,shell_cmd_stop_sycning = #{shellCmdStopSycning}</if>
<if test="shellCmdCatLog != null">,shell_cmd_cat_log = #{shellCmdCatLog}</if>
<if test="shellFtpDownloadDir != null">,shell_ftp_download_dir = #{shellFtpDownloadDir}</if>
<if test="hdfsHttpPath != null">,hdfs_http_path = #{hdfsHttpPath}</if>
<if test="hdfsUserName != null">,hdfs_user_name = #{hdfsUserName}</if>
<if test="hdfsSyncingPath != null">,hdfs_syncing_path = #{hdfsSyncingPath}</if>
<if test="impalaJdbcUrl != null">,impala_jdbc_url = #{impalaJdbcUrl}</if>
<if test="impalaShellUrl != null">, impala_shell_url = #{impalaShellUrl}</if>
<if test="azkabanMonitorUrl != null">,azkaban_monitor_url = #{azkabanMonitorUrl}</if>
<if test="azkabanExectorShellExec != null">,azkaban_exector_shell_exec = #{azkabanExectorShellExec}</if>
<if test="azkabanExectorSqlExec != null">,azkaban_exector_sql_exec = #{azkabanExectorSqlExec}</if>
<if test="azkabanExectorXmlExec != null">,azkaban_exector_xml_exec = #{azkabanExectorXmlExec}</if>
<if test="azkabanExectorSqlPath != null">,azkaban_exector_sql_path = #{azkabanExectorSqlPath}</if>
<if test="azkabanExectorShellPath != null">,azkaban_exector_shell_path = #{azkabanExectorShellPath}</if>
<if test="azkabanExectorShellExportData != null">,azkaban_exector_shell_export_data = #{azkabanExectorShellExportData}</if>
<if test="azkabanLocalTaskFilePath != null">,azkaban_local_task_file_path = #{azkabanLocalTaskFilePath}</if>
<if test="kafkaConnectorUrl != null">,kafka_connector_url = #{kafkaConnectorUrl}</if>
<if test="kafkaBootstrapServers != null">,kafka_bootstrap_servers = #{kafkaBootstrapServers}</if>
<if test="kafkaMonitorUrl != null">,kafka_monitor_url = #{kafkaMonitorUrl}</if>
<if test="kafkaOffsetUpdateShell != null">,kafka_offset_update_shell = #{kafkaOffsetUpdateShell}</if>
<if test="kafkaSchemaRegisterUrl != null">,kafka_schema_register_url = #{kafkaSchemaRegisterUrl}</if>
<if test="kafkaInfluxUrl != null">,kafka_influx_url = #{kafkaInfluxUrl}</if>
<if test="kafkaInfluxUserName != null">,kafka_influx_user_name = #{kafkaInfluxUserName}</if>
<if test="kafkaInfluxPassword != null">,kafka_influx_password = #{kafkaInfluxPassword}</if>
<if test="shellSftpPort != null">,shell_sftp_port = #{shellSftpPort}</if>
<if test="sparkHiveMetastoreUris != null">, spark_hive_metastore_uris = #{sparkHiveMetastoreUris}</if>
<if test="sparkYarnQueue != null">, spark_yarn_queue = #{sparkYarnQueue}</if>
<if test="sparkDefaultExecutorMemory != null">, spark_default_executor_memory = #{sparkDefaultExecutorMemory}</if>
<if test="sparkDefaultExecutorCores != null">, spark_default_executor_cores = #{sparkDefaultExecutorCores}</if>
<if test="sparkDefaultTotalExecutorCores != null">, spark_default_total_executor_cores = #{sparkDefaultTotalExecutorCores}</if>
<if test="sparkStatisticsSourceDataSwitch != null">, spark_statistics_source_data_switch = #{sparkStatisticsSourceDataSwitch}</if>
<if test="atlasMonitorUrl != null">, atlas_monitor_url = #{atlasMonitorUrl}</if>
<if test="openapiInterfaceUrl != null">, openapi_interface_url = #{openapiInterfaceUrl}</if>
WHERE 1=1
<if test="projectId != null">AND project_id = #{projectId}</if>
<if test="id != null">AND id = #{id}</if>
</update>
<!-- Full system-configuration record for one active project (data_status =
     '1'), keyed by PROJECT_ID. Parameter is the project id (Long). -->
<select id="getProjectSystemInfo" parameterType="java.lang.Long" resultType="com.jz.dmp.web.ui.modules.dmp.repository.domain.DmpProjectSystemInfo">
SELECT
ID,
PROJECT_ID,
KERBEROS_ISENABLE,
KERBEROS_KRB5_CONF,
KERBEROS_JAAS_CONF,
KERBEROS_KEYTAB_CONF,
KERBEROS_KEYTAB_USER,
KERBEROS_FQDN,
KERBEROS_SPARK_JAAS_CONF,
KERBEROS_JAAS_CLIENT_NAME,
SHELL_CMD_SERVER,
SHELL_CMD_USER,
SHELL_CMD_PASSWORD,
SHELL_CMD_SUBMIT_SYCNING,
SHELL_CMD_QUERY_STATUS,
SHELL_CMD_STOP_SYCNING,
SHELL_CMD_CAT_LOG,
SHELL_FTP_DOWNLOAD_DIR,
HDFS_HTTP_PATH,
HDFS_USER_NAME,
HDFS_SYNCING_PATH,
IMPALA_JDBC_URL,
IMPALA_SHELL_URL,
AZKABAN_MONITOR_URL,
AZKABAN_EXECTOR_SHELL_EXEC,
AZKABAN_EXECTOR_SQL_EXEC,
AZKABAN_EXECTOR_XML_EXEC,
AZKABAN_EXECTOR_SQL_PATH,
AZKABAN_EXECTOR_SHELL_PATH,
AZKABAN_EXECTOR_SHELL_EXPORT_DATA,
AZKABAN_LOCAL_TASK_FILE_PATH,
KAFKA_CONNECTOR_URL,
KAFKA_BOOTSTRAP_SERVERS,
KAFKA_MONITOR_URL,
KAFKA_OFFSET_UPDATE_SHELL,
KAFKA_SCHEMA_REGISTER_URL,
KAFKA_INFLUX_URL,
KAFKA_INFLUX_USER_NAME,
KAFKA_INFLUX_PASSWORD,
SHELL_SFTP_PORT,
SPARK_HIVE_METASTORE_URIS,
SPARK_YARN_QUEUE,
SPARK_DEFAULT_EXECUTOR_MEMORY,
SPARK_DEFAULT_EXECUTOR_CORES,
SPARK_DEFAULT_TOTAL_EXECUTOR_CORES,
SPARK_STATISTICS_SOURCE_DATA_SWITCH,
ATLAS_MONITOR_URL,
OPENAPI_INTERFACE_URL
FROM dmp_project_system_info
WHERE data_status = '1' and PROJECT_ID = #{projectId}
</select>
<!-- Same query as getProjectSystemInfo but taking an Integer project id and
     returning via the "dmpProjectSystemInfo" type alias.
     NOTE(review): duplicated SQL — consider a shared <sql> fragment so the
     two column lists cannot drift apart. -->
<select id="queryProjectSystemInfo" parameterType="java.lang.Integer" resultType="dmpProjectSystemInfo">
SELECT
ID,
PROJECT_ID,
KERBEROS_ISENABLE,
KERBEROS_KRB5_CONF,
KERBEROS_JAAS_CONF,
KERBEROS_KEYTAB_CONF,
KERBEROS_KEYTAB_USER,
KERBEROS_FQDN,
KERBEROS_SPARK_JAAS_CONF,
KERBEROS_JAAS_CLIENT_NAME,
SHELL_CMD_SERVER,
SHELL_CMD_USER,
SHELL_CMD_PASSWORD,
SHELL_CMD_SUBMIT_SYCNING,
SHELL_CMD_QUERY_STATUS,
SHELL_CMD_STOP_SYCNING,
SHELL_CMD_CAT_LOG,
SHELL_FTP_DOWNLOAD_DIR,
HDFS_HTTP_PATH,
HDFS_USER_NAME,
HDFS_SYNCING_PATH,
IMPALA_JDBC_URL,
IMPALA_SHELL_URL,
AZKABAN_MONITOR_URL,
AZKABAN_EXECTOR_SHELL_EXEC,
AZKABAN_EXECTOR_SQL_EXEC,
AZKABAN_EXECTOR_XML_EXEC,
AZKABAN_EXECTOR_SQL_PATH,
AZKABAN_EXECTOR_SHELL_PATH,
AZKABAN_EXECTOR_SHELL_EXPORT_DATA,
AZKABAN_LOCAL_TASK_FILE_PATH,
KAFKA_CONNECTOR_URL,
KAFKA_BOOTSTRAP_SERVERS,
KAFKA_MONITOR_URL,
KAFKA_OFFSET_UPDATE_SHELL,
KAFKA_SCHEMA_REGISTER_URL,
KAFKA_INFLUX_URL,
KAFKA_INFLUX_USER_NAME,
KAFKA_INFLUX_PASSWORD,
SHELL_SFTP_PORT,
SPARK_HIVE_METASTORE_URIS,
SPARK_YARN_QUEUE,
SPARK_DEFAULT_EXECUTOR_MEMORY,
SPARK_DEFAULT_EXECUTOR_CORES,
SPARK_DEFAULT_TOTAL_EXECUTOR_CORES,
SPARK_STATISTICS_SOURCE_DATA_SWITCH,
ATLAS_MONITOR_URL,
OPENAPI_INTERFACE_URL
FROM dmp_project_system_info
WHERE data_status = '1' and PROJECT_ID = #{projectId}
</select>
</mapper>
\ No newline at end of file
......@@ -61,4 +61,35 @@
and ds.DATASOURCE_TYPE = #{datasourceType}
</if>
</select>
<!-- Look up one syncing datasource by primary key (#{sourceDbId}); returns
     connection details (jdbc url, credentials, endpoint/bucket, host/port)
     via the "dmpSyncingDatasource" type alias. -->
<select id="queryDmpSyncingDatasource" parameterType="map" resultType="dmpSyncingDatasource">
SELECT
id,
datasource_type,
datasource_name,
datasource_desc,
jdbc_url,
db_name,
user_name,
PASSWORD,
endpoint,
bucket,
access_id,
access_key,
protocol,
HOST,
PORT,
default_fs,
table_schema,
data_status,
create_user_id,
create_time,
update_user_id,
update_time,
project_id
FROM
dmp_syncing_datasource
WHERE id = #{sourceDbId}
</select>
</mapper>
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment