Commit e967006e authored by sml's avatar sml

代码提交

parent 02dd0b7c
......@@ -53,6 +53,7 @@ public class CommConstant {
public static final String WATERDROP_FTL_SOURCE_JDBC = "source_jdbc.ftl";
public static final String WATERDROP_FTL_SOURCE_ELASTICSEARCH = "source_elasticsearch.ftl";
public static final String WATERDROP_FTL_SOURCE_SFTP = "source_sftp.ftl";
public static final String WATERDROP_FTL_SOURCE_HIVE = "source_hive.ftl";
public static final String WATERDROP_FTL_TRANSFORM_SQL = "transform_sql.ftl";
public static final String WATERDROP_FTL_TRANSFORM_JSON2 = "transform_json2.ftl";
public static final String WATERDROP_FTL_SINK_CONSOLE = "sink_console.ftl";
......@@ -67,4 +68,9 @@ public class CommConstant {
public static final String FTL_DOCTRANS = "doctrans.ftl";//文件转码
public static final String FTL_HDFS_UPLOAD = "hdfs_upload.ftl";//HDFS上传
/***************************************************/
//执行引擎
public static final String EXECUTION_ENGINE_JDBC = "jdbc";//jdbc
public static final String EXECUTION_ENGINE_SPARK = "spark";//spark
}
......@@ -21,8 +21,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer;
import com.alibaba.fastjson.JSONObject;
import com.jz.dmp.cmdexectool.common.constant.CommConstant;
import com.jz.dmp.cmdexectool.common.utils.EncryptionUtils;
......@@ -30,17 +31,19 @@ import com.jz.dmp.cmdexectool.common.utils.FreeMarkerUtils;
import com.jz.dmp.cmdexectool.controller.bean.DmpProjectConfigInfoDto;
import com.jz.dmp.cmdexectool.entity.DmpSyncingDatasource;
import com.jz.dmp.cmdexectool.mapper.DmpSyncingDatasourceDao;
import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
import com.jz.dmp.cmdexectool.scheduler.common.enums.MyDbType;
import com.jz.dmp.cmdexectool.scheduler.common.process.ResourceInfo;
import com.jz.dmp.cmdexectool.scheduler.common.task.AbstractParameters;
import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
/**
* Sql/Hql parameter
*/
public class SqlParameters extends AbstractParameters {
private static Logger logger = LoggerFactory.getLogger(SqlParameters.class);
/**
* shell script
*/
......@@ -74,34 +77,34 @@ public class SqlParameters extends AbstractParameters {
private String waterdropScript;
/**
* 前语句
* 前语句
*/
private List<String> preStatements;
/**
* 后语句
* 后语句
*/
private List<String> posStatements;
/**
* jdbcUrl
* 执行引擎
*/
private String jdbcUrl;
private String executioEngine;
/**
* jdbc user
* sql执行语句
*/
private String user;
private String sqlScript;
/**
* jdbc password
* 源数据源
*/
private String password;
private MyBaseDataSource sourceBaseDataSource;
/**
* 数据源类型
* 目标数据源
*/
private MyDbType myDbType;
private MyBaseDataSource targetBaseDataSource;
/**
* resource list
......@@ -120,61 +123,173 @@ public class SqlParameters extends AbstractParameters {
String outputType = scriptObj.getString("outputType");
String sqlScript = scriptObj.getString("sqlScript");
//evn
Map<String, String> envModel = new HashMap<String, String>();
envModel.put("sparkappname", "Waterdrop");
env = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_ENV, envModel, freeMarkerConfig);
//设置sql执行语句
this.sqlScript = sqlScript;
//设置执行引擎
String executioEngine = scriptObj.getString("executioEngine");
this.executioEngine = executioEngine;
//产生evn模板
generatorEnvStr(freeMarkerConfig);
//source
Integer sourceId = scriptObj.getInteger("sourceId");
DmpSyncingDatasource dmpSyncingDatasource = dmpSyncingDatasourceDao.queryById(sourceId);
generatorSourceStr(dmpSyncingDatasourceDao, freeMarkerConfig, publicKey, scriptObj);
this.jdbcUrl = dmpSyncingDatasource.getJdbcUrl();
this.user = dmpSyncingDatasource.getUserName();
this.password = EncryptionUtils.decode(dmpSyncingDatasource.getPassword(), publicKey);
this.myDbType = MyDbType.obtainByIdStr(dmpSyncingDatasource.getId().toString());
if (CommConstant.OUTPUT_TYPE_CONSOLE.equals(outputType)) {
//产生console transform and sink
geneConsoleTransfAndSink(freeMarkerConfig);
}else if (CommConstant.OUTPUT_TYPE_HDFS.equals(outputType)) {
//产生hdfs transform and sink
geneHdfsTransfAndSink(freeMarkerConfig, scriptObj);
}else if (CommConstant.OUTPUT_TYPE_TABLE.equals(outputType)) {
//产生table transform and sink
geneTableTransfAndSink(dmpSyncingDatasourceDao, freeMarkerConfig, scriptObj, publicKey);
}else if (CommConstant.OUTPUT_TYPE_TOPIC.equals(outputType)) {
//产生topic transform and sink
geneTopicTransfAndSink(freeMarkerConfig, scriptObj);
}else if (CommConstant.OUTPUT_TYPE_API.equals(outputType)) {
//产生api transform and sink
geneApiTransfAndSink(freeMarkerConfig, scriptObj);
}
//waterdrop script
geneWaterdropStr(freeMarkerConfig);
String sourceTableNames = scriptObj.getString("sourceTableNames");
String[] tableNameArr = sourceTableNames.split(",");
for (String tableName : tableNameArr) {
Map<String, String> jdbcModel = new HashMap<String, String>();
jdbcModel.put("driver", dmpSyncingDatasource.getDriverClassName());
jdbcModel.put("url", this.jdbcUrl);
jdbcModel.put("table", tableName);
jdbcModel.put("result_table_name", tableName);
jdbcModel.put("user", this.user);
jdbcModel.put("password", this.password);
source = source + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_JDBC, jdbcModel, freeMarkerConfig);
}
if (CommConstant.OUTPUT_TYPE_CONSOLE.equals(outputType)) {
/**
* @Title: geneWaterdropStr
* @Description: TODO(waterdrop script)
* @param @param freeMarkerConfig 参数
* @return void 返回类型
* @throws
*/
/**
 * Assembles the final waterdrop script by rendering the waterdrop template
 * with the previously generated env/source/transform/sink fragments.
 *
 * @param freeMarkerConfig freemarker configurer used to render the template
 */
private void geneWaterdropStr(FreeMarkerConfigurer freeMarkerConfig) {
    Map<String, String> model = new HashMap<String, String>();
    model.put("env", this.env);
    model.put("source", this.source);
    model.put("transform", this.transform);
    model.put("sink", this.sink);
    this.waterdropScript = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL, model, freeMarkerConfig);
}
/**
* @Title: geneApiTransfAndSink
* @Description: TODO(产生api transform and sink)
* @param @param freeMarkerConfig
* @param @param scriptObj
* @param @param sqlScript 参数
* @return void 返回类型
* @throws
*/
private void geneApiTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
// 执行引擎是jdbc,不用生成waterdrop
if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
return;
}
//transform
Map<String, String> transformSqlModel = new HashMap<String, String>();
transformSqlModel.put("sql", sqlScript);
transformSqlModel.put("sql", this.sqlScript);
transformSqlModel.put("table_name", "t_view");
transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
JSONObject apiObj = scriptObj.getJSONObject("api");
String columnFieldsObj = apiObj.getString("columnFields");
String sqlStr = ParameterUtils.columnMappingHandler(columnFieldsObj);
Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
transformSqlModel.put("sql", sqlStr);
transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
//sink
Map<String, String> stdoutModel = new HashMap<String, String>();
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_CONSOLE, stdoutModel, freeMarkerConfig);
}else if (CommConstant.OUTPUT_TYPE_HDFS.equals(outputType)) {
Map<String, String> sinkApiModel = new HashMap<String, String>();
sinkApiModel.put("url", apiObj.getString("apiUrl"));
sinkApiModel.put("apiKey", apiObj.getString("apiKey"));
sinkApiModel.put("method", apiObj.getString("method"));
sinkApiModel.put("signType", apiObj.getString("signType"));
sinkApiModel.put("authCode", apiObj.getString("authCode"));
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_API, sinkApiModel, freeMarkerConfig);
}
/**
* @Title: geneTopicTransfAndSink
* @Description: TODO(产生topic transform and sink)
* @param @param freeMarkerConfig
* @param @param scriptObj
* @param @param sqlScript 参数
* @return void 返回类型
* @throws
*/
private void geneTopicTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
// 执行引擎是jdbc,不用生成waterdrop
if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
return;
}
//transform
Map<String, String> transformSqlModel = new HashMap<String, String>();
transformSqlModel.put("sql", sqlScript);
transformSqlModel.put("sql", this.sqlScript);
transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
//sink
JSONObject hdfsObj = scriptObj.getJSONObject("hdfs");
String hdfsDir = hdfsObj.getString("hdfsDir");
Map<String, String> hdfsModel = new HashMap<String, String>();
hdfsModel.put("path", hdfsDir);
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_HDFS, hdfsModel, freeMarkerConfig);
}else if (CommConstant.OUTPUT_TYPE_TABLE.equals(outputType)) {
JSONObject topicObj = scriptObj.getJSONObject("topic");
Map<String, String> kafkaModel = new HashMap<String, String>();
kafkaModel.put("topic", topicObj.getString("topic"));
kafkaModel.put("broker", topicObj.getString("server"));
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_KAFKA, kafkaModel, freeMarkerConfig);
}
/**
* @Title: geneTableTransfAndSink
* @Description: TODO(产生table transform and sink)
* @param @param dmpSyncingDatasourceDao
* @param @param freeMarkerConfig
* @param @param scriptObj
* @param @param sqlScript 参数
* @return void 返回类型
* @throws
*/
private void geneTableTransfAndSink(DmpSyncingDatasourceDao dmpSyncingDatasourceDao,
FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj, String publicKey) {
JSONObject tableObj = scriptObj.getJSONObject("table");
//设置前导、后导语句
String preImportStatement = tableObj.getString("preImportStatement");
String postImportStatement = tableObj.getString("postImportStatement");
preStatements = new ArrayList<String>();
preStatements.add(preImportStatement);
posStatements = new ArrayList<String>();
posStatements.add(postImportStatement);
//设置目标执行前导后导语句目标数据源
Integer targetSourceId = tableObj.getInteger("targetSourceId");
DmpSyncingDatasource targetSource = dmpSyncingDatasourceDao.queryById(targetSourceId);
String jdbcUrl = targetSource.getJdbcUrl();
String user = targetSource.getUserName();
String password = EncryptionUtils.decode(targetSource.getPassword(), publicKey);
MyDbType myDbType = MyDbType.obtainByIdStr(targetSource.getId().toString());
targetBaseDataSource = new MyBaseDataSource();
targetBaseDataSource.setJdbcUrlDirect(jdbcUrl);
targetBaseDataSource.setUser(user);
targetBaseDataSource.setPassword(password);
targetBaseDataSource.setMyDbType(myDbType);
// 执行引擎是jdbc,不用生成waterdrop
if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
return;
}
//transform
Map<String, String> transformSqlModel = new HashMap<String, String>();
transformSqlModel.put("sql", sqlScript);
transformSqlModel.put("sql", this.sqlScript);
transformSqlModel.put("table_name", "t_view");
transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
JSONObject tableObj = scriptObj.getJSONObject("table");
String tableFieldsObj = tableObj.getString("tableFields");
String sqlStr = ParameterUtils.columnMappingHandler(tableFieldsObj);
......@@ -185,8 +300,12 @@ public class SqlParameters extends AbstractParameters {
//sink
//targetSource
Integer targetSourceId = tableObj.getInteger("targetSourceId");
DmpSyncingDatasource targetSource = dmpSyncingDatasourceDao.queryById(targetSourceId);
if (this.targetBaseDataSource.getMyDbType() == MyDbType.MySQL
|| this.targetBaseDataSource.getMyDbType() == MyDbType.SQLServer
|| this.targetBaseDataSource.getMyDbType() == MyDbType.PostgreSQL
|| this.targetBaseDataSource.getMyDbType() == MyDbType.Oracle
|| this.targetBaseDataSource.getMyDbType() == MyDbType.DB2
|| this.targetBaseDataSource.getMyDbType() == MyDbType.INFORMIX) {
Map<String, String> sinkJdbcModel = new HashMap<String, String>();
sinkJdbcModel.put("save_mode", "overwrite");
......@@ -197,57 +316,137 @@ public class SqlParameters extends AbstractParameters {
sinkJdbcModel.put("password", targetSource.getPassword());
sinkJdbcModel.put("dbtable", targetSource.getDbName());
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_JDBC, sinkJdbcModel, freeMarkerConfig);
}
}
/**
* @Title: geneHdfsTransfAndSink
* @Description: TODO(产生hdfs transform and sink)
* @param @param freeMarkerConfig
* @param @param scriptObj
* @param @param sqlScript 参数
* @return void 返回类型
* @throws
*/
private void geneHdfsTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
// 执行引擎是jdbc,不用生成waterdrop
if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
return ;
}
}else if (CommConstant.OUTPUT_TYPE_TOPIC.equals(outputType)) {
//transform
Map<String, String> transformSqlModel = new HashMap<String, String>();
transformSqlModel.put("sql", sqlScript);
transformSqlModel.put("sql", this.sqlScript);
transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
//sink
JSONObject hdfsObj = scriptObj.getJSONObject("hdfs");
String hdfsDir = hdfsObj.getString("hdfsDir");
JSONObject topicObj = scriptObj.getJSONObject("topic");
Map<String, String> hdfsModel = new HashMap<String, String>();
hdfsModel.put("path", hdfsDir);
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_HDFS, hdfsModel, freeMarkerConfig);
}
Map<String, String> kafkaModel = new HashMap<String, String>();
kafkaModel.put("topic", topicObj.getString("topic"));
kafkaModel.put("broker", topicObj.getString("server"));
sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_KAFKA, kafkaModel, freeMarkerConfig);
}else if (CommConstant.OUTPUT_TYPE_API.equals(outputType)) {
/**
* @Title: geneConsoleTransfAndSink
* @Description: TODO(产生console transform and sink)
* @param @param freeMarkerConfig
* @param @param sqlScript 参数
* @return void 返回类型
* @throws
*/
/**
 * Generates the transform and sink sections for the "console" output type:
 * renders the main sql as view t_view and emits a stdout (console) sink.
 *
 * @param freeMarkerConfig freemarker configurer used to render the templates
 */
private void geneConsoleTransfAndSink(FreeMarkerConfigurer freeMarkerConfig) {
    // jdbc engine executes the sql directly — no waterdrop script is generated
    // (constant-first equals avoids an NPE when executioEngine is unset)
    if (CommConstant.EXECUTION_ENGINE_JDBC.equals(this.executioEngine)) {
        return;
    }
    // transform: main sql statement materialized as view t_view
    // (the duplicate put("sql", sqlScript) diff residue is removed; this.sqlScript wins)
    Map<String, String> transformSqlModel = new HashMap<String, String>();
    transformSqlModel.put("sql", this.sqlScript);
    transformSqlModel.put("table_name", "t_view");
    transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
    // sink: plain console output (model intentionally empty)
    Map<String, String> stdoutModel = new HashMap<String, String>();
    sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_CONSOLE, stdoutModel, freeMarkerConfig);
}
JSONObject apiObj = scriptObj.getJSONObject("api");
String columnFieldsObj = apiObj.getString("columnFields");
String sqlStr = ParameterUtils.columnMappingHandler(columnFieldsObj);
/**
* @Title: generatorSourceStr
* @Description: TODO(生成source模板)
* @param @param dmpSyncingDatasourceDao
* @param @param freeMarkerConfig
* @param @param publicKey
* @param @param scriptObj 参数
* @return void 返回类型
* @throws
*/
/**
 * Resolves the source datasource and generates the waterdrop "source" section.
 * For the jdbc engine only the {@code sourceBaseDataSource} is captured (the
 * sql is executed directly); for the spark engine a jdbc or hive source
 * template is rendered depending on the datasource type.
 *
 * @param dmpSyncingDatasourceDao dao used to load the datasource by id
 * @param freeMarkerConfig        freemarker configurer used to render the templates
 * @param publicKey               key used to decrypt the stored datasource password
 * @param scriptObj               script configuration json; must contain "sourceId"
 * @throws RuntimeException if the datasource type is not supported by waterdrop
 */
private void generatorSourceStr(DmpSyncingDatasourceDao dmpSyncingDatasourceDao,
        FreeMarkerConfigurer freeMarkerConfig, String publicKey, JSONObject scriptObj) {
    Integer sourceId = scriptObj.getInteger("sourceId");
    DmpSyncingDatasource dmpSyncingDatasource = dmpSyncingDatasourceDao.queryById(sourceId);
    String jdbcUrl = dmpSyncingDatasource.getJdbcUrl();
    String user = dmpSyncingDatasource.getUserName();
    String password = EncryptionUtils.decode(dmpSyncingDatasource.getPassword(), publicKey);
    MyDbType myDbType = MyDbType.obtainByIdStr(dmpSyncingDatasource.getId().toString());
    // bug fix: initialize sourceBaseDataSource unconditionally — it was previously
    // only set in the jdbc branch, yet dereferenced below in the spark path (NPE)
    sourceBaseDataSource = new MyBaseDataSource();
    sourceBaseDataSource.setJdbcUrlDirect(jdbcUrl);
    sourceBaseDataSource.setUser(user);
    sourceBaseDataSource.setPassword(password);
    sourceBaseDataSource.setMyDbType(myDbType);
    // jdbc engine executes the sql directly — no waterdrop source is generated
    if (CommConstant.EXECUTION_ENGINE_JDBC.equals(this.executioEngine)) {
        return;
    }
    // relational databases: wrap the main sql as an inline jdbc source view
    if (sourceBaseDataSource.getMyDbType() == MyDbType.MySQL
            || sourceBaseDataSource.getMyDbType() == MyDbType.SQLServer
            || sourceBaseDataSource.getMyDbType() == MyDbType.PostgreSQL
            || sourceBaseDataSource.getMyDbType() == MyDbType.Oracle
            || sourceBaseDataSource.getMyDbType() == MyDbType.DB2
            || sourceBaseDataSource.getMyDbType() == MyDbType.INFORMIX) {
        Map<String, String> jdbcModel = new HashMap<String, String>();
        jdbcModel.put("driver", dmpSyncingDatasource.getDriverClassName());
        jdbcModel.put("url", jdbcUrl);
        jdbcModel.put("table", "(" + this.sqlScript + ") as table_view");
        jdbcModel.put("result_table_name", "table_view");
        jdbcModel.put("user", user);
        jdbcModel.put("password", password);
        this.source = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_JDBC, jdbcModel,
                freeMarkerConfig);
    } else if (sourceBaseDataSource.getMyDbType() == MyDbType.Hive) {
        Map<String, String> hiveModel = new HashMap<String, String>();
        hiveModel.put("catalogImplementation", "hive");
        // NOTE(review): pre_sql is hard-coded; presumably it should use this.sqlScript
        // like the jdbc branch does — confirm against the hive source template
        hiveModel.put("pre_sql", "select * from hive_db.hive_table");
        hiveModel.put("result_table_name", "table_view");
        this.source = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_HIVE, hiveModel,
                freeMarkerConfig);
    } else {
        logger.info("waterdrow,不支持的数据源类型");
        throw new RuntimeException("waterdrow,不支持的数据源");
    }
}
/**
* @Title: generatorEnvStr
* @Description: TODO(产生env模板)
* @param @param freeMarkerConfig 参数
* @return void 返回类型
* @throws
*/
/**
 * Generates the waterdrop "env" section (spark application name).
 * Skipped entirely for the jdbc engine, which executes sql directly.
 *
 * @param freeMarkerConfig freemarker configurer used to render the template
 */
private void generatorEnvStr(FreeMarkerConfigurer freeMarkerConfig) {
    // jdbc engine does not need a waterdrop env section
    // (constant-first equals avoids an NPE when executioEngine is unset)
    if (CommConstant.EXECUTION_ENGINE_JDBC.equals(this.executioEngine)) {
        return;
    }
    Map<String, String> envModel = new HashMap<String, String>();
    envModel.put("sparkappname", "Waterdrop");
    this.env = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_ENV, envModel, freeMarkerConfig);
}
public String getScript() {
......@@ -322,44 +521,44 @@ public class SqlParameters extends AbstractParameters {
this.posStatements = posStatements;
}
public List<ResourceInfo> getResourceList() {
return resourceList;
public String getExecutioEngine() {
return executioEngine;
}
public void setResourceList(List<ResourceInfo> resourceList) {
this.resourceList = resourceList;
public void setExecutioEngine(String executioEngine) {
this.executioEngine = executioEngine;
}
public String getJdbcUrl() {
return jdbcUrl;
public String getSqlScript() {
return sqlScript;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
public void setSqlScript(String sqlScript) {
this.sqlScript = sqlScript;
}
public String getUser() {
return user;
public MyBaseDataSource getSourceBaseDataSource() {
return sourceBaseDataSource;
}
public void setUser(String user) {
this.user = user;
public void setSourceBaseDataSource(MyBaseDataSource sourceBaseDataSource) {
this.sourceBaseDataSource = sourceBaseDataSource;
}
public String getPassword() {
return password;
public MyBaseDataSource getTargetBaseDataSource() {
return targetBaseDataSource;
}
public void setPassword(String password) {
this.password = password;
public void setTargetBaseDataSource(MyBaseDataSource targetBaseDataSource) {
this.targetBaseDataSource = targetBaseDataSource;
}
public MyDbType getMyDbType() {
return myDbType;
public List<ResourceInfo> getResourceList() {
return resourceList;
}
public void setMyDbType(MyDbType myDbType) {
this.myDbType = myDbType;
public void setResourceList(List<ResourceInfo> resourceList) {
this.resourceList = resourceList;
}
@Override
......
......@@ -67,8 +67,6 @@ public abstract class BaseDataSource {
*/
private String principal;
private String dbType;
public String getPrincipal() {
return principal;
}
......@@ -227,14 +225,6 @@ public abstract class BaseDataSource {
this.other = other;
}
public String getDbType() {
return dbType;
}
public void setDbType(String dbType) {
this.dbType = dbType;
}
public String getJdbcUrlDirect() {
return jdbcUrlDirect;
}
......
......@@ -36,6 +36,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.utils.CommonUtils;
import com.jz.dmp.cmdexectool.scheduler.common.utils.JSONUtils;
import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.server.utils.ParamUtils;
public class DatabaseUtils {
......@@ -51,7 +52,7 @@ public class DatabaseUtils {
* @param postStatementsBinds post statements binds
* @param createFuncs create functions
*/
public static void executeUpdateSql(List<SqlBinds> statementsBinds, BaseDataSource baseDataSource){
public static void executeUpdateSql(List<SqlBinds> statementsBinds, MyBaseDataSource myBaseDataSource){
Connection connection = null;
PreparedStatement stmt = null;
ResultSet resultSet = null;
......@@ -59,7 +60,7 @@ public class DatabaseUtils {
// if upload resource is HDFS and kerberos startup
CommonUtils.loadKerberosConf();
// create connection
connection = createConnection(baseDataSource);
connection = createConnection(myBaseDataSource);
// create temp function
/*
if (CollectionUtils.isNotEmpty(createFuncs)) {
......@@ -89,7 +90,7 @@ public class DatabaseUtils {
List<SqlBinds> preStatementsBinds,
List<SqlBinds> postStatementsBinds,
List<String> createFuncs,
BaseDataSource baseDataSource){
MyBaseDataSource myBaseDataSource){
Connection connection = null;
PreparedStatement stmt = null;
ResultSet resultSet = null;
......@@ -97,7 +98,7 @@ public class DatabaseUtils {
// if upload resource is HDFS and kerberos startup
CommonUtils.loadKerberosConf();
// create connection
connection = createConnection(baseDataSource);
connection = createConnection(myBaseDataSource);
// create temp function
/*
if (CollectionUtils.isNotEmpty(createFuncs)) {
......@@ -171,24 +172,24 @@ public class DatabaseUtils {
* @return connection
* @throws Exception Exception
*/
private static Connection createConnection(BaseDataSource baseDataSource) throws Exception{
private static Connection createConnection(MyBaseDataSource myBaseDataSource) throws Exception{
// if hive , load connection params if exists
Connection connection = null;
if (HIVE == DbType.valueOf(baseDataSource.getDbType())) {
if (HIVE == DbType.valueOf(myBaseDataSource.getMyDbType().getDbType().name())) {
Properties paramProp = new Properties();
paramProp.setProperty(USER, baseDataSource.getUser());
paramProp.setProperty(PASSWORD, baseDataSource.getPassword());
paramProp.setProperty(USER, myBaseDataSource.getUser());
paramProp.setProperty(PASSWORD, myBaseDataSource.getPassword());
Map<String, String> connParamMap = CollectionUtils.stringToMap("",
SEMICOLON,
HIVE_CONF);
paramProp.putAll(connParamMap);
connection = DriverManager.getConnection(baseDataSource.getJdbcUrlDirect(),
connection = DriverManager.getConnection(myBaseDataSource.getJdbcUrlDirect(),
paramProp);
}else{
connection = DriverManager.getConnection(baseDataSource.getJdbcUrlDirect(),
baseDataSource.getUser(),
baseDataSource.getPassword());
connection = DriverManager.getConnection(myBaseDataSource.getJdbcUrlDirect(),
myBaseDataSource.getUser(),
myBaseDataSource.getPassword());
}
return connection;
}
......
......@@ -36,6 +36,7 @@ import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.springframework.util.CollectionUtils;
import com.jz.dmp.cmdexectool.common.constant.CommConstant;
import com.jz.dmp.cmdexectool.scheduler.common.Constants;
import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
import com.jz.dmp.cmdexectool.scheduler.common.process.Property;
......@@ -45,6 +46,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlParameters;
import com.jz.dmp.cmdexectool.scheduler.common.utils.OSUtils;
import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.utils.DatabaseUtils;
import com.jz.dmp.cmdexectool.scheduler.server.entity.TaskExecutionContext;
import com.jz.dmp.cmdexectool.scheduler.server.utils.ParamUtils;
......@@ -101,25 +103,7 @@ public class SqlTask extends AbstractTask {
public void handle() throws Exception {
try {
BaseDataSource baseDataSource = new BaseDataSource() {
@Override
public String driverClassSelector() {
// TODO Auto-generated method stub
return null;
}
@Override
public DbType dbTypeSelector() {
// TODO Auto-generated method stub
return null;
}
};
baseDataSource.setDbType(sqlParameters.getMyDbType().getDbType().name());
baseDataSource.setUser(sqlParameters.getUser());
baseDataSource.setPassword(sqlParameters.getPassword());
baseDataSource.setAddress(sqlParameters.getJdbcUrl());
MyBaseDataSource targetBaseDataSource = sqlParameters.getTargetBaseDataSource();
List<SqlBinds> preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements())
.orElse(new ArrayList<>())
......@@ -136,20 +120,34 @@ public class SqlTask extends AbstractTask {
//判断是否需要运行前置sql
if (!CollectionUtils.isEmpty(preStatementSqlBinds)) {
DatabaseUtils.executeUpdateSql(preStatementSqlBinds, baseDataSource);
DatabaseUtils.executeUpdateSql(preStatementSqlBinds, targetBaseDataSource);
}
if (sqlParameters.getExecutioEngine().equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
List<String> mainSqlScript = new ArrayList<String>();
mainSqlScript.add(sqlParameters.getSqlScript());
List<SqlBinds> mainStatementSqlBinds = Optional.ofNullable(mainSqlScript)
.orElse(new ArrayList<>())
.stream()
.map(DatabaseUtils::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
DatabaseUtils.executeUpdateSql(mainStatementSqlBinds, sqlParameters.getSourceBaseDataSource());
}else {
// construct process
CommandExecuteResult commandExecuteResult = waterdropCommandExecutor.run(buildCommand());
setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(commandExecuteResult.getAppIds());
setProcessId(commandExecuteResult.getProcessId());
}
//判断是否运行后置sql
if (!CollectionUtils.isEmpty(postStatementSqlBinds)) {
DatabaseUtils.executeUpdateSql(postStatementSqlBinds, baseDataSource);
DatabaseUtils.executeUpdateSql(postStatementSqlBinds, targetBaseDataSource);
}
setExitStatusCode(commandExecuteResult.getExitStatusCode());
setAppIds(commandExecuteResult.getAppIds());
setProcessId(commandExecuteResult.getProcessId());
} catch (Exception e) {
logger.error("sql task error", e);
setExitStatusCode(Constants.EXIT_CODE_FAILURE);
......
......@@ -23,6 +23,7 @@ import com.jz.dmp.cmdexectool.ApiApplication;
import com.jz.dmp.cmdexectool.scheduler.common.Constants;
import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
import com.jz.dmp.cmdexectool.scheduler.common.enums.ExecutionStatus;
import com.jz.dmp.cmdexectool.scheduler.common.enums.MyDbType;
import com.jz.dmp.cmdexectool.scheduler.common.process.Property;
import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlBinds;
import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlType;
......@@ -32,6 +33,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.utils.JSONUtils;
import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.DataSourceFactory;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MySQLDataSource;
import com.jz.dmp.cmdexectool.scheduler.dao.utils.DatabaseUtils;
import com.jz.dmp.cmdexectool.scheduler.server.entity.TaskExecutionContext;
......@@ -134,21 +136,23 @@ public class SQLCommandExecutorTest {
// load class
DataSourceFactory.loadClass(DbType.valueOf("MYSQL"));
MySQLDataSource mySQLDataSource = new MySQLDataSource();
mySQLDataSource.setAddress("192.168.1.140:3307");
mySQLDataSource.setUser("dmp");
mySQLDataSource.setPassword("Ioubuy123");
MyBaseDataSource myBaseDataSource = new MyBaseDataSource();
myBaseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new");
myBaseDataSource.setUser("dmp");
myBaseDataSource.setPassword("Ioubuy123");
myBaseDataSource.setMyDbType(MyDbType.MySQL);
//String json = JSONObject.toJSONString(mySQLDataSource);
// get datasource
// BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf("MYSQL"), json);
BaseDataSource baseDataSource = mySQLDataSource;
baseDataSource.setDbType(DbType.MYSQL.name());
baseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new");
baseDataSource.setUser("dmp");
baseDataSource.setPassword("Ioubuy123");
/*
* BaseDataSource baseDataSource = mySQLDataSource;
* baseDataSource.s(DbType.MYSQL.name());
* baseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new"
* ); baseDataSource.setUser("dmp"); baseDataSource.setPassword("Ioubuy123");
*/
// ready to execute SQL and parameter entity Map
SqlBinds mainSqlBinds = getSqlAndSqlParamsMap("insert into test(id, name) values(1, 'test')");
......@@ -177,7 +181,7 @@ public class SQLCommandExecutorTest {
// execute sql task
//DatabaseUtils.executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs);
DatabaseUtils.executeUpdateSql(preStatementSqlBinds, baseDataSource);
DatabaseUtils.executeUpdateSql(preStatementSqlBinds, myBaseDataSource);
//setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
} catch (Exception e) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment