Commit e967006e — jz-dmp-cmdexectool
Authored Mar 05, 2021 by sml
Commit message: 代码提交 (code commit)
Parent: 02dd0b7c

Showing 6 changed files with 397 additions and 199 deletions (+397 -199)
CommConstant.java            .../com/jz/dmp/cmdexectool/common/constant/CommConstant.java   +7    -1
SqlParameters.java           .../cmdexectool/scheduler/common/task/sql/SqlParameters.java   +339  -140
BaseDataSource.java          .../cmdexectool/scheduler/dao/datasource/BaseDataSource.java   +0    -10
DatabaseUtils.java           ...jz/dmp/cmdexectool/scheduler/dao/utils/DatabaseUtils.java   +13   -12
SqlTask.java                 ...cmdexectool/scheduler/server/worker/task/sql/SqlTask.java   +24   -26
SQLCommandExecutorTest.java  ...z/cmdexectool/test/task/shell/SQLCommandExecutorTest.java   +14   -10
src/main/java/com/jz/dmp/cmdexectool/common/constant/CommConstant.java
@@ -53,6 +53,7 @@ public class CommConstant {
 	public static final String WATERDROP_FTL_SOURCE_JDBC = "source_jdbc.ftl";
 	public static final String WATERDROP_FTL_SOURCE_ELASTICSEARCH = "source_elasticsearch.ftl";
 	public static final String WATERDROP_FTL_SOURCE_SFTP = "source_sftp.ftl";
+	public static final String WATERDROP_FTL_SOURCE_HIVE = "source_hive.ftl";
 	public static final String WATERDROP_FTL_TRANSFORM_SQL = "transform_sql.ftl";
 	public static final String WATERDROP_FTL_TRANSFORM_JSON2 = "transform_json2.ftl";
 	public static final String WATERDROP_FTL_SINK_CONSOLE = "sink_console.ftl";
 ...
@@ -66,5 +67,10 @@ public class CommConstant {
 	public static final String FTL_UNZIPFILE = "unzipfile.ftl";      // file decompression
 	public static final String FTL_DOCTRANS = "doctrans.ftl";        // file transcoding
 	public static final String FTL_HDFS_UPLOAD = "hdfs_upload.ftl";  // HDFS upload
+
+	/***************************************************/
+	// execution engines
+	public static final String EXECUTION_ENGINE_JDBC = "jdbc";       // jdbc
+	public static final String EXECUTION_ENGINE_SPARK = "spark";     // spark
 }
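
The two new EXECUTION_ENGINE_* constants are what the rest of this commit branches on: when a task is configured with the jdbc engine, Waterdrop script generation in SqlParameters is skipped and the SQL is executed directly over a JDBC connection in SqlTask. A minimal sketch of that dispatch, where params, runOverJdbc and runThroughWaterdrop are hypothetical stand-ins for the real flow shown in the SqlTask diff further down:

    // hedged sketch; only CommConstant.EXECUTION_ENGINE_JDBC is taken from this commit
    String engine = params.getExecutioEngine();
    if (CommConstant.EXECUTION_ENGINE_JDBC.equals(engine)) {
        runOverJdbc(params);          // hypothetical: run the statements through DatabaseUtils
    } else {
        runThroughWaterdrop(params);  // hypothetical: render the *.ftl fragments and launch Waterdrop
    }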
src/main/java/com/jz/dmp/cmdexectool/scheduler/common/task/sql/SqlParameters.java
@@ -21,8 +21,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer;
 
 import com.alibaba.fastjson.JSONObject;
 import com.jz.dmp.cmdexectool.common.constant.CommConstant;
 import com.jz.dmp.cmdexectool.common.utils.EncryptionUtils;
 ...
@@ -30,17 +31,19 @@ import com.jz.dmp.cmdexectool.common.utils.FreeMarkerUtils;
 import com.jz.dmp.cmdexectool.controller.bean.DmpProjectConfigInfoDto;
 import com.jz.dmp.cmdexectool.entity.DmpSyncingDatasource;
 import com.jz.dmp.cmdexectool.mapper.DmpSyncingDatasourceDao;
-import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.MyDbType;
 import com.jz.dmp.cmdexectool.scheduler.common.process.ResourceInfo;
 import com.jz.dmp.cmdexectool.scheduler.common.task.AbstractParameters;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
+import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
 /**
  * Sql/Hql parameter
  */
 public class SqlParameters extends AbstractParameters {
 
+	private static Logger logger = LoggerFactory.getLogger(SqlParameters.class);
+
 	/**
 	 * shell script
 	 */
 ...
@@ -74,35 +77,35 @@ public class SqlParameters extends AbstractParameters {
 	private String waterdropScript;
 
 	/**
-	 * 前置语句 (pre-execution statements)
+	 * 前导语句 (pre-execution statements)
 	 */
 	private List<String> preStatements;
 
 	/**
-	 * 后置语句 (post-execution statements)
+	 * 后导语句 (post-execution statements)
 	 */
 	private List<String> posStatements;
 
 	/**
-	 * jdbcUrl
+	 * 执行引擎 (execution engine)
 	 */
-	private String jdbcUrl;
+	private String executioEngine;
 
 	/**
-	 * jdbc user
+	 * sql执行语句 (SQL statement to execute)
 	 */
-	private String user;
+	private String sqlScript;
 
 	/**
-	 * jdbc password
+	 * 源数据源 (source data source)
 	 */
-	private String password;
+	private MyBaseDataSource sourceBaseDataSource;
 
 	/**
-	 * 数据源类型 (data source type)
+	 * 目标数据源 (target data source)
 	 */
-	private MyDbType myDbType;
+	private MyBaseDataSource targetBaseDataSource;
 
 	/**
 	 * resource list
 	 */
 ...
@@ -120,73 +123,189 @@ public class SqlParameters extends AbstractParameters {
 		String outputType = scriptObj.getString("outputType");
 		String sqlScript = scriptObj.getString("sqlScript");
+		//设置sql执行语句 (keep the SQL statement to execute)
+		this.sqlScript = sqlScript;
+		//设置执行引擎 (keep the chosen execution engine)
+		String executioEngine = scriptObj.getString("executioEngine");
+		this.executioEngine = executioEngine;
 
-		//evn
-		Map<String, String> envModel = new HashMap<String, String>();
-		envModel.put("sparkappname", "Waterdrop");
-		env = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_ENV, envModel, freeMarkerConfig);
+		//产生evn模板 (generate the env template)
+		generatorEnvStr(freeMarkerConfig);
 
 		//source
-		Integer sourceId = scriptObj.getInteger("sourceId");
-		DmpSyncingDatasource dmpSyncingDatasource = dmpSyncingDatasourceDao.queryById(sourceId);
-		this.jdbcUrl = dmpSyncingDatasource.getJdbcUrl();
-		this.user = dmpSyncingDatasource.getUserName();
-		this.password = EncryptionUtils.decode(dmpSyncingDatasource.getPassword(), publicKey);
-		this.myDbType = MyDbType.obtainByIdStr(dmpSyncingDatasource.getId().toString());
-		String sourceTableNames = scriptObj.getString("sourceTableNames");
-		String[] tableNameArr = sourceTableNames.split(",");
-		for (String tableName : tableNameArr) {
-			Map<String, String> jdbcModel = new HashMap<String, String>();
-			jdbcModel.put("driver", dmpSyncingDatasource.getDriverClassName());
-			jdbcModel.put("url", this.jdbcUrl);
-			jdbcModel.put("table", tableName);
-			jdbcModel.put("result_table_name", tableName);
-			jdbcModel.put("user", this.user);
-			jdbcModel.put("password", this.password);
-			source = source + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_JDBC, jdbcModel, freeMarkerConfig);
-		}
+		generatorSourceStr(dmpSyncingDatasourceDao, freeMarkerConfig, publicKey, scriptObj);
 
 		if (CommConstant.OUTPUT_TYPE_CONSOLE.equals(outputType)) {
-			//transform
-			Map<String, String> transformSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlScript);
-			transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
-			//sink
-			Map<String, String> stdoutModel = new HashMap<String, String>();
-			sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_CONSOLE, stdoutModel, freeMarkerConfig);
+			//产生console transform and sink (generate the console transform and sink)
+			geneConsoleTransfAndSink(freeMarkerConfig);
 		} else if (CommConstant.OUTPUT_TYPE_HDFS.equals(outputType)) {
-			//transform
-			Map<String, String> transformSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlScript);
-			transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
-			//sink
-			JSONObject hdfsObj = scriptObj.getJSONObject("hdfs");
-			String hdfsDir = hdfsObj.getString("hdfsDir");
-			Map<String, String> hdfsModel = new HashMap<String, String>();
-			hdfsModel.put("path", hdfsDir);
-			sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_HDFS, hdfsModel, freeMarkerConfig);
+			//产生hdfs transform and sink (generate the hdfs transform and sink)
+			geneHdfsTransfAndSink(freeMarkerConfig, scriptObj);
 		} else if (CommConstant.OUTPUT_TYPE_TABLE.equals(outputType)) {
-			//transform
-			Map<String, String> transformSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlScript);
-			transformSqlModel.put("table_name", "t_view");
-			transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
-			JSONObject tableObj = scriptObj.getJSONObject("table");
-			String tableFieldsObj = tableObj.getString("tableFields");
-			String sqlStr = ParameterUtils.columnMappingHandler(tableFieldsObj);
-			Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlStr);
-			transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
-			//sink
-			//targetSource
-			Integer targetSourceId = tableObj.getInteger("targetSourceId");
-			DmpSyncingDatasource targetSource = dmpSyncingDatasourceDao.queryById(targetSourceId);
-			...
+			//产生table transform and sink (generate the table transform and sink)
+			geneTableTransfAndSink(dmpSyncingDatasourceDao, freeMarkerConfig, scriptObj, publicKey);
 		} else if (CommConstant.OUTPUT_TYPE_TOPIC.equals(outputType)) {
-			//transform
-			Map<String, String> transformSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlScript);
-			transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
-			//sink
-			JSONObject topicObj = scriptObj.getJSONObject("topic");
-			Map<String, String> kafkaModel = new HashMap<String, String>();
-			kafkaModel.put("topic", topicObj.getString("topic"));
-			kafkaModel.put("broker", topicObj.getString("server"));
-			sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_KAFKA, kafkaModel, freeMarkerConfig);
+			//产生topic transform and sink (generate the topic transform and sink)
+			geneTopicTransfAndSink(freeMarkerConfig, scriptObj);
 		} else if (CommConstant.OUTPUT_TYPE_API.equals(outputType)) {
-			//transform
-			Map<String, String> transformSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlScript);
-			transformSqlModel.put("table_name", "t_view");
-			transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
-			JSONObject apiObj = scriptObj.getJSONObject("api");
-			String columnFieldsObj = apiObj.getString("columnFields");
-			String sqlStr = ParameterUtils.columnMappingHandler(columnFieldsObj);
-			Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
-			transformSqlModel.put("sql", sqlStr);
-			transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
-			//sink
-			Map<String, String> sinkApiModel = new HashMap<String, String>();
-			sinkApiModel.put("url", apiObj.getString("apiUrl"));
-			sinkApiModel.put("apiKey", apiObj.getString("apiKey"));
-			sinkApiModel.put("method", apiObj.getString("method"));
-			sinkApiModel.put("signType", apiObj.getString("signType"));
-			sinkApiModel.put("authCode", apiObj.getString("authCode"));
-			sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_API, sinkApiModel, freeMarkerConfig);
+			//产生api transform and sink (generate the api transform and sink)
+			geneApiTransfAndSink(freeMarkerConfig, scriptObj);
 		}
 
-		//waterdrop script
-		Map<String, String> waterdropModel = new HashMap<String, String>();
-		waterdropModel.put("env", env);
-		waterdropModel.put("source", source);
-		waterdropModel.put("transform", transform);
-		waterdropModel.put("sink", sink);
-		this.waterdropScript = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL, waterdropModel, freeMarkerConfig);
-
-		preStatements = new ArrayList<String>();
-		//jdbc
-		preStatements.add("insert into test(id, name) values(1, 'test')");
-		posStatements = new ArrayList<String>();
-		posStatements.add("insert into test(id, name) values(2, 'test2')");
+		//waterdrop script
+		geneWaterdropStr(freeMarkerConfig);
 ...
+	/**
+	 * @Title: geneWaterdropStr
+	 * @Description: TODO(waterdrop script)
+	 * @param freeMarkerConfig
+	 */
+	private void geneWaterdropStr(FreeMarkerConfigurer freeMarkerConfig) {
+		Map<String, String> waterdropModel = new HashMap<String, String>();
+		waterdropModel.put("env", env);
+		waterdropModel.put("source", source);
+		waterdropModel.put("transform", transform);
+		waterdropModel.put("sink", sink);
+		this.waterdropScript = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL, waterdropModel, freeMarkerConfig);
+	}
+
+	/**
+	 * @Title: geneApiTransfAndSink
+	 * @Description: TODO(产生api transform and sink) (generate the api transform and sink)
+	 * @param freeMarkerConfig
+	 * @param scriptObj
+	 */
+	private void geneApiTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
+		// 执行引擎是jdbc,不用生成waterdrop (execution engine is jdbc, no Waterdrop script is needed)
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		//transform
+		Map<String, String> transformSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", this.sqlScript);
+		transformSqlModel.put("table_name", "t_view");
+		transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+		JSONObject apiObj = scriptObj.getJSONObject("api");
+		String columnFieldsObj = apiObj.getString("columnFields");
+		String sqlStr = ParameterUtils.columnMappingHandler(columnFieldsObj);
+		Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", sqlStr);
+		transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
+		//sink
+		Map<String, String> sinkApiModel = new HashMap<String, String>();
+		sinkApiModel.put("url", apiObj.getString("apiUrl"));
+		sinkApiModel.put("apiKey", apiObj.getString("apiKey"));
+		sinkApiModel.put("method", apiObj.getString("method"));
+		sinkApiModel.put("signType", apiObj.getString("signType"));
+		sinkApiModel.put("authCode", apiObj.getString("authCode"));
+		sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_API, sinkApiModel, freeMarkerConfig);
+	}
+
+	/**
+	 * @Title: geneTopicTransfAndSink
+	 * @Description: TODO(产生topic transform and sink) (generate the topic transform and sink)
+	 * @param freeMarkerConfig
+	 * @param scriptObj
+	 */
+	private void geneTopicTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
+		// 执行引擎是jdbc,不用生成waterdrop
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		//transform
+		Map<String, String> transformSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", this.sqlScript);
+		transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+		//sink
+		JSONObject topicObj = scriptObj.getJSONObject("topic");
+		Map<String, String> kafkaModel = new HashMap<String, String>();
+		kafkaModel.put("topic", topicObj.getString("topic"));
+		kafkaModel.put("broker", topicObj.getString("server"));
+		sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_KAFKA, kafkaModel, freeMarkerConfig);
+	}
+
+	/**
+	 * @Title: geneTableTransfAndSink
+	 * @Description: TODO(产生table transform and sink) (generate the table transform and sink)
+	 * @param dmpSyncingDatasourceDao
+	 * @param freeMarkerConfig
+	 * @param scriptObj
+	 * @param publicKey
+	 */
+	private void geneTableTransfAndSink(DmpSyncingDatasourceDao dmpSyncingDatasourceDao, FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj, String publicKey) {
+		JSONObject tableObj = scriptObj.getJSONObject("table");
+		//设置前导、后导语句 (set the pre/post import statements)
+		String preImportStatement = tableObj.getString("preImportStatement");
+		String postImportStatement = tableObj.getString("postImportStatement");
+		preStatements = new ArrayList<String>();
+		preStatements.add(preImportStatement);
+		posStatements = new ArrayList<String>();
+		posStatements.add(postImportStatement);
+		//设置目标执行前导后导语句目标数据源 (set the target data source on which the pre/post statements run)
+		Integer targetSourceId = tableObj.getInteger("targetSourceId");
+		DmpSyncingDatasource targetSource = dmpSyncingDatasourceDao.queryById(targetSourceId);
+		String jdbcUrl = targetSource.getJdbcUrl();
+		String user = targetSource.getUserName();
+		String password = EncryptionUtils.decode(targetSource.getPassword(), publicKey);
+		MyDbType myDbType = MyDbType.obtainByIdStr(targetSource.getId().toString());
+		targetBaseDataSource = new MyBaseDataSource();
+		targetBaseDataSource.setJdbcUrlDirect(jdbcUrl);
+		targetBaseDataSource.setUser(user);
+		targetBaseDataSource.setPassword(password);
+		targetBaseDataSource.setMyDbType(myDbType);
+		// 执行引擎是jdbc,不用生成waterdrop
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		//transform
+		Map<String, String> transformSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", this.sqlScript);
+		transformSqlModel.put("table_name", "t_view");
+		transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+		String tableFieldsObj = tableObj.getString("tableFields");
+		String sqlStr = ParameterUtils.columnMappingHandler(tableFieldsObj);
+		Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", sqlStr);
+		transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
+		//sink
+		//targetSource
+		if (this.targetBaseDataSource.getMyDbType() == MyDbType.MySQL
+				|| this.targetBaseDataSource.getMyDbType() == MyDbType.SQLServer
+				|| this.targetBaseDataSource.getMyDbType() == MyDbType.PostgreSQL
+				|| this.targetBaseDataSource.getMyDbType() == MyDbType.Oracle
+				|| this.targetBaseDataSource.getMyDbType() == MyDbType.DB2
+				|| this.targetBaseDataSource.getMyDbType() == MyDbType.INFORMIX) {
 			Map<String, String> sinkJdbcModel = new HashMap<String, String>();
 			sinkJdbcModel.put("save_mode", "overwrite");
 ...
@@ -197,59 +316,139 @@ public class SqlParameters extends AbstractParameters {
 			sinkJdbcModel.put("password", targetSource.getPassword());
 			sinkJdbcModel.put("dbtable", targetSource.getDbName());
 			sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_JDBC, sinkJdbcModel, freeMarkerConfig);
 		}
 	}
+
+	/**
+	 * @Title: geneHdfsTransfAndSink
+	 * @Description: TODO(产生hdfs transform and sink) (generate the hdfs transform and sink)
+	 * @param freeMarkerConfig
+	 * @param scriptObj
+	 */
+	private void geneHdfsTransfAndSink(FreeMarkerConfigurer freeMarkerConfig, JSONObject scriptObj) {
+		// 执行引擎是jdbc,不用生成waterdrop
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		//transform
+		Map<String, String> transformSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", this.sqlScript);
+		transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+		//sink
+		JSONObject hdfsObj = scriptObj.getJSONObject("hdfs");
+		String hdfsDir = hdfsObj.getString("hdfsDir");
+		Map<String, String> hdfsModel = new HashMap<String, String>();
+		hdfsModel.put("path", hdfsDir);
+		sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_HDFS, hdfsModel, freeMarkerConfig);
+	}
+
+	/**
+	 * @Title: geneConsoleTransfAndSink
+	 * @Description: TODO(产生console transform and sink) (generate the console transform and sink)
+	 * @param freeMarkerConfig
+	 */
+	private void geneConsoleTransfAndSink(FreeMarkerConfigurer freeMarkerConfig) {
+		//执行引擎是jdbc,不用生成waterdrop
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		//transform
+		Map<String, String> transformSqlModel = new HashMap<String, String>();
+		transformSqlModel.put("sql", this.sqlScript);
+		transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+		//sink
+		Map<String, String> stdoutModel = new HashMap<String, String>();
+		sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_CONSOLE, stdoutModel, freeMarkerConfig);
+	}
+
+	/**
+	 * @Title: generatorSourceStr
+	 * @Description: TODO(生成source模板) (generate the source template)
+	 * @param dmpSyncingDatasourceDao
+	 * @param freeMarkerConfig
+	 * @param publicKey
+	 * @param scriptObj
+	 */
+	private void generatorSourceStr(DmpSyncingDatasourceDao dmpSyncingDatasourceDao, FreeMarkerConfigurer freeMarkerConfig, String publicKey, JSONObject scriptObj) {
+		Integer sourceId = scriptObj.getInteger("sourceId");
+		DmpSyncingDatasource dmpSyncingDatasource = dmpSyncingDatasourceDao.queryById(sourceId);
+		String jdbcUrl = dmpSyncingDatasource.getJdbcUrl();
+		String user = dmpSyncingDatasource.getUserName();
+		String password = EncryptionUtils.decode(dmpSyncingDatasource.getPassword(), publicKey);
+		MyDbType myDbType = MyDbType.obtainByIdStr(dmpSyncingDatasource.getId().toString());
+		// 如果执行引擎选择的是jdbc,不用生成waterdrop source (when the jdbc engine is chosen, only the source data source is recorded; no Waterdrop source block is generated)
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			sourceBaseDataSource = new MyBaseDataSource();
+			sourceBaseDataSource.setJdbcUrlDirect(jdbcUrl);
+			sourceBaseDataSource.setUser(user);
+			sourceBaseDataSource.setPassword(password);
+			sourceBaseDataSource.setMyDbType(myDbType);
+			return;
+		}
+		if (this.sourceBaseDataSource.getMyDbType() == MyDbType.MySQL
+				|| this.sourceBaseDataSource.getMyDbType() == MyDbType.SQLServer
+				|| this.sourceBaseDataSource.getMyDbType() == MyDbType.PostgreSQL
+				|| this.sourceBaseDataSource.getMyDbType() == MyDbType.Oracle
+				|| this.sourceBaseDataSource.getMyDbType() == MyDbType.DB2
+				|| this.sourceBaseDataSource.getMyDbType() == MyDbType.INFORMIX) {
+			Map<String, String> jdbcModel = new HashMap<String, String>();
+			jdbcModel.put("driver", dmpSyncingDatasource.getDriverClassName());
+			jdbcModel.put("url", jdbcUrl);
+			jdbcModel.put("table", "(" + this.sqlScript + ") as table_view");
+			jdbcModel.put("result_table_name", "table_view");
+			jdbcModel.put("user", user);
+			jdbcModel.put("password", password);
+			this.source = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_JDBC, jdbcModel, freeMarkerConfig);
+		} else if (this.sourceBaseDataSource.getMyDbType() == MyDbType.Hive) {
+			Map<String, String> hiveModel = new HashMap<String, String>();
+			hiveModel.put("catalogImplementation", "hive");
+			hiveModel.put("pre_sql", "select * from hive_db.hive_table");
+			hiveModel.put("result_table_name", "table_view");
+			this.source = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SOURCE_HIVE, hiveModel, freeMarkerConfig);
+		} else {
+			logger.info("waterdrow,不支持的数据源类型");    // unsupported data source type for waterdrop
+			throw new RuntimeException("waterdrow,不支持的数据源");    // unsupported data source
+		}
+	}
+
+	/**
+	 * @Title: generatorEnvStr
+	 * @Description: TODO(产生env模板) (generate the env template)
+	 * @param freeMarkerConfig
+	 */
+	private void generatorEnvStr(FreeMarkerConfigurer freeMarkerConfig) {
+		//如果执行引擎选择的是jdbc,不用生成env (when the jdbc engine is chosen, no env block is generated)
+		if (this.executioEngine.equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+			return;
+		}
+		Map<String, String> envModel = new HashMap<String, String>();
+		envModel.put("sparkappname", "Waterdrop");
+		this.env = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_ENV, envModel, freeMarkerConfig);
+	}
 
 	public String getScript() {
 		return script;
 	}
 ...
@@ -322,44 +521,44 @@ public class SqlParameters extends AbstractParameters {
 		this.posStatements = posStatements;
 	}
 
-	public List<ResourceInfo> getResourceList() {
-		return resourceList;
+	public String getExecutioEngine() {
+		return executioEngine;
 	}
 
-	public void setResourceList(List<ResourceInfo> resourceList) {
-		this.resourceList = resourceList;
+	public void setExecutioEngine(String executioEngine) {
+		this.executioEngine = executioEngine;
 	}
 
-	public String getJdbcUrl() {
-		return jdbcUrl;
+	public String getSqlScript() {
+		return sqlScript;
 	}
 
-	public void setJdbcUrl(String jdbcUrl) {
-		this.jdbcUrl = jdbcUrl;
+	public void setSqlScript(String sqlScript) {
+		this.sqlScript = sqlScript;
 	}
 
-	public String getUser() {
-		return user;
+	public MyBaseDataSource getSourceBaseDataSource() {
+		return sourceBaseDataSource;
 	}
 
-	public void setUser(String user) {
-		this.user = user;
+	public void setSourceBaseDataSource(MyBaseDataSource sourceBaseDataSource) {
+		this.sourceBaseDataSource = sourceBaseDataSource;
 	}
 
-	public String getPassword() {
-		return password;
+	public MyBaseDataSource getTargetBaseDataSource() {
+		return targetBaseDataSource;
 	}
 
-	public void setPassword(String password) {
-		this.password = password;
+	public void setTargetBaseDataSource(MyBaseDataSource targetBaseDataSource) {
+		this.targetBaseDataSource = targetBaseDataSource;
 	}
 
-	public MyDbType getMyDbType() {
-		return myDbType;
+	public List<ResourceInfo> getResourceList() {
+		return resourceList;
 	}
 
-	public void setMyDbType(MyDbType myDbType) {
-		this.myDbType = myDbType;
+	public void setResourceList(List<ResourceInfo> resourceList) {
+		this.resourceList = resourceList;
 	}
 
 	@Override
 ...
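
For orientation, the script JSON that SqlParameters now parses has roughly the shape sketched below. The key names are taken from the getString/getInteger/getJSONObject calls in this diff; the values and the chosen output type are invented for illustration:

    JSONObject scriptObj = new JSONObject();
    scriptObj.put("outputType", CommConstant.OUTPUT_TYPE_TABLE);          // must match one of CommConstant.OUTPUT_TYPE_*
    scriptObj.put("sqlScript", "select id, name from t_user");            // invented SQL
    scriptObj.put("executioEngine", CommConstant.EXECUTION_ENGINE_JDBC);  // or CommConstant.EXECUTION_ENGINE_SPARK
    scriptObj.put("sourceId", 1);                                         // id of the source DmpSyncingDatasource

    JSONObject tableObj = new JSONObject();                               // only read for the "table" output type
    tableObj.put("targetSourceId", 2);
    tableObj.put("tableFields", "...");                                   // column mapping handled by ParameterUtils.columnMappingHandler
    tableObj.put("preImportStatement", "truncate table t_target");        // invented example
    tableObj.put("postImportStatement", "select count(*) from t_target"); // invented example
    scriptObj.put("table", tableObj);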
src/main/java/com/jz/dmp/cmdexectool/scheduler/dao/datasource/BaseDataSource.java
@@ -67,8 +67,6 @@ public abstract class BaseDataSource {
 	 */
 	private String principal;
 
-	private String dbType;
-
 	public String getPrincipal() {
 		return principal;
 	}
 ...
@@ -227,14 +225,6 @@ public abstract class BaseDataSource {
 		this.other = other;
 	}
 
-	public String getDbType() {
-		return dbType;
-	}
-
-	public void setDbType(String dbType) {
-		this.dbType = dbType;
-	}
-
 	public String getJdbcUrlDirect() {
 		return jdbcUrlDirect;
 	}
 ...
src/main/java/com/jz/dmp/cmdexectool/scheduler/dao/utils/DatabaseUtils.java
@@ -36,6 +36,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.utils.CommonUtils;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.JSONUtils;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
 import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
+import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
 import com.jz.dmp.cmdexectool.scheduler.server.utils.ParamUtils;
 
 public class DatabaseUtils {
 ...
@@ -51,7 +52,7 @@ public class DatabaseUtils {
 	 * @param postStatementsBinds post statements binds
 	 * @param createFuncs create functions
 	 */
-	public static void executeUpdateSql(List<SqlBinds> statementsBinds, BaseDataSource baseDataSource){
+	public static void executeUpdateSql(List<SqlBinds> statementsBinds, MyBaseDataSource myBaseDataSource){
 		Connection connection = null;
 		PreparedStatement stmt = null;
 		ResultSet resultSet = null;
 ...
@@ -59,7 +60,7 @@ public class DatabaseUtils {
 		// if upload resource is HDFS and kerberos startup
 		CommonUtils.loadKerberosConf();
 		// create connection
-		connection = createConnection(baseDataSource);
+		connection = createConnection(myBaseDataSource);
 		// create temp function
 		/*
 		if (CollectionUtils.isNotEmpty(createFuncs)) {
 ...
@@ -89,7 +90,7 @@ public class DatabaseUtils {
 			List<SqlBinds> preStatementsBinds,
 			List<SqlBinds> postStatementsBinds,
 			List<String> createFuncs,
-			BaseDataSource baseDataSource){
+			MyBaseDataSource myBaseDataSource){
 		Connection connection = null;
 		PreparedStatement stmt = null;
 		ResultSet resultSet = null;
 ...
@@ -97,7 +98,7 @@ public class DatabaseUtils {
 		// if upload resource is HDFS and kerberos startup
 		CommonUtils.loadKerberosConf();
 		// create connection
-		connection = createConnection(baseDataSource);
+		connection = createConnection(myBaseDataSource);
 		// create temp function
 		/*
 		if (CollectionUtils.isNotEmpty(createFuncs)) {
 ...
@@ -171,24 +172,24 @@ public class DatabaseUtils {
 	 * @return connection
 	 * @throws Exception Exception
 	 */
-	private static Connection createConnection(BaseDataSource baseDataSource) throws Exception {
+	private static Connection createConnection(MyBaseDataSource myBaseDataSource) throws Exception {
 		// if hive , load connection params if exists
 		Connection connection = null;
-		if (HIVE == DbType.valueOf(baseDataSource.getDbType())) {
+		if (HIVE == DbType.valueOf(myBaseDataSource.getMyDbType().getDbType().name())) {
 			Properties paramProp = new Properties();
-			paramProp.setProperty(USER, baseDataSource.getUser());
-			paramProp.setProperty(PASSWORD, baseDataSource.getPassword());
+			paramProp.setProperty(USER, myBaseDataSource.getUser());
+			paramProp.setProperty(PASSWORD, myBaseDataSource.getPassword());
 			Map<String, String> connParamMap = CollectionUtils.stringToMap("",
 					SEMICOLON,
 					HIVE_CONF);
 			paramProp.putAll(connParamMap);
-			connection = DriverManager.getConnection(baseDataSource.getJdbcUrlDirect(),
+			connection = DriverManager.getConnection(myBaseDataSource.getJdbcUrlDirect(),
 					paramProp);
 		} else {
-			connection = DriverManager.getConnection(baseDataSource.getJdbcUrlDirect(),
-					baseDataSource.getUser(),
-					baseDataSource.getPassword());
+			connection = DriverManager.getConnection(myBaseDataSource.getJdbcUrlDirect(),
+					myBaseDataSource.getUser(),
+					myBaseDataSource.getPassword());
 		}
 		return connection;
 	}
 ...
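
Every JDBC entry point in DatabaseUtils now takes a MyBaseDataSource instead of the abstract BaseDataSource. Based only on the setter and getter calls visible in this commit, the surface it relies on boils down to the following sketch (URL and credentials are placeholders):

    MyBaseDataSource ds = new MyBaseDataSource();
    ds.setJdbcUrlDirect("jdbc:mysql://host:3306/db");   // a complete JDBC URL, no address/dbName split
    ds.setUser("user");
    ds.setPassword("secret");
    ds.setMyDbType(MyDbType.MySQL);                      // drives the Hive-vs-default branch in createConnection
    // consumed later via ds.getMyDbType(), ds.getJdbcUrlDirect(), ds.getUser(), ds.getPassword()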
src/main/java/com/jz/dmp/cmdexectool/scheduler/server/worker/task/sql/SqlTask.java
@@ -36,6 +36,7 @@ import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.springframework.util.CollectionUtils;
 
+import com.jz.dmp.cmdexectool.common.constant.CommConstant;
 import com.jz.dmp.cmdexectool.scheduler.common.Constants;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
 import com.jz.dmp.cmdexectool.scheduler.common.process.Property;
 ...
@@ -45,6 +46,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlParameters;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.OSUtils;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
 import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
+import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
 import com.jz.dmp.cmdexectool.scheduler.dao.utils.DatabaseUtils;
 import com.jz.dmp.cmdexectool.scheduler.server.entity.TaskExecutionContext;
 import com.jz.dmp.cmdexectool.scheduler.server.utils.ParamUtils;
 ...
@@ -101,25 +103,7 @@ public class SqlTask extends AbstractTask {
 	public void handle() throws Exception {
 		try {
-			BaseDataSource baseDataSource = new BaseDataSource() {
-				@Override
-				public String driverClassSelector() {
-					// TODO Auto-generated method stub
-					return null;
-				}
-				@Override
-				public DbType dbTypeSelector() {
-					// TODO Auto-generated method stub
-					return null;
-				}
-			};
-			baseDataSource.setDbType(sqlParameters.getMyDbType().getDbType().name());
-			baseDataSource.setUser(sqlParameters.getUser());
-			baseDataSource.setPassword(sqlParameters.getPassword());
-			baseDataSource.setAddress(sqlParameters.getJdbcUrl());
+			MyBaseDataSource targetBaseDataSource = sqlParameters.getTargetBaseDataSource();
 
 			List<SqlBinds> preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements())
 					.orElse(new ArrayList<>())
 ...
@@ -136,20 +120,34 @@ public class SqlTask extends AbstractTask {
 			//判断是否需要运行前置sql (check whether the pre-statements need to run)
 			if (!CollectionUtils.isEmpty(preStatementSqlBinds)) {
-				DatabaseUtils.executeUpdateSql(preStatementSqlBinds, baseDataSource);
+				DatabaseUtils.executeUpdateSql(preStatementSqlBinds, targetBaseDataSource);
 			}
 
-			// construct process
-			CommandExecuteResult commandExecuteResult = waterdropCommandExecutor.run(buildCommand());
+			if (sqlParameters.getExecutioEngine().equals(CommConstant.EXECUTION_ENGINE_JDBC)) {
+				List<String> mainSqlScript = new ArrayList<String>();
+				mainSqlScript.add(sqlParameters.getSqlScript());
+				List<SqlBinds> mainStatementSqlBinds = Optional.ofNullable(mainSqlScript)
+						.orElse(new ArrayList<>())
+						.stream()
+						.map(DatabaseUtils::getSqlAndSqlParamsMap)
+						.collect(Collectors.toList());
+				DatabaseUtils.executeUpdateSql(mainStatementSqlBinds, sqlParameters.getSourceBaseDataSource());
+			} else {
+				// construct process
+				CommandExecuteResult commandExecuteResult = waterdropCommandExecutor.run(buildCommand());
+				setExitStatusCode(commandExecuteResult.getExitStatusCode());
+				setAppIds(commandExecuteResult.getAppIds());
+				setProcessId(commandExecuteResult.getProcessId());
+			}
 
 			//判断是否运行后置sql (check whether the post-statements need to run)
 			if (!CollectionUtils.isEmpty(postStatementSqlBinds)) {
-				DatabaseUtils.executeUpdateSql(postStatementSqlBinds, baseDataSource);
+				DatabaseUtils.executeUpdateSql(postStatementSqlBinds, targetBaseDataSource);
 			}
-
-			setExitStatusCode(commandExecuteResult.getExitStatusCode());
-			setAppIds(commandExecuteResult.getAppIds());
-			setProcessId(commandExecuteResult.getProcessId());
 		} catch (Exception e) {
 			logger.error("sql task error", e);
 			setExitStatusCode(Constants.EXIT_CODE_FAILURE);
 ...
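
A minimal usage sketch of the reworked executeUpdateSql path, essentially what the updated test below does; it assumes DatabaseUtils.getSqlAndSqlParamsMap is accessible from callers, as the method reference in SqlTask suggests:

    MyBaseDataSource myBaseDataSource = new MyBaseDataSource();
    myBaseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new");
    myBaseDataSource.setUser("dmp");
    myBaseDataSource.setPassword("Ioubuy123");
    myBaseDataSource.setMyDbType(MyDbType.MySQL);

    List<SqlBinds> binds = new ArrayList<>();
    binds.add(DatabaseUtils.getSqlAndSqlParamsMap("insert into test(id, name) values(1, 'test')"));
    DatabaseUtils.executeUpdateSql(binds, myBaseDataSource);   // opens the connection via createConnection(MyBaseDataSource)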
src/test/java/com/jz/cmdexectool/test/task/shell/SQLCommandExecutorTest.java
@@ -23,6 +23,7 @@ import com.jz.dmp.cmdexectool.ApiApplication;
 import com.jz.dmp.cmdexectool.scheduler.common.Constants;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.DbType;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.ExecutionStatus;
+import com.jz.dmp.cmdexectool.scheduler.common.enums.MyDbType;
 import com.jz.dmp.cmdexectool.scheduler.common.process.Property;
 import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlBinds;
 import com.jz.dmp.cmdexectool.scheduler.common.task.sql.SqlType;
 ...
@@ -32,6 +33,7 @@ import com.jz.dmp.cmdexectool.scheduler.common.utils.JSONUtils;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
 import com.jz.dmp.cmdexectool.scheduler.dao.datasource.BaseDataSource;
 import com.jz.dmp.cmdexectool.scheduler.dao.datasource.DataSourceFactory;
+import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MyBaseDataSource;
 import com.jz.dmp.cmdexectool.scheduler.dao.datasource.MySQLDataSource;
 import com.jz.dmp.cmdexectool.scheduler.dao.utils.DatabaseUtils;
 import com.jz.dmp.cmdexectool.scheduler.server.entity.TaskExecutionContext;
 ...
@@ -134,21 +136,23 @@ public class SQLCommandExecutorTest {
 			// load class
 			DataSourceFactory.loadClass(DbType.valueOf("MYSQL"));
-			MySQLDataSource mySQLDataSource = new MySQLDataSource();
-			mySQLDataSource.setAddress("192.168.1.140:3307");
-			mySQLDataSource.setUser("dmp");
-			mySQLDataSource.setPassword("Ioubuy123");
+			MyBaseDataSource myBaseDataSource = new MyBaseDataSource();
+			myBaseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new");
+			myBaseDataSource.setUser("dmp");
+			myBaseDataSource.setPassword("Ioubuy123");
+			myBaseDataSource.setMyDbType(MyDbType.MySQL);
 
 			//String json = JSONObject.toJSONString(mySQLDataSource);
 			// get datasource
 			// BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf("MYSQL"), json);
-			BaseDataSource baseDataSource = mySQLDataSource;
-			baseDataSource.setDbType(DbType.MYSQL.name());
-			baseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new");
-			baseDataSource.setUser("dmp");
-			baseDataSource.setPassword("Ioubuy123");
+			/*
+			 * BaseDataSource baseDataSource = mySQLDataSource;
+			 * baseDataSource.s(DbType.MYSQL.name());
+			 * baseDataSource.setJdbcUrlDirect("jdbc:mysql://192.168.1.140:3307/dmp_web_new"
+			 * ); baseDataSource.setUser("dmp"); baseDataSource.setPassword("Ioubuy123");
+			 */
 
 			// ready to execute SQL and parameter entity Map
 			SqlBinds mainSqlBinds = getSqlAndSqlParamsMap("insert into test(id, name) values(1, 'test')");
 ...
@@ -177,7 +181,7 @@ public class SQLCommandExecutorTest {
 			// execute sql task
 			//DatabaseUtils.executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs);
-			DatabaseUtils.executeUpdateSql(preStatementSqlBinds, baseDataSource);
+			DatabaseUtils.executeUpdateSql(preStatementSqlBinds, myBaseDataSource);
 			//setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
 		} catch (Exception e) {
 ...