Commit 82519acf authored by sml

sql api commit

parent f289f914
@@ -59,6 +59,7 @@ public class CommConstant {
 	public static final String WATERDROP_FTL_SINK_HDFS = "sink_hdfs.ftl";
 	public static final String WATERDROP_FTL_SINK_JDBC = "sink_jdbc.ftl";
 	public static final String WATERDROP_FTL_SINK_KAFKA = "sink_kafka.ftl";
+	public static final String WATERDROP_FTL_SINK_API = "sink_api.ftl";
 	
 	// other script templates
 	public static final String FTL_SFTP_DOWNLOAD = "sftp_download.ftl"; // ftp download
...
@@ -25,6 +25,7 @@ import com.jz.dmp.cmdexectool.entity.DmpSyncingDatasource;
 import com.jz.dmp.cmdexectool.mapper.DmpSyncingDatasourceDao;
 import com.jz.dmp.cmdexectool.scheduler.common.process.ResourceInfo;
 import com.jz.dmp.cmdexectool.scheduler.common.task.AbstractParameters;
+import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;
 import com.jz.dmp.cmdexectool.scheduler.server.entity.TaskExecutionContext;
 
 import java.util.HashMap;
@@ -133,18 +134,17 @@ public class SqlParameters extends AbstractParameters {
 				//transform
 				Map<String, String> transformSqlModel = new HashMap<String, String>();
 				transformSqlModel.put("sql", sqlScript);
+				transformSqlModel.put("table_name", "t_view");
 				transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
 				
 				JSONObject tableObj = scriptObj.getJSONObject("table");
-				JSONArray fieldsArr = tableObj.getJSONArray("tableFields");
-				for (int index = 0; index < fieldsArr.size(); index++) {
-					JSONObject fieldObj = fieldsArr.getJSONObject(index);
-					Map<String, String> transformJson2Model = new HashMap<String, String>();
-					transformJson2Model.put("source_field", fieldObj.getString("sourceFieldName"));
-					transformJson2Model.put("target_field", fieldObj.getString("targetFieldName"));
-					transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformJson2Model, freeMarkerConfig);
-				}
+				String tableFieldsObj = tableObj.getString("tableFields");
+				String sqlStr = ParameterUtils.columnMappingHandler(tableFieldsObj);
+				Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
+				transformSqlModel.put("sql", sqlStr);
+				transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
 				
 				//sink
 				//targetSource
@@ -175,7 +175,28 @@ public class SqlParameters extends AbstractParameters {
 					kafkaModel.put("broker", topicObj.getString("server"));
 					sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_KAFKA, kafkaModel, freeMarkerConfig);
 				}else if (CommConstant.OUTPUT_TYPE_API.equals(outputType)) {
+					//transform
+					Map<String, String> transformSqlModel = new HashMap<String, String>();
+					transformSqlModel.put("sql", sqlScript);
+					transformSqlModel.put("table_name", "t_view");
+					transform = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_SQL, transformSqlModel, freeMarkerConfig);
+					
+					JSONObject apiObj = scriptObj.getJSONObject("api");
+					String columnFieldsObj = apiObj.getString("columnFields");
+					String sqlStr = ParameterUtils.columnMappingHandler(columnFieldsObj);
+					Map<String, String> transformMappingSqlModel = new HashMap<String, String>();
+					transformSqlModel.put("sql", sqlStr);
+					transform = transform + FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_TRANSFORM_JSON2, transformMappingSqlModel, freeMarkerConfig);
+					
+					//sink
+					Map<String, String> sinkApiModel = new HashMap<String, String>();
+					sinkApiModel.put("url", apiObj.getString("apiUrl"));
+					sinkApiModel.put("apiKey", apiObj.getString("apiKey"));
+					sinkApiModel.put("method", apiObj.getString("method"));
+					sinkApiModel.put("signType", apiObj.getString("signType"));
+					sinkApiModel.put("authCode", apiObj.getString("authCode"));
+					sink = FreeMarkerUtils.freemakerJson(CommConstant.WATERDROP_FTL_SINK_API, sinkApiModel, freeMarkerConfig);
 				}
 				
 				//waterdrop script
 				Map<String, String> waterdropModel = new HashMap<String, String>();
...
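The API branch above follows the same pattern as the existing JDBC and Kafka branches: build a Map model, render it against one of the Waterdrop .ftl templates via FreeMarkerUtils.freemakerJson, and concatenate the resulting transform/sink fragments into the final script. The helper's internals are not part of this commit; the sketch below only illustrates the kind of FreeMarker call it is assumed to wrap (the template path, class name and model values here are hypothetical, not project code).

// Minimal sketch, assuming the .ftl templates live under /templates on the classpath
// and that FreeMarkerUtils.freemakerJson wraps a plain FreeMarker render like this.
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;

import freemarker.template.Configuration;
import freemarker.template.Template;

public class SinkApiRenderSketch {
    public static void main(String[] args) throws Exception {
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_31);
        cfg.setClassForTemplateLoading(SinkApiRenderSketch.class, "/templates"); // assumed location
        cfg.setDefaultEncoding("UTF-8");

        Map<String, String> sinkApiModel = new HashMap<String, String>();
        sinkApiModel.put("url", "http://example.com/receive"); // hypothetical values
        sinkApiModel.put("apiKey", "demo-key");
        sinkApiModel.put("method", "POST");

        Template template = cfg.getTemplate("sink_api.ftl");
        StringWriter out = new StringWriter();
        template.process(sinkApiModel, out);      // fills the <#if ...> blocks that have values
        System.out.println(out.toString());       // prints the rendered Api { ... } sink block
    }
}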
@@ -18,16 +18,21 @@
 package com.jz.dmp.cmdexectool.scheduler.common.utils;
 
 import java.sql.PreparedStatement;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.alibaba.fastjson.JSONArray;
+import com.alibaba.fastjson.JSONObject;
 import com.jz.dmp.cmdexectool.scheduler.common.Constants;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.CommandType;
 import com.jz.dmp.cmdexectool.scheduler.common.enums.DataType;
@@ -36,6 +41,8 @@ import com.jz.dmp.cmdexectool.scheduler.common.utils.placeholder.BusinessTimeUti
 import com.jz.dmp.cmdexectool.scheduler.common.utils.placeholder.PlaceholderUtils;
 import com.jz.dmp.cmdexectool.scheduler.common.utils.placeholder.TimePlaceholderUtils;
 
+import freemarker.core.ReturnInstruction.Return;
+
 /**
  * parameter parse utils
  */
@@ -248,4 +255,56 @@ public class ParameterUtils {
 		}
 		return map;
 	}
+	/**
+	 * @Title: columnMappingHandler
+	 * @Description: TODO(task SQL: column-mapping handling)
+	 * @param jsonStr column-mapping definition as a JSON string
+	 * @return String the generated SELECT statement
+	 */
+	public static String columnMappingHandler(String jsonStr) {
+		JSONObject jsonObject = JSONObject.parseObject(jsonStr);
+		
+		// index the source fields by their custom id
+		JSONArray sourceArray = jsonObject.getJSONArray("sourceFields");
+		Map<String, JSONObject> sourceMap = new HashMap<String, JSONObject>();
+		for (int index = 0; index < sourceArray.size(); index++) {
+			JSONObject sourceObj = sourceArray.getJSONObject(index);
+			sourceMap.put(sourceObj.getString("customSoruceFiledId"), sourceObj);
+		}
+		
+		// index the target fields by their custom id
+		JSONArray targetArray = jsonObject.getJSONArray("targetFields");
+		Map<String, JSONObject> targetMap = new HashMap<String, JSONObject>();
+		for (int index = 0; index < targetArray.size(); index++) {
+			JSONObject targetObj = targetArray.getJSONObject(index);
+			targetMap.put(targetObj.getString("customTargetFieldId"), targetObj);
+		}
+		
+		// build "SELECT source AS target, ... FROM t_view" from the mapping entries
+		StringBuilder sb = new StringBuilder("SELECT ");
+		JSONArray mappingArray = jsonObject.getJSONArray("columnMapping");
+		int size = mappingArray.size();
+		for (int index = 0; index < size; index++) {
+			JSONObject mappingObj = mappingArray.getJSONObject(index);
+			JSONObject sourceObj = sourceMap.get(mappingObj.getString("customSoruceFiledId"));
+			JSONObject targetObj = targetMap.get(mappingObj.getString("customTargetFiledId"));
+			
+			String sourceFieldName = sourceObj.getString("sourceFieldName");
+			String targetFieldName = targetObj.getString("targetFieldName");
+			
+			sb.append(sourceFieldName + " AS " + targetFieldName);
+			if (index < size - 1) {
+				sb.append(", ");
+			} else {
+				sb.append(" ");
+			}
+		}
+		sb.append("FROM t_view");
+		return sb.toString();
+	}
 }
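To make the new helper concrete, here is a hedged usage sketch. Only the JSON key names ("sourceFields", "targetFields", "columnMapping", "customSoruceFiledId", "customTargetFiledId", and the field-name keys) come from the method above; the ids, field values and the wrapping class are invented for illustration.

// Hypothetical round trip through ParameterUtils.columnMappingHandler.
import com.jz.dmp.cmdexectool.scheduler.common.utils.ParameterUtils;

public class ColumnMappingExample {
    public static void main(String[] args) {
        String jsonStr = "{"
                + "\"sourceFields\":[{\"customSoruceFiledId\":\"s1\",\"sourceFieldName\":\"user_id\"}],"
                + "\"targetFields\":[{\"customTargetFieldId\":\"t1\",\"targetFieldName\":\"uid\"}],"
                + "\"columnMapping\":[{\"customSoruceFiledId\":\"s1\",\"customTargetFiledId\":\"t1\"}]"
                + "}";

        // Expected result: "SELECT user_id AS uid FROM t_view", which SqlParameters
        // then feeds into the json2 transform template as the follow-up view query.
        System.out.println(ParameterUtils.columnMappingHandler(jsonStr));
    }
}

The block that follows is the new Waterdrop sink template referenced by WATERDROP_FTL_SINK_API (presumably sink_api.ftl):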
Api {
<#if url??>
url = "${url!}"
</#if>
<#if apiKey??>
apiKey = "${apiKey!}"
</#if>
<#if method??>
method = "${method!}"
</#if>
<#if signType??>
signType = "${signType!}"
</#if>
<#if authCode??>
authCode = "${authCode!}"
</#if>
isTest = "false"
}
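As a rough illustration, rendering this template with a model that only sets url, apiKey and method (hypothetical values) would produce something like:

Api {
    url = "http://example.com/receive"
    apiKey = "demo-key"
    method = "POST"
    isTest = "false"
}

Keys that are absent from the model are simply skipped by their <#if ...> guards, so the rendered sink block only contains the options the job actually configured.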
@@ -8,16 +8,20 @@ public class Test {
 	public static void main(String[] args) {
 		try {
-			String fileName = "E:/123/test/test_node.bat";
-			Path path = new File(fileName).toPath();
-			//Files.createDirectories(path);
-			//Files.createFile(path);
-			File file = path.toFile();
-			File parentFile = file.getParentFile();
-			if (!parentFile.exists()) {
-				parentFile.mkdirs();
-			}
-			file.createNewFile();
+			/*
+			 * String fileName = "E:/123/test/test_node.bat"; Path path = new
+			 * File(fileName).toPath(); //Files.createDirectories(path);
+			 * //Files.createFile(path); File file = path.toFile(); File parentFile =
+			 * file.getParentFile(); if (!parentFile.exists()) { parentFile.mkdirs(); }
+			 * file.createNewFile();
+			 */
+			
+			String aa = "E:/aa.csv";
+			String[] arrs = aa.split("\\.");
+			for (String str : arrs) {
+				System.out.println(str);
+			}
 		} catch (Exception e) {
 			e.printStackTrace();
 		}
...
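The reworked Test#main simply exercises String.split with an escaped dot: for "E:/aa.csv", split("\\.") treats the dot as a literal character rather than a regex wildcard, so the loop prints "E:/aa" and "csv" on separate lines.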