Commit e2a8d910 authored by mcb

commit

parent fe8b550d
......@@ -84,6 +84,8 @@ public class GatewayApiConstant {
public static final String listGetApiKey = "/api/interface/listGetApiKey";
//Delete folder
public static final String delFolder = "/api/producer/delProjectFolder";
//Get function template list
public static final String functionTemplate = "/api/producer/getFunctionTemplateList";
private static Logger logger = LoggerFactory.getLogger(GatewayApiConstant.class);
......
......@@ -574,7 +574,7 @@ public class HttpClientUtils {
System.out.println(response.getStatusLine());
if (entity != null) {
result = EntityUtils.toString(entity);
LOGGER.info(result);
LOGGER.info("response results{}" + result);
}
} catch (ClientProtocolException e) {
......
......@@ -70,7 +70,7 @@ public class RealTimeSyncController {
}
/**
* Batch-start real-time sync tasks
* Batch start/stop real-time sync tasks
*
* @return
* @author Bellamy
......
......@@ -298,11 +298,11 @@ public class DmpApiServiceMangeController {
@ApiOperation(value = "获取文件夹列表", notes = "获取文件夹列表")
@GetMapping(value = "/folderTree")
@ApiImplicitParams({@ApiImplicitParam(name = "projectId", value = "项目id"),
@ApiImplicitParam(name = "orgCode", value = "组织编码")})
public JsonResult getFolderTree(@RequestParam(name = "projectId", required = false) String projectId, @RequestParam(name = "orgCode", required = false) String orgCode) {
@ApiImplicitParam(name = "orgFolder", value = "组织编码")})
public JsonResult getFolderTree(@RequestParam(name = "projectId", required = false) String projectId, @RequestParam(name = "orgFolder", required = false) String orgFolder) {
JsonResult jsonResult = new JsonResult();
try {
jsonResult = dmpApiServiceMangeService.getFolderTree(projectId, orgCode);
jsonResult = dmpApiServiceMangeService.getFolderTree(projectId, orgFolder);
} catch (Exception e) {
jsonResult.setMessage(e.getMessage());
jsonResult.setCode(ResultCode.INTERNAL_SERVER_ERROR);
......@@ -373,6 +373,27 @@ public class DmpApiServiceMangeController {
return jsonResult;
}
/**
* Get the function template list
*
* @return
* @author Bellamy
* @since 2021-03-03
*/
@ApiOperation(value = "获取function模板列表", notes = "获取function模板列表")
@GetMapping(value = "/functionTemplate")
public JsonResult getFunctionTemplateList() throws Exception {
JsonResult jsonResult = new JsonResult();
try {
jsonResult = dmpApiServiceMangeService.getFunctionTemplateList();
} catch (Exception e) {
jsonResult.setMessage(e.getMessage());
jsonResult.setCode(ResultCode.INTERNAL_SERVER_ERROR);
e.printStackTrace();
}
return jsonResult;
}
/**
* Get data source table fields
*
......
......@@ -27,4 +27,7 @@ public class OrganizationManageListQueryReq extends BasePageBean implements Seri
@ApiModelProperty(value = "联系人")
private String linkman;
@ApiModelProperty(value = "文件夹id")
private String fileId;
}
......@@ -97,7 +97,7 @@ public interface DmpApiServiceMangeService {
* @author Bellamy
* @since 2021-02-24
*/
JsonResult getFolderTree(String projectId, String orgCode) throws Exception;
JsonResult getFolderTree(String projectId, String orgFolder) throws Exception;
/**
* API metering - list of invoked APIs
......@@ -131,4 +131,13 @@ public interface DmpApiServiceMangeService {
* @since 2021-03-03
*/
JsonResult delFolderById(String id) throws Exception;
/**
* Get the function template list
*
* @return
* @author Bellamy
* @since 2021-03-03
*/
JsonResult getFunctionTemplateList() throws Exception;
}
\ No newline at end of file
......@@ -265,6 +265,21 @@ public class DmpApiServiceMangeServiceImpl implements DmpApiServiceMangeService
return result;
}
/**
* Get the function template list
*
* @return
* @author Bellamy
* @since 2021-03-03
*/
@Override
public JsonResult getFunctionTemplateList() throws Exception {
String url = gatewayUrl + GatewayApiConstant.functionTemplate;
Map params = new HashMap();
JsonResult result = GatewayApiConstant.getRequest2GetData(url, params);
return result;
}
/**
* Service development API list
*
......@@ -289,14 +304,14 @@ public class DmpApiServiceMangeServiceImpl implements DmpApiServiceMangeService
* @since 2021-02-24
*/
@Override
public JsonResult getFolderTree(String projectId, String orgCode) throws Exception {
public JsonResult getFolderTree(String projectId, String orgFolder) throws Exception {
String url = gatewayUrl + GatewayApiConstant.folderTree;
Map params = new HashMap();
if (StringUtils.isNotEmpty(projectId)) {
params.put("projectId", projectId);
}
params.put("orgCode", orgCode);
params.put("orgFolder", orgFolder);
JsonResult result = GatewayApiConstant.getRequest2GetData(url, params);
return result;
}
......
......@@ -14,6 +14,7 @@ import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import com.alibaba.fastjson.JSONObject;
import com.jz.common.utils.web.HttpClientUtils;
import com.mysql.jdbc.Blob;
import org.apache.tomcat.jni.Mmap;
import org.slf4j.Logger;
......@@ -1603,4 +1604,5 @@ public class DmpDevelopTaskServiceImpl extends BaseService implements DmpDevelop
}
return JsonResult.ok(list);
}
}
\ No newline at end of file
......@@ -2,6 +2,7 @@ package com.jz.dmp.modules.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.amazonaws.services.dynamodbv2.xspec.M;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.jz.agent.service.DmpDsAgentService;
......@@ -423,10 +424,20 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
DmpRealtimeTaskHistory taskHistory = new DmpRealtimeTaskHistory();
BeanUtils.copyProperties(saveBody, taskHistory);
if (StringUtils.isEmpty(String.valueOf(params.get("taskId")))) {
Map<String, String> respData = publishTask2Kafka(connectorUrl, jsonStr);
Map<String, String> respData = publishTask2Kafka(connectorUrl, jsonStr, "");
saveBody.setConnectorJobId(respData.get("connectorJobId"));
saveBody.setStatus(respData.get("status"));
dmpRealtimeSyncInfoDao.insert(saveBody);
//asynchronously fetch the task status
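//(the spawned thread calls getKafkaTaskStatus, which polls the connector status endpoint and writes the final state back to this record)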
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
Map map = new HashMap();
map.put("id", saveBody.getId());
getKafkaTaskStatus(connectorUrl, map);
}
});
thread.start();
} else {
DmpRealtimeSyncInfo realtimeTask = dmpRealtimeSyncInfoDao.queryById(Integer.valueOf(params.get("taskId").toString()));
if (realtimeTask == null)
......@@ -436,7 +447,7 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
//when editing, delete the existing task first, then republish it
HttpClientUtils.httpDelete(connectorUrl + "/" + realtimeTask.getConnectorJobId() + "/");
Map<String, String> respData = publishTask2Kafka(connectorUrl, jsonStr);
Map<String, String> respData = publishTask2Kafka(connectorUrl, jsonStr, params.get("taskId").toString());
saveBody.setConnectorJobId(respData.get("connectorJobId"));
saveBody.setStatus(respData.get("status"));
saveBody.setUpdateTime(new Date());
......@@ -468,7 +479,7 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
/*
* Publish to Kafka and start it running; if the request succeeds the data is saved, otherwise it fails
* */
public Map<String, String> publishTask2Kafka(String connectorUrl, String jsonStr) throws Exception {
public Map<String, String> publishTask2Kafka(String connectorUrl, String jsonStr, String id) throws Exception {
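//the added id parameter carries the sync record's primary key: empty on the first publish, set when re-publishing an existing task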
Map<String, String> returnMap = new HashMap();
Map<String, Object> result = RestClient.post(connectorUrl, jsonStr);
logger.info("=======response data {}" + JSONObject.toJSONString(result));
......@@ -476,7 +487,10 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
if (StringUtils.isEmpty(connectorJobId)) {
throw new RuntimeException("提交失败!");
}
String status = getExecuteShellStatus(connectorUrl + "/" + connectorJobId + "/status", new HashMap());
Map params = new HashMap();
params.put("id", id);
params.put("flag", "publish");
String status = getExecuteKafkaStatus(connectorUrl + "/" + connectorJobId + "/status", params);
returnMap.put("status", status);
returnMap.put("connectorJobId", connectorJobId);
return returnMap;
......@@ -990,7 +1004,6 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
String url = connectorUrl + "/" + connectorJobId;
String statusUrl = url;
//resume, delete, pause
String param = "";
if ("01".equals(type)) {
url += "/resume";
} else if ("02".equals(type)) {
......@@ -998,11 +1011,14 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
}
logger.info("######执行表数据id{}" + ids[i]);
//execute the shell
//call the Kafka REST API
OKHttpUtil.httpPut(url, "");
//fetch the task status
String status = getExecuteShellStatus(statusUrl + "/status", new HashMap<>());
logger.info("response status{}" + status);
Map params = new HashMap<>();
params.put("id", realTaskId);
params.put("flag", url.substring(url.lastIndexOf("/") + 1));
String status = getExecuteKafkaStatus(statusUrl + "/status", params);
//task status after execution: PAUSED or RUNNING
saveBaby.setStatus(status);
saveBaby.setUptPerson(SessionUtils.getCurrentUserId());
......@@ -1015,23 +1031,55 @@ public class DmpRealtimeSyncInfoServiceImpl implements DmpRealtimeSyncInfoServic
}
/*
* Fetch the task status
* Fetch the task status via the Kafka REST API
* */
public String getExecuteShellStatus(String statusUrl, Map params) throws Exception {
String respData = HttpClientUtils.getJsonForParam(statusUrl, params);
public String getExecuteKafkaStatus(String statusUrl, Map params) throws Exception {
String respData = HttpClientUtils.getJsonForParam(statusUrl, new HashMap<>());
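//the id/flag entries in params are only consumed below in this method; the status request itself is sent with no query parameters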
if (StringUtils.isEmpty(respData)) {
throw new RuntimeException("执行失败!");
throw new RuntimeException("get status failed!");
}
logger.info("#################响应结果{}" + respData);
Map map = JSONObject.parseObject(respData);
if (!map.containsKey("connector")) {
throw new RuntimeException("执行失败!");
String tasksStatus = "";
List<Map> tasks = (List<Map>) map.get("tasks");
if (tasks != null && tasks.size() > 0) {
tasksStatus = (String) tasks.get(0).get("state");
logger.info("response tasks status {}", tasksStatus);
return tasksStatus;
}
Map connector = (Map) map.get("connector");
if (!connector.containsKey("state")) {
throw new RuntimeException("get status failed!");
}
String status = (String) connector.get("state");
if (StringUtils.isEmpty(status))
throw new RuntimeException("执行失败!");
logger.info("response connector status{}" + status);
if (StringUtils.isNotEmpty((String) params.get("id"))) {
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
getKafkaTaskStatus(statusUrl, params);
}
});
thread.start();
}
return status;
}
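/*
* Background poll of the connector status endpoint (Kafka REST API): retries until a task
* state is reported, then writes that state back to the DmpRealtimeSyncInfo record whose id
* is carried in params ("id").
* */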
public void getKafkaTaskStatus(String statusUrl, Map params) {
String respData = HttpClientUtils.getJsonForParam(statusUrl, params);
if (StringUtils.isEmpty(respData)) {
throw new RuntimeException("get status failed!");
}
Map map = JSONObject.parseObject(respData);
List<Map> tasks = (List<Map>) map.get("tasks");
if (tasks.size() == 0) {
getKafkaTaskStatus(statusUrl, params);
}
String status = (String) tasks.get(0).get("state");
logger.info("response tasks status{}" + status);
DmpRealtimeSyncInfo saveBody = new DmpRealtimeSyncInfo();
saveBody.setStatus(status);
saveBody.setId(Integer.valueOf(params.get("id").toString()));
dmpRealtimeSyncInfoDao.update(saveBody);
}
}
\ No newline at end of file
......@@ -182,12 +182,10 @@
execution_flows
where 1=1
<if test="businessTime != null and businessTime != ''"> and from_unixtime(submit_time/1000,'%Y-%m-%d') =#{businessTime} </if>
<if test="taskName != null and taskName != ''">
and flow_id in
<foreach collection="taskName" item="item" open="(" separator="," close=")">
#{item}
</foreach>
</if>
and flow_id in
<foreach collection="taskName" item="item" open="(" separator="," close=")">
#{item}
</foreach>
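<!-- note: with the if-guard removed, callers must always supply a non-empty taskName collection, otherwise this renders as "flow_id in ()" and the SQL is invalid -->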
<if test="startTime != null and startTime != ''"> and from_unixtime(submit_time/1000,'%Y-%m-%d %H:%i:%s') >=#{startTime} </if>
<if test="endTime != null and endTime != ''">#{endTime} >= and from_unixtime(submit_time/1000,'%Y-%m-%d %H:%i:%s')</if>
</select>
......
......@@ -175,9 +175,9 @@
t3.real_name as userName
from
dmp_navigation_tree t1
left join dmp_develop_task t2 on t2.TREE_ID=t1.ID and t2.data_status ='1'
inner join dmp_develop_task t2 on t2.TREE_ID=t1.ID and t2.data_status ='1'
left join dmp_member t3 on t1.create_user_id=t3.user_id
where 1=1 and t1.type='01'
where 1=1 and IS_LEVEL ='1'
and t1.project_id = #{projectId}
<if test="taskId != null and taskId != ''"> and t2.id =#{taskId} </if>
<if test="treeIdOrName != null and treeIdOrName != ''"> and t1.name like concat('%',#{treeIdOrName},'%') </if>
......@@ -218,9 +218,9 @@
(case when t1.type='01' then '离线同步' when t1.type='02' then '实时同步' when t1.type='03' then '数据开发' end) as type
from
dmp_navigation_tree t1
left join dmp_develop_task t2 on t2.TREE_ID=t1.ID and t2.data_status ='1'
inner join dmp_develop_task t2 on t2.TREE_ID=t1.ID and t2.data_status ='1'
left join dmp_member t3 on t1.create_user_id=t3.user_id
where 1=1 and t1.type='01'
where 1=1 and is_level = '1' and t1.data_status ='1'
and t1.project_id = #{projectId}
<if test="treeId != null and treeId != ''"> and t1.id =#{treeId} </if>
<if test="taskName != null and taskName != ''"> and t1.name like concat('%',#{taskName},'%') </if>
......