Shanghai Big Data Center unified to-do: add field

dev
wangxuanran 2023-06-14 19:37:35 +08:00
parent cdda440ef2
commit 36ed42ce30
6 changed files with 69 additions and 58 deletions


@@ -69,7 +69,7 @@ public class OtherSystemToOAServiceImpl implements OtherSystemToOAService {
         String sql = "select id from hrmresource where " + Util.null2DefaultStr(ShBigDataUtil.getPropertiesValByKey("ssoOaCompareField"),"outkey") + " = #{outKey}";
         int id = otherSystemToOAMapper.selectUserIdByOutKey(sql, oaOutKey);
         if(id < 0){
-            throw new CustomerException(Util.logStr("code : {} not found in OA!", id));
+            throw new CustomerException(Util.logStr("code : {} not found in OA!", oaOutKey));
         }
         return id;
     }


@@ -42,6 +42,7 @@ public class WorkFlowToVmsAndMQService {
     // form fields
     private static final String VMS_SUCCESS = "vms_success";
     private static final String SUCCESS = "0";
     {
         httpUtils.getGlobalCache().header.put("Content-Type", MediaType.APPLICATION_JSON); // global request header
     }
@@ -50,11 +51,11 @@ public class WorkFlowToVmsAndMQService {
     /**
      * <h1></h1>
      *
      * @param onlyMark
      * @param billTable
      * @param requestId id
      * @param vmsResponseVoField vms
      * @param config kafka
      * @author xuanran.wang
      * @dateTime 2022/12/5 17:05
      **/
@@ -68,9 +69,9 @@ public class WorkFlowToVmsAndMQService {
         String url = requestMappingConfig.getRequestUrl();
         dealWithMapping.setMainTable(billTable);
         Map<String, Object> param = dealWithMapping.getRequestParam(recordSet, requestMappingConfig);
-        String vmsSuccess = Util.null2DefaultStr(recordSet.getString(VMS_SUCCESS),"");
-        String mqSuccess = Util.null2DefaultStr(recordSet.getString(MQ_SUCCESS),"");
-        if(!SUCCESS.equals(vmsSuccess)){
+        String vmsSuccess = Util.null2DefaultStr(recordSet.getString(VMS_SUCCESS), "");
+        String mqSuccess = Util.null2DefaultStr(recordSet.getString(MQ_SUCCESS), "");
+        if (!SUCCESS.equals(vmsSuccess)) {
             ResponeVo responeVo;
             try {
                 responeVo = httpUtils.apiPost(url, param);
@@ -80,7 +81,7 @@ public class WorkFlowToVmsAndMQService {
             parseResponseVo(responeVo, url, param, vmsResponseVoField);
             updateWorkFlow(VMS_SUCCESS, billTable, requestId);
         }
-        if(!SUCCESS.equals(mqSuccess) && StringUtils.isNotBlank(config)){
+        if (!SUCCESS.equals(mqSuccess) && StringUtils.isNotBlank(config)) {
             sendToMQ(config, param);
             updateWorkFlow(MQ_SUCCESS, billTable, requestId);
         }
@@ -88,13 +89,14 @@ public class WorkFlowToVmsAndMQService {
     /**
      * <h1></h1>
+     *
+     * @param responseVo
+     * @param url
+     * @param requestParam
      * @author xuanran.wang
      * @dateTime 2022/12/23 11:25
-     * @param responseVo
-     * @param url
-     * @param requestParam
      **/
-    private void parseResponseVo(ResponeVo responseVo, String url, Map<String, Object> requestParam, VmsResponseVoField vmsResponseVoField){
+    private void parseResponseVo(ResponeVo responseVo, String url, Map<String, Object> requestParam, VmsResponseVoField vmsResponseVoField) {
         if (responseVo.getCode() != SUCCESS_CODE) { // response status code
             log.error(Util.logStr("can not fetch [{}]this request params is [{}]" + // build the log string
                     "this request heard is [{}]but response status code is [{}]" +
@@ -112,63 +114,65 @@ public class WorkFlowToVmsAndMQService {
     /**
      * <h1>kafka</h1>
+     *
+     * @param kafkaConfig kafka
+     * @param message
      * @author xuanran.wang
      * @dateTime 2023/3/30 14:56
-     * @param kafkaConfig kafka
-     * @param message
      **/
-    public void sendToMQ(String kafkaConfig, Map<String, Object> message){
+    public void sendToMQ(String kafkaConfig, Map<String, Object> message) {
         KafkaProducer<String, String> producer = null;
         InputStream inputStream = null;
         try {
             String path = GCONST.getPropertyPath() + "prop2map" + File.separator + kafkaConfig + ".properties";
             File configFile = new File(path);
-            if(!configFile.exists()){
+            if (!configFile.exists()) {
                 throw new CustomerException("please check /web-inf/prop2map has " + kafkaConfig + ".properties");
             }
             Properties prop = new Properties();
-            inputStream= new BufferedInputStream(Files.newInputStream(configFile.toPath()));
+            inputStream = new BufferedInputStream(Files.newInputStream(configFile.toPath()));
             prop.load(inputStream);
             log.info("prop => " + JSONObject.toJSONString(prop));
             log.info("msg => " + JSONObject.toJSONString(message));
-            String topic = Util.null2DefaultStr(prop.getProperty("topic"),"");
-            if(StringUtils.isBlank(topic)){
+            String topic = Util.null2DefaultStr(prop.getProperty("topic"), "");
+            if (StringUtils.isBlank(topic)) {
                 throw new CustomerException("kafka properties topic can not null!");
             }
             producer = new KafkaProducer<>(prop);
             // send the message to the specified topic
             ProducerRecord<String, String> record = new ProducerRecord<>(topic, JSONObject.toJSONString(message));
             producer.send(record).get();
-        }catch (Exception e){
+        } catch (Exception e) {
             log.error(Util.getErrString(e));
             throw new CustomerException(Util.logStr("send to kafka error!: [{}]", e.getMessage()));
-        }finally {
+        } finally {
             // close the Kafka producer instance
-            if(producer != null){
+            if (producer != null) {
                 producer.close();
             }
-            if(inputStream != null){
+            if (inputStream != null) {
                 try {
                     inputStream.close();
-                }catch (Exception e){
+                } catch (Exception e) {
                     log.error("inputStream close error! " + e.getMessage());
                 }
             }
         }
     }
     /**
      * <h1>sql</h1>
-     * @author xuanran.wang
-     * @dateTime 2023/3/30 19:18
-     * @param field
+     *
+     * @param field
      * @param tableName
      * @param requestId id
+     * @author xuanran.wang
+     * @dateTime 2023/3/30 19:18
      **/
-    public void updateWorkFlow(String field, String tableName, String requestId){
+    public void updateWorkFlow(String field, String tableName, String requestId) {
         String updateSQL = "update " + tableName + " set " + field + " = " + SUCCESS + " where requestid = ?";
         RecordSet recordSet = new RecordSet();
-        if(!recordSet.executeUpdate(updateSQL, requestId)){
+        if (!recordSet.executeUpdate(updateSQL, requestId)) {
             log.error(Util.logStr("update field error! sql: {}, requestId: {}", updateSQL, requestId));
             throw new CustomerException("更新表单字段失败!");
         }
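The sendToMQ method above loads its producer settings from WEB-INF/prop2map/<kafkaConfig>.properties and requires a topic entry. A minimal sketch of such a file, assuming a plain String-serialized producer (host, port and topic name are placeholders, not values taken from this commit):

    bootstrap.servers=kafka-host:9092
    key.serializer=org.apache.kafka.common.serialization.StringSerializer
    value.serializer=org.apache.kafka.common.serialization.StringSerializer
    topic=oa_todo_events

The whole Properties object is handed to KafkaProducer unchanged, so any other standard producer option (acks, retries, security settings) can live in the same file.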

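updateWorkFlow, likewise, concatenates the flag column into an UPDATE statement and binds the request id as the only parameter. With field = VMS_SUCCESS and a hypothetical form table, the executed SQL would look like this (formtable_main_101 is illustrative only, not a name from this commit):

    update formtable_main_101 set vms_success = 0 where requestid = ?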

@@ -74,6 +74,11 @@ public class SendTodoTaskUtil {
         todoTask.setAgentid(agentId);
         todoTask.setTaskName(obj.getRequestnamenew());
         todoTask.setTaskDesc(obj.getRequestnamenew());
+        String pcAgentId = ShBigDataUtil.getPropertiesValByKey("pcAgentId");
+        if(StringUtils.isBlank(pcAgentId)){
+            pcAgentId = agentId;
+        }
+        todoTask.setPcAgentId(pcAgentId);
         String todoSSOCallBackUrl = ShBigDataUtil.getPropertiesValByKey("todoSSOCallBackUrl");
         StringBuilder sb = new StringBuilder(todoSSOCallBackUrl);
         sb.append("?user=")
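The new pcAgentId value is read through ShBigDataUtil.getPropertiesValByKey and falls back to the existing agentId when blank, so current deployments keep working with no configuration change. To point the PC to-do link at a separate application, the key would be added to the same properties source as the other whitelisted keys, roughly (the id below is a placeholder, not a value from this commit):

    pcAgentId=1000020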


@@ -37,6 +37,7 @@ public class ShBigDataUtil {
         WHILTE_LIST.add("getUserIdDebugOutKey");
         WHILTE_LIST.add("ssoInterfaceCompareField");
         WHILTE_LIST.add("ssoOaCompareField");
+        WHILTE_LIST.add("pcAgentId");
     }
     /**


@@ -26,4 +26,5 @@ public class CusTodoTask {
     protected String mobileLinkUrl;
     protected String receiver;
     protected String sender;
+    protected String pcAgentId;
 }


@@ -391,8 +391,8 @@ public class BigDataTest extends BaseTest {
     @Test
     public void testG(){
-        String oaOutKey = "111";
-        String sql = "select id from hrmresource where " + Util.null2DefaultStr(ShBigDataUtil.getPropertiesValByKey("ssoOaCompareField"),"outkey") + " = #{outKey}";
+        String oaOutKey = "wld";
+        String sql = "select id from hrmresource where loginid = #{outKey}";
         int id = otherSystemToOAMapper.selectUserIdByOutKey(sql, oaOutKey);
         log.info("id => " + id);
     }