【feat】 Improve recording of SF sync error messages to the data_log table
parent 5ea0ff490b
commit f221ee3f69
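
The failure branches added below persist one row per failed Bulk API record through dataLogService. For reference, a minimal sketch of what the DataLog entity behind the data_log table could look like, inferred only from the setters used in this diff (field types, Lombok usage and the actual table/column mapping are assumptions):

    // Hypothetical sketch of the DataLog entity, reconstructed from the
    // setters in this commit; the real class may carry MyBatis-Plus
    // @TableName/@TableId annotations and additional columns.
    import lombok.Data;
    import java.util.Date;

    @Data
    public class DataLog {
        private String requestData;   // e.g. "BulkInsert,api:<object api name>"
        private String requestType;   // "Insert" or "Update"
        private String errorMessage;  // Error column of the Bulk API result row
        private Boolean emailFlag;    // set to false here; presumably gates e-mail alerts
        private Date startTime;
        private Date endTime;
    }
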
@@ -9,10 +9,7 @@ import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
 import com.celnet.datadump.config.SalesforceExecutor;
 import com.celnet.datadump.config.SalesforceTargetConnect;
-import com.celnet.datadump.entity.DataBatch;
-import com.celnet.datadump.entity.DataBatchHistory;
-import com.celnet.datadump.entity.DataField;
-import com.celnet.datadump.entity.DataObject;
+import com.celnet.datadump.entity.*;
 import com.celnet.datadump.global.SystemConfigCode;
 import com.celnet.datadump.mapper.CustomMapper;
 import com.celnet.datadump.param.DataDumpParam;
@@ -30,6 +27,7 @@ import com.sforce.soap.partner.sobject.SObject;
 import com.xxl.job.core.biz.model.ReturnT;
 import com.xxl.job.core.log.XxlJobLogger;
 import com.xxl.job.core.util.DateUtil;
+import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.time.DateUtils;
@@ -73,6 +71,9 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
     @Autowired
     private DataBatchHistoryService dataBatchHistoryService;
 
+    @Autowired
+    private DataLogService dataLogService;
+
 
     /**
      * Insert entry point
@@ -337,11 +338,11 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
                 for (int i = 0; i < resultCols; i++) {
                     resultInfo.put(resultHeader.get(i), row.get(i));
                 }
-                boolean success = Boolean.valueOf(resultInfo.get("Success"));
+                boolean insertStatus = Boolean.valueOf(resultInfo.get("Success"));
                 boolean created = Boolean.valueOf(resultInfo.get("Created"));
                 String id = resultInfo.get("Id");
                 String error = resultInfo.get("Error");
-                if (success && created) {
+                if (insertStatus && created) {
                     List<Map<String, Object>> maps = new ArrayList<>();
                     Map<String, Object> m = new HashMap<>();
                     m.put("key", "new_id");
@@ -350,7 +351,15 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
                     customMapper.updateById(api, maps, ids[index]);
                     index ++;
                     log.info("Created row with id " + id);
-                } else if (!success) {
+                } else if (!insertStatus) {
+                    DataLog dataLog = new DataLog();
+                    dataLog.setRequestData("BulkInsert,api:" + api);
+                    dataLog.setEmailFlag(false);
+                    dataLog.setEndTime(new Date());
+                    dataLog.setStartTime(new Date());
+                    dataLog.setRequestType("Insert");
+                    dataLog.setErrorMessage(error);
+                    dataLogService.save(dataLog);
                     log.info("Failed with error: " + error);
                 }
             }
@@ -546,7 +555,7 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
 
                 BulkUtil.awaitCompletion(bulkConnection, salesforceInsertJob, batchInfos);
 
-                sfNum = sfNum + checkUpdateResults(bulkConnection, salesforceInsertJob, batchInfos);
+                sfNum = sfNum + checkUpdateResults(bulkConnection, salesforceInsertJob, batchInfos,api);
 
             } catch (Throwable e) {
                 log.info(e.getMessage());
@@ -574,7 +583,7 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
      * Read and write back update results
      */
     public int checkUpdateResults(BulkConnection connection, JobInfo job,
-                                  List<BatchInfo> batchInfoList)
+                                  List<BatchInfo> batchInfoList,String api)
             throws AsyncApiException, IOException {
         int index = 0;
         // batchInfoList was populated when batches were created and submitted
@@ -590,13 +599,21 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
             for (int i = 0; i < resultCols; i++) {
                 resultInfo.put(resultHeader.get(i), row.get(i));
             }
-            boolean success = Boolean.valueOf(resultInfo.get("Success"));
+            boolean updateStatus = Boolean.valueOf(resultInfo.get("Success"));
             String id = resultInfo.get("Id");
             String error = resultInfo.get("Error");
-            if (success) {
+            if (updateStatus) {
                 index ++;
                 log.info("Update row with id " + id);
-            } else if (!success) {
+            } else {
+                DataLog dataLog = new DataLog();
+                dataLog.setRequestData("BulkUpdate,api:" + api);
+                dataLog.setEmailFlag(false);
+                dataLog.setEndTime(new Date());
+                dataLog.setStartTime(new Date());
+                dataLog.setRequestType("Update");
+                dataLog.setErrorMessage(error);
+                dataLogService.save(dataLog);
                 log.info("Failed with error: " + error);
             }
         }
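
Once failures are captured this way, they can be pulled back out of data_log for review. A hedged sketch, assuming DataLogService follows the usual MyBatis-Plus IService<DataLog> pattern (consistent with the dataLogService.save(...) calls above) and that fields map to snake_case columns:

    // Inside a class that has dataLogService injected and an slf4j logger:
    // list the most recent bulk-update failures recorded by this change.
    // Column names (request_type, error_message, end_time) assume the
    // default MyBatis-Plus camelCase -> snake_case mapping.
    List<DataLog> failures = dataLogService.list(
            new QueryWrapper<DataLog>()
                    .eq("request_type", "Update")
                    .isNotNull("error_message")
                    .orderByDesc("end_time"));
    failures.forEach(f -> log.info("{} -> {}", f.getRequestData(), f.getErrorMessage()));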