【feat】 增加日志操作信息记录

This commit is contained in:
Kris 2025-06-09 15:05:20 +08:00
parent 2ef79ab159
commit 8dfee0c830
8 changed files with 252 additions and 33 deletions

View File

@ -77,7 +77,11 @@
<artifactId>commons-csv</artifactId> <artifactId>commons-csv</artifactId>
<version>1.10.0</version> <version>1.10.0</version>
</dependency> </dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.9.6</version> <!-- AspectJ weaver runtime, required for @Aspect support -->
</dependency>
<dependency> <dependency>
<groupId>io.github.ljwlgl</groupId> <groupId>io.github.ljwlgl</groupId>
<artifactId>common-util</artifactId> <artifactId>common-util</artifactId>

View File

@ -0,0 +1,14 @@
package com.celnet.datadump.annotation;
import java.lang.annotation.*;
/**
 * Marks a method whose invocation should be recorded as an operation log entry.
 * Consumed at runtime by the operation-log aspect (see OperateLogAspect).
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface LogServiceAnnotation {
    /** Operation type, e.g. the INSERT/UPDATE/DELETE/SELECT constants from OperateTypeConstant. **/
    String operateType();
    /** Human-readable description of the operation; empty by default. **/
    String remark() default "";
}

View File

@ -0,0 +1,165 @@
package com.celnet.datadump.aspect;
import com.alibaba.fastjson.JSON;
import com.celnet.datadump.annotation.LogServiceAnnotation;
import com.celnet.datadump.entity.DataLog;
import com.celnet.datadump.service.DataLogService;
import com.google.common.collect.Lists;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.multipart.MultipartFile;

import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;

import java.lang.reflect.Method;
import java.util.Date;
import java.util.List;
/**
 * Aspect that persists an operation-log record ({@link DataLog}) for every
 * method annotated with {@link LogServiceAnnotation}.
 *
 * <p>Fixes over the original version:
 * <ul>
 *   <li>the target method's exception is rethrown instead of swallowed
 *       (previously callers silently received {@code null});</li>
 *   <li>{@code save(null)} is no longer attempted when the annotation is absent;</li>
 *   <li>request resolution is null-safe for non-web invocations;</li>
 *   <li>client IP is read from the real {@code X-Forwarded-For} header with a
 *       {@code getRemoteAddr()} fallback ({@code HTTP_X_FORWARDED_FOR} is a CGI
 *       variable name, never an HTTP header);</li>
 *   <li>filtered (non-serializable) arguments no longer leave {@code null}
 *       holes in the serialized request data;</li>
 *   <li>a failure while persisting the log can no longer break the business call.</li>
 * </ul>
 */
@Aspect
@Component
@Order(-1)
public class OperateLogAspect {

    private static final Logger log = LoggerFactory.getLogger(OperateLogAspect.class);

    @Autowired
    private DataLogService dataLogService;

    /** Matches every method carrying {@link LogServiceAnnotation}. */
    @Pointcut(value = "@annotation(com.celnet.datadump.annotation.LogServiceAnnotation)")
    public void operateLogAspectPoint() {
    }

    /**
     * Runs the intercepted method and records a {@link DataLog} entry
     * describing the call (arguments, timing, outcome).
     *
     * @param joinPoint the intercepted invocation
     * @return the target method's return value
     * @throws Throwable whatever the target method threw — rethrown so callers
     *         still observe the original failure
     */
    @Around("operateLogAspectPoint()")
    public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
        Date startTime = new Date();
        LogServiceAnnotation logServiceAnno = getAnnotationLog(joinPoint);
        Object[] args = joinPoint.getArgs();
        try {
            Object result = joinPoint.proceed();
            saveLog(initializeOperateLog(joinPoint, args, startTime, logServiceAnno, result, null));
            return result;
        } catch (Throwable throwable) {
            saveLog(initializeOperateLog(joinPoint, args, startTime, logServiceAnno, null, throwable));
            log.error("operation-log aspect captured exception", throwable);
            // Propagate the business exception; logging must not change outcomes.
            throw throwable;
        }
    }

    /**
     * Persists the log entry. Guards against a {@code null} record (annotation
     * missing) and makes sure a persistence failure never breaks the
     * intercepted business call.
     */
    private void saveLog(DataLog dataLog) {
        if (dataLog == null) {
            return;
        }
        try {
            dataLogService.save(dataLog);
        } catch (Exception e) {
            log.error("failed to persist operation log", e);
        }
    }

    /**
     * Builds the {@link DataLog} record for one invocation.
     *
     * @param joinPoint      the intercepted invocation
     * @param args           raw method arguments
     * @param startTime      time the advice started
     * @param logServiceAnno the resolved annotation; {@code null} yields no record
     * @param result         the target method's return value, or {@code null} on failure
     * @param throwable      the failure, or {@code null} on success
     * @return the populated record, or {@code null} when no annotation is present
     */
    private DataLog initializeOperateLog(ProceedingJoinPoint joinPoint, Object[] args, Date startTime,
                                         LogServiceAnnotation logServiceAnno, Object result, Throwable throwable) {
        if (logServiceAnno == null) {
            return null;
        }
        // Null-safe request resolution: the aspect may run outside a web request
        // (e.g. scheduled jobs), where no RequestAttributes are bound.
        HttpServletRequest request = null;
        RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
        if (requestAttributes != null) {
            request = (HttpServletRequest) requestAttributes.resolveReference(RequestAttributes.REFERENCE_REQUEST);
        }
        String ip = null;
        String requestUrl = null;
        if (request != null) {
            // Standard proxy header; fall back to the socket peer address.
            ip = request.getHeader("X-Forwarded-For");
            if (ip == null || ip.isEmpty()) {
                ip = request.getRemoteAddr();
            }
            requestUrl = request.getRequestURI();
        }
        // Collect only JSON-serializable arguments. ServletRequest/ServletResponse
        // and MultipartFile cannot be serialized (their serialization throws
        // IllegalStateException), so they are skipped entirely rather than left
        // as null holes in a fixed-size array.
        List<Object> serializableArgs = Lists.newArrayList();
        for (Object arg : args) {
            if (arg instanceof ServletRequest || arg instanceof ServletResponse || arg instanceof MultipartFile) {
                continue;
            }
            serializableArgs.add(arg);
        }
        String argStr = serializableArgs.isEmpty() ? "" : JSON.toJSONString(serializableArgs);
        String resultStr = result == null ? "" : JSON.toJSONString(result);
        String exceptionStr = throwable == null ? "" : JSON.toJSONString(throwable.getMessage());
        Date endTime = new Date();

        DataLog dataLog = new DataLog();
        dataLog.setIp(ip);
        dataLog.setStartTime(startTime);
        dataLog.setEndTime(endTime);
        // NOTE(review): success detection by substring "200" in the serialized
        // result is fragile (any payload containing "200" matches) — confirm the
        // Result/ReturnT wrapper always exposes its code here, or compare the
        // wrapper's code field explicitly.
        if (resultStr.contains("200")) {
            dataLog.setCode("200");
            dataLog.setStatus("成功");
            dataLog.setMessage(resultStr);
        } else {
            dataLog.setCode("500");
            dataLog.setStatus("失败");
            dataLog.setMessage(exceptionStr);
        }
        dataLog.setRequestUrl(requestUrl);
        dataLog.setRequestType(logServiceAnno.operateType());
        dataLog.setRequestData(argStr);
        dataLog.setRequestMethod(logServiceAnno.remark());
        return dataLog;
    }

    /**
     * Resolves the {@link LogServiceAnnotation} from the intercepted method.
     *
     * @param joinPoint the intercepted invocation
     * @return the annotation, or {@code null} when absent or unresolvable
     */
    private LogServiceAnnotation getAnnotationLog(ProceedingJoinPoint joinPoint) {
        Signature signature = joinPoint.getSignature();
        if (signature instanceof MethodSignature) {
            Method method = ((MethodSignature) signature).getMethod();
            if (method != null) {
                LogServiceAnnotation annotation = method.getAnnotation(LogServiceAnnotation.class);
                // Parameterized logger call instead of System.out.println.
                log.debug("resolved @LogServiceAnnotation: {}", JSON.toJSONString(annotation));
                return annotation;
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,16 @@
package com.celnet.datadump.constant;
/**
 * Operation type constants consumed by {@code LogServiceAnnotation#operateType()}.
 *
 * <p>Declared {@code final} with a private constructor: this is a pure constant
 * holder and must never be instantiated or subclassed.
 */
public final class OperateTypeConstant {

    /** Delete operation. */
    public static final String TYPE_DELETE = "DELETE";
    /** Insert operation. */
    public static final String TYPE_INSERT = "INSERT";
    /** Update operation. */
    public static final String TYPE_UPDATE = "UPDATE";
    /** Select/query operation. */
    public static final String TYPE_SELECT = "SELECT";

    /** Non-instantiable utility class. */
    private OperateTypeConstant() {
        throw new AssertionError("No OperateTypeConstant instances");
    }
}

View File

@ -1,13 +1,14 @@
package com.celnet.datadump.controller; package com.celnet.datadump.controller;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.celnet.datadump.annotation.LogServiceAnnotation;
import com.celnet.datadump.entity.DataObject; import com.celnet.datadump.entity.DataObject;
import com.celnet.datadump.global.Result; import com.celnet.datadump.global.Result;
import com.celnet.datadump.param.*; import com.celnet.datadump.param.*;
import com.celnet.datadump.service.*; import com.celnet.datadump.service.*;
import com.celnet.datadump.util.DataUtil; import com.celnet.datadump.util.DataUtil;
import com.celnet.datadump.constant.OperateTypeConstant;
import com.xxl.job.core.biz.model.ReturnT; import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.annotation.XxlJob;
import io.swagger.annotations.Api; import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@ -201,6 +202,7 @@ public class JobController {
*/ */
@PostMapping("/dataImportBatchJob") @PostMapping("/dataImportBatchJob")
@ApiOperation("生成newSFID大数据量") @ApiOperation("生成newSFID大数据量")
@LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_INSERT, remark = "生成newSFID大数据量")
public ReturnT<String> dataImportBatchJob(String paramStr) throws Exception { public ReturnT<String> dataImportBatchJob(String paramStr) throws Exception {
log.info("dataImportBatchJob execute start .................."); log.info("dataImportBatchJob execute start ..................");
SalesforceParam param = new SalesforceParam(); SalesforceParam param = new SalesforceParam();
@ -227,6 +229,7 @@ public class JobController {
*/ */
@PostMapping("/dataUpdateBatchJob") @PostMapping("/dataUpdateBatchJob")
@ApiOperation("更新数据(大数据量)") @ApiOperation("更新数据(大数据量)")
@LogServiceAnnotation(operateType = OperateTypeConstant.TYPE_UPDATE, remark = "更新数据(大数据量)")
public ReturnT<String> dataUpdateBatchJob(String paramStr) throws Exception { public ReturnT<String> dataUpdateBatchJob(String paramStr) throws Exception {
log.info("dataImportBatchJob execute start .................."); log.info("dataImportBatchJob execute start ..................");
SalesforceParam param = new SalesforceParam(); SalesforceParam param = new SalesforceParam();

View File

@ -8,7 +8,6 @@ import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty; import io.swagger.annotations.ApiModelProperty;
import lombok.Getter; import lombok.Getter;
import lombok.Setter; import lombok.Setter;
import lombok.Value;
import java.io.Serializable; import java.io.Serializable;
import java.util.Date; import java.util.Date;
@ -32,6 +31,13 @@ public class DataLog implements Serializable {
@ApiModelProperty(value = "id") @ApiModelProperty(value = "id")
private Integer id; private Integer id;
/**
* 请求参数
*/
@TableField("request_url")
@ApiModelProperty(value = "请求接口")
private String requestUrl;
/** /**
* 请求参数 * 请求参数
*/ */
@ -40,11 +46,11 @@ public class DataLog implements Serializable {
private String requestData; private String requestData;
/** /**
* 请求状态 * 请求参数
*/ */
@TableField("request_status") @TableField("ip")
@ApiModelProperty(value = "请求状态") @ApiModelProperty(value = "请求IP")
private String requestStatus; private String ip;
/** /**
* 开始时间 * 开始时间
@ -77,14 +83,23 @@ public class DataLog implements Serializable {
/** /**
* 错误信息 * 错误信息
*/ */
@TableField("error_message") @TableField("message")
@ApiModelProperty(value = "错误信息") @ApiModelProperty(value = "响应信息")
private String errorMessage; private String message;
/** /**
* 是否发送邮件 * 请求状态
*/ */
@TableField("email_flag") @TableField("code")
@ApiModelProperty(value = "是否发送邮件") @ApiModelProperty(value = "响应码")
private Boolean emailFlag; private String code;
/**
* 请求状态
*/
@TableField("status")
@ApiModelProperty(value = "状态")
private String status;
} }

View File

@ -36,6 +36,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.time.ZoneId; import java.time.ZoneId;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
@ -296,6 +298,8 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
sfNum = sfNum + checkInsertResults(bulkConnection, salesforceInsertJob, batchInfos, api, ids); sfNum = sfNum + checkInsertResults(bulkConnection, salesforceInsertJob, batchInfos, api, ids);
// Files.delete(Paths.get(fullPath));
} catch (Exception e) { } catch (Exception e) {
log.error("manualCreatedNewId error api:{}", api, e); log.error("manualCreatedNewId error api:{}", api, e);
throw e; throw e;
@ -352,14 +356,13 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
index ++; index ++;
log.info("Created row with id " + id); log.info("Created row with id " + id);
} else if (!insertStatus) { } else if (!insertStatus) {
DataLog dataLog = new DataLog(); // DataLog dataLog = new DataLog();
dataLog.setRequestData("BulkInsertapi:" + api); // dataLog.setRequestData("BulkInsertapi:" + api);
dataLog.setEmailFlag(false); // dataLog.setEndTime(new Date());
dataLog.setEndTime(new Date()); // dataLog.setStartTime(new Date());
dataLog.setStartTime(new Date()); // dataLog.setRequestType("Insert");
dataLog.setRequestType("Insert"); // dataLog.setMessage(error);
dataLog.setErrorMessage(error); // dataLogService.save(dataLog);
dataLogService.save(dataLog);
log.info("Failed with error: " + error); log.info("Failed with error: " + error);
} }
} }
@ -557,6 +560,8 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
sfNum = sfNum + checkUpdateResults(bulkConnection, salesforceInsertJob, batchInfos,api); sfNum = sfNum + checkUpdateResults(bulkConnection, salesforceInsertJob, batchInfos,api);
// Files.delete(Paths.get(fullPath));
} catch (Throwable e) { } catch (Throwable e) {
log.info(e.getMessage()); log.info(e.getMessage());
throw e; throw e;
@ -606,14 +611,13 @@ public class DataImportBatchServiceImpl implements DataImportBatchService {
index ++; index ++;
log.info("Update row with id " + id); log.info("Update row with id " + id);
} else { } else {
DataLog dataLog = new DataLog(); // DataLog dataLog = new DataLog();
dataLog.setRequestData("BulkUpdateapi:" + api); // dataLog.setRequestData("BulkUpdateapi:" + api);
dataLog.setEmailFlag(false); // dataLog.setEndTime(new Date());
dataLog.setEndTime(new Date()); // dataLog.setStartTime(new Date());
dataLog.setStartTime(new Date()); // dataLog.setRequestType("Update");
dataLog.setRequestType("Update"); // dataLog.setMessage(error);
dataLog.setErrorMessage(error); // dataLogService.save(dataLog);
dataLogService.save(dataLog);
log.info("Failed with error: " + error); log.info("Failed with error: " + error);
} }
} }

View File

@ -6,9 +6,7 @@ import com.celnet.datadump.mapper.DataLogMapper;
import com.celnet.datadump.service.DataLogService; import com.celnet.datadump.service.DataLogService;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
/**
*
*/
@Service @Service
public class DataLogServiceImpl extends ServiceImpl<DataLogMapper, DataLog> implements DataLogService { public class DataLogServiceImpl extends ServiceImpl<DataLogMapper, DataLog> implements DataLogService {