查看日志場景
- 接口通過網關,訪問服務1
- 接口通過網關,訪問服務1,服務1訪問服務2
- 定時任務,訪問服務1
實現邏輯過程
- HTTP接口請求經過網關時,利用過濾器,將生成的traceId加到RequestHeader中
- 通過網關請求到服務中,利用MVC攔截器取出Header中的traceId,並且將traceId值使用Log中MDC類寫入到日志中。
- 服務1,通過Feign請求其他服務之前,取出MDC類中的traceId賦值到RequestHeader中,被請求服務使用2中的方式取出traceId並記錄到日志中。
- 服務器安裝filebeat(或其他日志收集軟件)收集日志,發送給ElasticSearch。
- 通過Kibana查看日志內容。通過網關或其他方式定位到存在問題的請求中的traceId,通過traceId查看請求內的所有日志
相關代碼
Zuul網關部分代碼
TracePreFilter.java 過濾器
生成traceId,將traceId加入到RequestHeader中,帶入到下游請求中。
@Component
public class TracePreFilter extends ZuulFilter {

    // Pre-routing filter: generates a trace id, stores it in the MDC for the
    // gateway's own log lines, and forwards it downstream in a request header.

    @Override
    public String filterType() {
        return "pre";
    }

    @Override
    public int filterOrder() {
        // Run ahead of other pre filters so the trace id is available to them.
        return -1;
    }

    @Override
    public boolean shouldFilter() {
        // Every request through the gateway gets a trace id.
        return true;
    }

    @Override
    public Object run() throws ZuulException {
        final String newTraceId = LogHelper.getTraceId();
        final RequestContext context = RequestContext.getCurrentContext();
        MDC.put(LogCollectionConstants.traceId, newTraceId);
        context.addZuulRequestHeader(LogCollectionConstants.traceId, newTraceId);
        return null;
    }
}
LogFilter.java 過濾器
記錄請求的詳細信息,請求參數,返回值,時長等信息
@Component
@Slf4j
public class LogFilter extends ZuulFilter {

    /** Context key under which the request start time (epoch millis) is stored. */
    public static final String START_TIME_KEY = "start_time";

    @Override
    public String filterType() {
        return FilterConstants.POST_TYPE;
    }

    @Override
    public int filterOrder() {
        return 0;
    }

    @Override
    public boolean shouldFilter() {
        return true;
    }

    /**
     * Logs method/URL/client IP/params/response body/duration plus the trace id
     * as one JSON line, then restores the response body so the client still
     * receives it. Any failure here is logged and must never break the request.
     */
    @Override
    public Object run() {
        try {
            RequestContext ctx = RequestContext.getCurrentContext();
            HttpServletRequest request = ctx.getRequest();

            // BUG FIX: this is a POST filter, so the original code stored the
            // start time and read it back immediately, making the reported
            // duration always ~0 ms. Reuse a value recorded earlier in the
            // request (e.g. by a pre filter) and only fall back to "now" when
            // nothing was recorded.
            Object recorded = ctx.get(START_TIME_KEY);
            long startTime = (recorded instanceof Long) ? (Long) recorded : System.currentTimeMillis();
            if (recorded == null) {
                ctx.set(START_TIME_KEY, startTime);
            }

            HttpEntity httpEntity = new HttpEntity();
            httpEntity.setMethod(request.getMethod());
            httpEntity.setUrl(request.getRequestURI());
            httpEntity.setIp(request.getRemoteAddr());
            httpEntity.setParameter(JSON.toJSONString(showParams(request)));
            httpEntity.setUserAgent(request.getHeader("user-agent"));

            // The response stream can only be consumed once: read it fully,
            // log it, then write it back so the client still gets the body.
            // Guard against a null stream (e.g. no response body).
            String body = "";
            InputStream stream = ctx.getResponseDataStream();
            if (stream != null) {
                body = new String(StreamUtils.copyToByteArray(stream), StandardCharsets.UTF_8);
            }
            httpEntity.setResult(body);

            httpEntity.setLaunchTime(new Date(startTime));
            httpEntity.setDuration(System.currentTimeMillis() - startTime);
            httpEntity.setTraceId(ctx.getZuulRequestHeaders().get(LogCollectionConstants.traceId));

            log.info("接口統計 {}", JSON.toJSONString(httpEntity));
            ctx.setResponseBody(body);
        } catch (Exception e) {
            log.error("日志統計失敗", e);
            return true;
        }
        return true;
    }

    /**
     * Collects the first non-empty value of each request parameter.
     *
     * @param request current HTTP request
     * @return map of parameter name to its first value (empty values omitted)
     */
    public static HashMap<String, Object> showParams(HttpServletRequest request) {
        HashMap<String, Object> map = new HashMap<>();
        Enumeration<String> paramNames = request.getParameterNames();
        while (paramNames.hasMoreElements()) {
            String paramName = paramNames.nextElement();
            String[] paramValues = request.getParameterValues(paramName);
            if (paramValues.length > 0 && !paramValues[0].isEmpty()) {
                map.put(paramName, paramValues[0]);
            }
        }
        return map;
    }
}
服務代碼
WebMvcConfig.java 攔截器配置
注冊攔截器,將上游請求中的traceId值取出,使用MDC類將內容記錄到日志中
@Configuration
public class WebMvcConfig extends WebMvcConfigurationSupport {

    /** Registers the trace-id interceptor for every request path. */
    @Override
    protected void addInterceptors(InterceptorRegistry registry) {
        TraceInterceptor traceInterceptor = new TraceInterceptor();
        registry.addInterceptor(traceInterceptor).addPathPatterns("/**");
        super.addInterceptors(registry);
    }
}
public class TraceInterceptor implements HandlerInterceptor {

    /**
     * Copies the upstream trace-id header into the MDC so every log line of
     * this request carries it; clears any stale value left on this pooled
     * thread when the header is missing.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
        final String incoming = request.getHeader(LogCollectionConstants.traceId);
        final boolean hasTraceId = StringUtils.isNotEmpty(incoming);
        if (hasTraceId) {
            MDC.put(LogCollectionConstants.traceId, incoming);
        } else {
            MDC.remove(LogCollectionConstants.traceId);
        }
        return true;
    }
}
HystrixConfig.java 熔斷配置
@Configuration
public class HystrixConfig extends LogHystrixConfig {

    /**
     * Spring entry point; all of the MDC-aware Hystrix plugin registration
     * happens in the parent {@link LogHystrixConfig} constructor.
     */
    public HystrixConfig() {
        super();
    }
}
// Wraps Hystrix's concurrency strategy with an MDC-propagating one so the
// traceId survives into Hystrix-managed threads. Hystrix allows each plugin
// kind to be registered only once, so the current plugins must be captured,
// the registry reset, and everything re-registered — order matters here.
public class LogHystrixConfig {
public static final Logger log = LoggerFactory.getLogger(LogHystrixConfig.class);
public LogHystrixConfig(){
try {
HystrixConcurrencyStrategy target = new MdcHystrixConcurrencyStrategy();
HystrixConcurrencyStrategy strategy = HystrixPlugins.getInstance().getConcurrencyStrategy();
// Already registered (e.g. this constructor ran before) — nothing to do.
if (strategy instanceof MdcHystrixConcurrencyStrategy) {
return;
}
// Capture the currently registered plugins so they survive the reset below.
HystrixCommandExecutionHook commandExecutionHook = HystrixPlugins
.getInstance().getCommandExecutionHook();
HystrixEventNotifier eventNotifier = HystrixPlugins.getInstance()
.getEventNotifier();
HystrixMetricsPublisher metricsPublisher = HystrixPlugins.getInstance()
.getMetricsPublisher();
HystrixPropertiesStrategy propertiesStrategy = HystrixPlugins.getInstance()
.getPropertiesStrategy();
// reset() clears ALL plugins; re-register the captured ones plus our strategy.
HystrixPlugins.reset();
HystrixPlugins.getInstance().registerConcurrencyStrategy(target);
HystrixPlugins.getInstance()
.registerCommandExecutionHook(commandExecutionHook);
HystrixPlugins.getInstance().registerEventNotifier(eventNotifier);
HystrixPlugins.getInstance().registerMetricsPublisher(metricsPublisher);
HystrixPlugins.getInstance().registerPropertiesStrategy(propertiesStrategy);
}
catch (Exception e) {
// Best-effort: a failure here only loses trace propagation, not requests.
log.error("Failed to register Sleuth Hystrix Concurrency Strategy", e);
}
}
}
FeignInterceptorConfig.java Feign配置
@Configuration
public class FeignInterceptorConfig extends LogFeignInterceptorConfig implements RequestInterceptor {

    /**
     * Propagates the current MDC trace id to outgoing Feign requests.
     *
     * BUG FIX: only add the header when a trace id actually exists. The
     * original forwarded the value unconditionally, passing a null header
     * value when the calling thread had no trace id in its MDC (behavior of a
     * null header value varies across Feign versions — at best it sends a
     * meaningless header).
     */
    @Override
    public void apply(RequestTemplate requestTemplate) {
        String traceId = super.getTraceId();
        if (traceId != null && !traceId.isEmpty()) {
            requestTemplate.header(LogCollectionConstants.traceId, traceId);
        }
    }
}
public class LogFeignInterceptorConfig {

    /**
     * @return the trace id stored in the current thread's MDC, or {@code null}
     *         when none has been set
     */
    public String getTraceId() {
        final String traceKey = LogCollectionConstants.traceId;
        return MDC.get(traceKey);
    }
}
定時任務AOP
定時任務在開始執行之前,給MDC類 賦值traceId
@Aspect
@Component
@Slf4j
public class ScheduledAspect {

    /** Matches every @Scheduled task method. */
    @Pointcut("@annotation(org.springframework.scheduling.annotation.Scheduled)")
    public void proxyAspect() {
    }

    /** Seeds a fresh trace id before each scheduled run so its logs correlate. */
    @Before("proxyAspect()")
    public void before(JoinPoint joinPoint) throws Throwable {
        String traceId = LogHelper.getTraceId();
        MDC.put(LogCollectionConstants.traceId, traceId);
    }

    /**
     * BUG FIX: clear the trace id once the task finishes (@After runs on both
     * normal and exceptional completion). Scheduler threads are pooled, so the
     * original code left a stale traceId on the thread, polluting unrelated
     * log lines emitted later on the same thread.
     */
    @After("proxyAspect()")
    public void after(JoinPoint joinPoint) {
        MDC.remove(LogCollectionConstants.traceId);
    }
}
logback.xml 日志配置文件
使用%X{traceId}輸出MDC中的traceId的值
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }) [%X{traceId}] %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<property name="FILE_LOG_PATTERN" value="${FILE_LOG_PATTERN:-%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} ${LOG_LEVEL_PATTERN:-%5p} ${PID:- } [%X{traceId}] --- [%t] : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
測試效果圖
1、接口通過網關,訪問服務1
在Service1中寫入測試接口
@RestController
@Slf4j
public class UserController {

    /** Simple endpoint used to verify trace-id logging through the gateway. */
    @GetMapping("/user/getlist")
    public List<String> getlist() {
        log.info("測試接口user");
        List<String> users = asList("user1", "user2", "user3");
        log.warn("測測測");
        return users;
    }
}
通過網關請求接口效果如下
網關中打印日志 通過日志找到traceId為40417dd85d224eda8d67925dce335b6b
Service1中的日志
2、接口通過網關,訪問服務1,服務1訪問服務2
測試代碼
Service1中
@RestController
@Slf4j
public class UserController {

    @Autowired
    private DemoApiClient demoApiClient;

    /** Calls Service2 via Feign so trace-id propagation across services can be verified. */
    @GetMapping("/user/getlist2")
    public List<String> getlist2() {
        log.info("測試接口getlist2");
        List<String> downstream = demoApiClient.getlist2();
        log.info("請求Service2中接口成功,返回數據");
        return downstream;
    }
}
Service2中
@RestController
@Slf4j
public class DemoController {

    /** Downstream endpoint; its log lines should carry the caller's trace id. */
    @GetMapping("/demo/getlist2")
    public List<String> getlist2() {
        log.info("通過了其他的服務請求過來");
        List<String> payload = asList("haha", "hehe", "youyou");
        log.info("請求完了");
        return payload;
    }
}
通過網關查到traceId為f87e568753004b7e830e76497936fb08
Service1中打印日志
Service2中打印日志
3、定時任務,訪問服務1
在Service1中加入測試代碼
@Component
@Slf4j
public class TestJob {

    @Autowired
    private DemoApiClient demoApiClient;

    /** Fires every 2s; the scheduling aspect seeds a trace id before this runs. */
    @Scheduled(fixedRate = 2000)
    public void execute() {
        List<String> data = demoApiClient.getlist2();
        log.info("定時任務執行 打印獲取數據結果 {} ", data);
    }
}
在任務中找到一條執行記錄中的traceId:56fda9d194e241f9a32e6c1ecf61ee58
在Service2中查看打印日志
日志采集
filebeat
日志采集工具,以本文為例展示配置文件
filebeat部分配置內容
# ============================== Filebeat inputs ===============================
filebeat.inputs:
# Active input: ships this service's info/warn logs.
- type: log
  enabled: true # must be true for this input to run
  paths:
    # Directories whose log files are read.
    - /usr/local/webapp/pro-user-ceshi/logs/info/*.log
    # NOTE(review): double slash below looks like a typo; globbing still works, but verify.
    - /usr/local/webapp/pro-user-ceshi/logs/warn//*.log
  # Only ship lines containing a bracketed 32-hex-digit trace id.
  include_lines: ['\[[0-9a-f]{8}([0-9a-f]{4}){3}[0-9a-f]{12}\]']
  # Drop MyBatis SQL noise (prepared statements, parameters, row counts).
  exclude_lines: ['.*: ==> Preparing:.*','.*: ==> Parameters:.*','.*: <== Total:.*']
  fields:
    type: pro-user-ceshi
  # Multiline: lines starting with whitespace+"at"/"...", "Caused by:", or
  # containing " SQL參數" are merged into the previous event (stack traces etc.).
  multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:| SQL參數'
  multiline.negate: false
  multiline.match: after
# Second input (currently disabled): SQL logs of the ceshi2 deployment.
- type: log
  enabled: false # disabled
  paths:
    - /usr/local/webapp/pro-user-ceshi2/logs/sql/*.log
  include_lines: ['\[[0-9a-f]{8}([0-9a-f]{4}){3}[0-9a-f]{12}\]']
  exclude_lines: ['.*: ==> Preparing:.*','.*: ==> Parameters:.*','.*: <== Total:.*']
  fields:
    # NOTE(review): this input tags events "pro-user-ceshi" although the output
    # section routes "pro-user-ceshi2" to its own index — confirm which is intended.
    type: pro-user-ceshi
  multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:|\\$'
  multiline.negate: false
  multiline.match: after
- type: filestream
  # Change to true to enable this input configuration.
  enabled: false
  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
# ============================== Filebeat modules ==============================
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
# ======================= Elasticsearch template setting =======================
setup.template.settings:
  index.number_of_shards: 1
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ["192.168.1.132:9600"] # Elasticsearch address
  indices:
    # Route events to per-service daily indices based on fields.type.
    - index: "system-service-pro-user-ceshi_logs_%{+yyyy.MM.dd}" # index name
      when.equals:
        fields.type: "pro-user-ceshi"
    - index: "system-pro-user-ceshi2_logs_%{+yyyy.MM.dd}"
      when.equals:
        fields.type: "pro-user-ceshi2"