Authored by Gino Zhang

consumer增加报告日志，用于定时查询关键信息

@@ -13,9 +13,7 @@ import org.slf4j.LoggerFactory;
13 import org.springframework.beans.factory.annotation.Autowired; 13 import org.springframework.beans.factory.annotation.Autowired;
14 import org.springframework.stereotype.Component; 14 import org.springframework.stereotype.Component;
15 15
16 -import java.util.ArrayList;  
17 -import java.util.List;  
18 -import java.util.Set; 16 +import java.util.*;
19 import java.util.stream.Collectors; 17 import java.util.stream.Collectors;
20 18
21 /** 19 /**
@@ -26,10 +24,14 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -26,10 +24,14 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
26 24
27 private static final Logger logger = LoggerFactory.getLogger("FLOW_EXECUTOR"); 25 private static final Logger logger = LoggerFactory.getLogger("FLOW_EXECUTOR");
28 26
  27 + private static final Logger REPORT_LOGGER = LoggerFactory.getLogger("CONSUMER_REPORTER");
  28 +
29 private volatile Set<String> existSourceSet = null; 29 private volatile Set<String> existSourceSet = null;
30 30
31 private volatile YohoKeywordsBO yohoKeywordsBO = null; 31 private volatile YohoKeywordsBO yohoKeywordsBO = null;
32 32
  33 + private volatile Map<String, String> newConversionMap = new HashMap<>(100);
  34 +
33 @Autowired 35 @Autowired
34 private SuggestConversionService suggestConversionService; 36 private SuggestConversionService suggestConversionService;
35 37
@@ -46,6 +48,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -46,6 +48,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
46 48
47 @Override 49 @Override
48 public void init() { 50 public void init() {
  51 + newConversionMap.clear();
49 existSourceSet = suggestConversionService.getAllSources(); 52 existSourceSet = suggestConversionService.getAllSources();
50 logger.info("[func=SuggestConversionFlow.init][existSourceSetSize={}]", existSourceSet.size()); 53 logger.info("[func=SuggestConversionFlow.init][existSourceSetSize={}]", existSourceSet.size());
51 yohoKeywordsBO = suggestConvertorService.buildYohoKeywordBO(); 54 yohoKeywordsBO = suggestConvertorService.buildYohoKeywordBO();
@@ -64,13 +67,14 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -64,13 +67,14 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
64 return true; 67 return true;
65 } 68 }
66 69
67 - List<SpiderContent> filteredContentList = spiderContentList.stream().filter(spiderContent -> spiderContent != null && !existSourceSet.contains(spiderContent.getSubject())).collect(Collectors.toList()); 70 + List<SpiderContent> filteredContentList = spiderContentList.stream().filter(spiderContent -> spiderContent != null && !existSourceSet.contains(spiderContent.getSubject().toLowerCase().trim())).collect(Collectors.toList());
68 logger.info("[func=SuggestConversionFlow.doBusiness][pageNo={}][spiderContentListSize={}][filteredContentListSize={}]", pageNo, spiderContentList.size(), filteredContentList.size()); 71 logger.info("[func=SuggestConversionFlow.doBusiness][pageNo={}][spiderContentListSize={}][filteredContentListSize={}]", pageNo, spiderContentList.size(), filteredContentList.size());
69 if (CollectionUtils.isEmpty(filteredContentList)) { 72 if (CollectionUtils.isEmpty(filteredContentList)) {
70 return true; 73 return true;
71 } 74 }
72 75
73 // 获取每一个title与其关联的yoho关键词 76 // 获取每一个title与其关联的yoho关键词
  77 + Map<String, String> tempNewConversionMap = new HashMap<>(batchSize);
74 List<SuggestConversion> suggestConversionList = new ArrayList<>(); 78 List<SuggestConversion> suggestConversionList = new ArrayList<>();
75 for (SpiderContent spiderContent : filteredContentList) { 79 for (SpiderContent spiderContent : filteredContentList) {
76 String dest = suggestConvertorService.convert(spiderContent, yohoKeywordsBO); 80 String dest = suggestConvertorService.convert(spiderContent, yohoKeywordsBO);
@@ -80,6 +84,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -80,6 +84,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
80 suggestConversion.setDest(dest); 84 suggestConversion.setDest(dest);
81 suggestConversion.setUpdateTime(DateUtil.getCurrentTimeSecond()); 85 suggestConversion.setUpdateTime(DateUtil.getCurrentTimeSecond());
82 suggestConversionList.add(suggestConversion); 86 suggestConversionList.add(suggestConversion);
  87 + tempNewConversionMap.put(spiderContent.getSubject(), dest);
83 } 88 }
84 } 89 }
85 90
@@ -88,6 +93,12 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -88,6 +93,12 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
88 suggestConversionService.insertBatch(suggestConversionList); 93 suggestConversionService.insertBatch(suggestConversionList);
89 } 94 }
90 95
  96 + if (!tempNewConversionMap.isEmpty()) {
  97 + synchronized (this) {
  98 + this.newConversionMap.putAll(tempNewConversionMap);
  99 + }
  100 + }
  101 +
91 return true; 102 return true;
92 } 103 }
93 104
@@ -96,5 +107,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow { @@ -96,5 +107,7 @@ public class SuggestConvertorFlow implements RetryBusinessFlow {
96 this.yohoKeywordsBO = null; 107 this.yohoKeywordsBO = null;
97 this.existSourceSet = null; 108 this.existSourceSet = null;
98 logger.info("[func=SuggestConversionFlow.finish][doBusinessResult=" + doBusinessResult + "]", exception); 109 logger.info("[func=SuggestConversionFlow.finish][doBusinessResult=" + doBusinessResult + "]", exception);
  110 + REPORT_LOGGER.info("[key=IncrementConversionMap][incrementConversionMap={}]", newConversionMap);
  111 + newConversionMap.clear();
99 } 112 }
100 } 113 }
@@ -35,6 +35,8 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -35,6 +35,8 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
35 35
36 private static final Logger logger = LoggerFactory.getLogger("FLOW_EXECUTOR"); 36 private static final Logger logger = LoggerFactory.getLogger("FLOW_EXECUTOR");
37 37
  38 + private static final Logger REPORT_LOGGER = LoggerFactory.getLogger("CONSUMER_REPORTER");
  39 +
38 private static final int KEYWORD_COUNT = 100; 40 private static final int KEYWORD_COUNT = 100;
39 41
40 @Resource(name = "yhNoSyncZSetOperations") 42 @Resource(name = "yhNoSyncZSetOperations")
@@ -54,6 +56,10 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -54,6 +56,10 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
54 56
55 private List<String> validKeywordList = null; 57 private List<String> validKeywordList = null;
56 58
  59 + private volatile List<String> succeedKeywords = new ArrayList<>(100);
  60 +
  61 + private volatile List<String> failedKeywords = new ArrayList<>(100);
  62 +
57 @Override 63 @Override
58 public String flowName() { 64 public String flowName() {
59 return this.getClass().getSimpleName(); 65 return this.getClass().getSimpleName();
@@ -61,17 +67,26 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -61,17 +67,26 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
61 67
62 @Override 68 @Override
63 public void init() { 69 public void init() {
64 - Set<String> keywordSet = new HashSet<>(1000); 70 + succeedKeywords.clear();
  71 + failedKeywords.clear();
  72 +
  73 + Set<String> keywordSet = new HashSet<>(200);
  74 + Set<String> topEmptySeachKeywords = new HashSet<>(100);
  75 + Set<String> topLessSeachKeywords = new HashSet<>(100);
65 Set<ZSetOperations.TypedTuple<String>> redisResults = yhNoSyncZSetOperations.reverseRangeWithScores(RedisKeys.getRedisKey4Yesterday(RedisKeys.YOHO_SEARCH_KEYWORDS_EMPTY), 0, KEYWORD_COUNT); 76 Set<ZSetOperations.TypedTuple<String>> redisResults = yhNoSyncZSetOperations.reverseRangeWithScores(RedisKeys.getRedisKey4Yesterday(RedisKeys.YOHO_SEARCH_KEYWORDS_EMPTY), 0, KEYWORD_COUNT);
66 for (ZSetOperations.TypedTuple<String> typedTuple : redisResults) { 77 for (ZSetOperations.TypedTuple<String> typedTuple : redisResults) {
67 - keywordSet.add(typedTuple.getValue()); 78 + topEmptySeachKeywords.add(typedTuple.getValue());
68 } 79 }
69 80
70 redisResults = yhNoSyncZSetOperations.reverseRangeWithScores(RedisKeys.getRedisKey4Yesterday(RedisKeys.YOHO_SEARCH_KEYWORDS_LESS), 0, KEYWORD_COUNT); 81 redisResults = yhNoSyncZSetOperations.reverseRangeWithScores(RedisKeys.getRedisKey4Yesterday(RedisKeys.YOHO_SEARCH_KEYWORDS_LESS), 0, KEYWORD_COUNT);
71 for (ZSetOperations.TypedTuple<String> typedTuple : redisResults) { 82 for (ZSetOperations.TypedTuple<String> typedTuple : redisResults) {
72 - keywordSet.add(typedTuple.getValue()); 83 + topLessSeachKeywords.add(typedTuple.getValue());
73 } 84 }
74 85
  86 + REPORT_LOGGER.info("[key=TopEmptySeachKeywords][topEmptySeachKeywords={}]", topEmptySeachKeywords);
  87 + REPORT_LOGGER.info("[key=TopLessSeachKeywords][topLessSeachKeywords={}]", topLessSeachKeywords);
  88 + keywordSet.addAll(topEmptySeachKeywords);
  89 + keywordSet.addAll(topLessSeachKeywords);
75 logger.info("[func=IncrementCrawlerFlow.init][keywordSetSize={}]", keywordSet.size()); 90 logger.info("[func=IncrementCrawlerFlow.init][keywordSetSize={}]", keywordSet.size());
76 if (keywordSet.isEmpty()) { 91 if (keywordSet.isEmpty()) {
77 return; 92 return;
@@ -89,6 +104,7 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -89,6 +104,7 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
89 logger.info("[func=IncrementCrawlerFlow.init][keywordSetSizeRemovedInvalid={}]", keywordSet.size()); 104 logger.info("[func=IncrementCrawlerFlow.init][keywordSetSizeRemovedInvalid={}]", keywordSet.size());
90 this.validKeywordList = keywordSet.parallelStream().filter(keyword -> validKeyword(keyword)).collect(Collectors.toList()); 105 this.validKeywordList = keywordSet.parallelStream().filter(keyword -> validKeyword(keyword)).collect(Collectors.toList());
91 logger.info("[func=IncrementCrawlerFlow.init][validKeywordListSize={}]", validKeywordList != null ? validKeywordList.size() : 0); 106 logger.info("[func=IncrementCrawlerFlow.init][validKeywordListSize={}]", validKeywordList != null ? validKeywordList.size() : 0);
  107 + REPORT_LOGGER.info("[key=ValidKeywordList][validIncrementKeywords={}]", validKeywordList);
92 } 108 }
93 109
94 private boolean validKeyword(String keyword) { 110 private boolean validKeyword(String keyword) {
@@ -129,25 +145,41 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -129,25 +145,41 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
129 return true; 145 return true;
130 } 146 }
131 147
132 - List<String> failedKeywords = new ArrayList<>(); 148 + List<String> tempFailedKeywords = new ArrayList<>();
  149 + List<String> tempSucceedKeywords = new ArrayList<>();
  150 +
133 List<BaikeBO> baikeBOList = new ArrayList<>(); 151 List<BaikeBO> baikeBOList = new ArrayList<>();
134 BaikeBO tempBaikeBO; 152 BaikeBO tempBaikeBO;
135 for (String keyword : subListKeywords) { 153 for (String keyword : subListKeywords) {
136 if ((tempBaikeBO = incrementCrawlerService.doCrawle(keyword)) != null) { 154 if ((tempBaikeBO = incrementCrawlerService.doCrawle(keyword)) != null) {
137 tempBaikeBO.setTitle(keyword); 155 tempBaikeBO.setTitle(keyword);
138 baikeBOList.add(tempBaikeBO); 156 baikeBOList.add(tempBaikeBO);
  157 + tempSucceedKeywords.add(keyword.toLowerCase().trim());
139 } else { 158 } else {
140 - failedKeywords.add(keyword.toLowerCase().trim()); 159 + tempFailedKeywords.add(keyword.toLowerCase().trim());
141 } 160 }
142 } 161 }
143 162
144 - logger.info("[func=IncrementCrawlerFlow.doBusiness][baikeBOListSize={}][failedKeywords={}]", baikeBOList.size(), failedKeywords); 163 + logger.info("[func=IncrementCrawlerFlow.doBusiness][baikeBOListSize={}][failedKeywords={}]", baikeBOList.size(), tempFailedKeywords);
145 if (CollectionUtils.isNotEmpty(baikeBOList)) { 164 if (CollectionUtils.isNotEmpty(baikeBOList)) {
146 List<SpiderContent> spiderContentList = baikeBOList.stream().map(baikeBO -> baikeBO.toSpiderContent()).collect(Collectors.toList()); 165 List<SpiderContent> spiderContentList = baikeBOList.stream().map(baikeBO -> baikeBO.toSpiderContent()).collect(Collectors.toList());
147 spiderContentService.insertBatch(spiderContentList); 166 spiderContentService.insertBatch(spiderContentList);
148 } 167 }
149 168
150 - blackKeywordsMgr.addBlackKeywords(failedKeywords); 169 + blackKeywordsMgr.addBlackKeywords(tempFailedKeywords);
  170 +
  171 + // 用于输出统计日志
  172 + if (!tempSucceedKeywords.isEmpty()) {
  173 + synchronized (this) {
  174 + this.succeedKeywords.addAll(tempSucceedKeywords);
  175 + }
  176 + }
  177 + if (!tempFailedKeywords.isEmpty()) {
  178 + synchronized (this) {
  179 + this.failedKeywords.addAll(tempFailedKeywords);
  180 + }
  181 + }
  182 +
151 return true; 183 return true;
152 } 184 }
153 185
@@ -155,6 +187,11 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow { @@ -155,6 +187,11 @@ public class IncrementCrawlerFlow implements RetryBusinessFlow {
155 public void finish(boolean doBusinessResult, Exception exception) { 187 public void finish(boolean doBusinessResult, Exception exception) {
156 this.validKeywordList = null; 188 this.validKeywordList = null;
157 logger.info("[func=IncrementCrawlerFlow.finish][doBusinessResult=" + doBusinessResult + "]", exception); 189 logger.info("[func=IncrementCrawlerFlow.finish][doBusinessResult=" + doBusinessResult + "]", exception);
  190 +
  191 + REPORT_LOGGER.info("[key=SucceedIncrementKeywords][succeedIncrementKeywords={}]", succeedKeywords);
  192 + REPORT_LOGGER.info("[key=FailedIncrementKeywords][failedIncrementKeywords={}]", failedKeywords);
  193 + succeedKeywords.clear();
  194 + failedKeywords.clear();
158 } 195 }
159 196
160 public static void main(String[] args) throws UnsupportedEncodingException { 197 public static void main(String[] args) throws UnsupportedEncodingException {
@@ -158,6 +158,20 @@ @@ -158,6 +158,20 @@
158 </encoder> 158 </encoder>
159 </appender> 159 </appender>
160 160
  161 + <appender name="CONSUMER_REPORTER_APPEND" class="ch.qos.logback.core.rolling.RollingFileAppender">
  162 + <file>${catalina.home}/logs/search-consumer/consumer-reporter.log</file>
  163 + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
  164 + <fileNamePattern>${catalina.home}/logs/search-consumer/archived/consumer-reporter.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
  165 + <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
  166 + <maxFileSize>10MB</maxFileSize>
  167 + </timeBasedFileNamingAndTriggeringPolicy>
  168 + <maxHistory>7</maxHistory>
  169 + </rollingPolicy>
  170 + <encoder>
  171 + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} - %msg%n</pattern>
  172 + </encoder>
  173 + </appender>
  174 +
161 <!-- 事件上报 appender --> 175 <!-- 事件上报 appender -->
162 <appender name="SEARCH_EVENT_APPAND" class="ch.qos.logback.core.rolling.RollingFileAppender"> 176 <appender name="SEARCH_EVENT_APPAND" class="ch.qos.logback.core.rolling.RollingFileAppender">
163 <file>${catalina.home}/logs/search-consumer/search-event.log</file> 177 <file>${catalina.home}/logs/search-consumer/search-event.log</file>
@@ -229,6 +243,11 @@ @@ -229,6 +243,11 @@
229 <appender-ref ref="SPIDER_APPAND"/> 243 <appender-ref ref="SPIDER_APPAND"/>
230 </logger> 244 </logger>
231 245
  246 + <logger name="CONSUMER_REPORTER" additivity="false">
  247 + <level value="INFO"/>
  248 + <appender-ref ref="CONSUMER_REPORTER_APPEND"/>
  249 + </logger>
  250 +
232 <!-- 事件上报日志 --> 251 <!-- 事件上报日志 -->
233 <logger name="SEARCH_EVENT_LOG" additivity="false"> 252 <logger name="SEARCH_EVENT_LOG" additivity="false">
234 <level value="INFO"/> 253 <level value="INFO"/>
@@ -130,10 +130,10 @@ @@ -130,10 +130,10 @@
130 <fileNamePattern>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/archived/retry-business.%d{yyyy-MM-dd}.%i.log</fileNamePattern> 130 <fileNamePattern>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/archived/retry-business.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
131 <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"> 131 <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
132 <!-- or whenever the file size reaches 100MB --> 132 <!-- or whenever the file size reaches 100MB -->
133 - <maxFileSize>10MB</maxFileSize> 133 + <maxFileSize>${yoho.logs.maxFileSize}</maxFileSize>
134 </timeBasedFileNamingAndTriggeringPolicy> 134 </timeBasedFileNamingAndTriggeringPolicy>
135 <!-- keep 30 days' worth of history --> 135 <!-- keep 30 days' worth of history -->
136 - <maxHistory>7</maxHistory> 136 + <maxHistory>${yoho.logs.maxHistory}</maxHistory>
137 </rollingPolicy> 137 </rollingPolicy>
138 <encoder> 138 <encoder>
139 <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern> 139 <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
@@ -148,16 +148,30 @@ @@ -148,16 +148,30 @@
148 <fileNamePattern>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/archived/spider.%d{yyyy-MM-dd}.%i.log</fileNamePattern> 148 <fileNamePattern>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/archived/spider.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
149 <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"> 149 <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
150 <!-- or whenever the file size reaches 100MB --> 150 <!-- or whenever the file size reaches 100MB -->
151 - <maxFileSize>10MB</maxFileSize> 151 + <maxFileSize>${yoho.logs.maxFileSize}</maxFileSize>
152 </timeBasedFileNamingAndTriggeringPolicy> 152 </timeBasedFileNamingAndTriggeringPolicy>
153 <!-- keep 30 days' worth of history --> 153 <!-- keep 30 days' worth of history -->
154 - <maxHistory>7</maxHistory> 154 + <maxHistory>${yoho.logs.maxHistory}</maxHistory>
155 </rollingPolicy> 155 </rollingPolicy>
156 <encoder> 156 <encoder>
157 <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern> 157 <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
158 </encoder> 158 </encoder>
159 </appender> 159 </appender>
160 160
  161 + <appender name="CONSUMER_REPORTER_APPEND" class="ch.qos.logback.core.rolling.RollingFileAppender">
  162 + <file>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/consumer-reporter.log</file>
  163 + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
  164 + <fileNamePattern>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/archived/consumer-reporter.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
  165 + <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
  166 + <maxFileSize>${yoho.logs.maxFileSize}</maxFileSize>
  167 + </timeBasedFileNamingAndTriggeringPolicy>
  168 + <maxHistory>${yoho.logs.maxHistory}</maxHistory>
  169 + </rollingPolicy>
  170 + <encoder>
  171 + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} - %msg%n</pattern>
  172 + </encoder>
  173 + </appender>
  174 +
161 <!-- 事件上报 appender --> 175 <!-- 事件上报 appender -->
162 <appender name="SEARCH_EVENT_APPAND" class="ch.qos.logback.core.rolling.RollingFileAppender"> 176 <appender name="SEARCH_EVENT_APPAND" class="ch.qos.logback.core.rolling.RollingFileAppender">
163 <file>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/search-event.log</file> 177 <file>${yoho.logs.basedir}/${yoho.search.consumer.env.namespace}/search-event.log</file>
@@ -229,6 +243,11 @@ @@ -229,6 +243,11 @@
229 <appender-ref ref="SPIDER_APPAND"/> 243 <appender-ref ref="SPIDER_APPAND"/>
230 </logger> 244 </logger>
231 245
  246 + <logger name="CONSUMER_REPORTER" additivity="false">
  247 + <level value="INFO"/>
  248 + <appender-ref ref="CONSUMER_REPORTER_APPEND"/>
  249 + </logger>
  250 +
232 <!-- 事件上报日志 --> 251 <!-- 事件上报日志 -->
233 <logger name="SEARCH_EVENT_LOG" additivity="false"> 252 <logger name="SEARCH_EVENT_LOG" additivity="false">
234 <level value="INFO"/> 253 <level value="INFO"/>