Authored by Gino Zhang

Modify logging

... ... @@ -189,6 +189,7 @@ public class IndexController implements ApplicationEventPublisherAware {
        }
        indexRebuildJob.rebuildTblProductIndex();
        indexRebuildJob.execute();
        suggestionDiscoveryJob.execute();
+       suggestionCounterJob.execute();
        indexRebuildJob.rebuildSuggestIndex();
        return getResultMap(200, "rebuildAll success");
... ...
... ... @@ -60,7 +60,7 @@ public abstract class AbstractSuggestionCounter implements ApplicationEventPublisherAware {
        }
        Map<String, Integer> filterMap = filter(pageNo, countMap);
-       logger.info("[{} business][pageNo={}][fetchWordSize={}][countMapSize={}][countMapSize={}]", flowName(), pageNo, fetchSize, countMapSize, filterMap.size());
+       logger.info("[{} business][pageNo={}][fetchWordSize={}][countMapSize={}][filterMapSize={}]", flowName(), pageNo, fetchSize, countMapSize, filterMap.size());
        logger.trace("[{} business][pageNo={}][keywordMap={}][countMap={}][filterMap={}]", flowName(), pageNo, keywordMap, countMap, filterMap);
        return persistence(keywordMap, filterMap);
    }
... ...
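
For reference, the label fix above only changes the message text: SLF4J fills each {} placeholder positionally, and the bracketed labels are literal characters, so the fifth label has to describe the fifth argument (filterMap.size()) rather than repeating countMapSize. A minimal standalone sketch of that behavior, using illustrative names that are not from the project:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

public class PlaceholderDemo {
    private static final Logger logger = LoggerFactory.getLogger(PlaceholderDemo.class);

    static void report(Map<String, Integer> countMap, Map<String, Integer> filterMap) {
        // Each "{}" is substituted by the argument in the same position;
        // the bracketed names are plain text and must match what is actually passed.
        logger.info("[countMapSize={}][filterMapSize={}]", countMap.size(), filterMap.size());
    }
}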
... ... @@ -57,9 +57,11 @@ public class SuggestionKeywordCounter extends AbstractSuggestionCounter {
        Map<String, Integer> resultMap = super.filter(pageNo, countMap);
        // Check for unfriendly suggestion words and write them to the log
-       List<String> notNormalWords = resultMap.keySet().stream().filter(keyword -> notNormalKeyword(keyword)).collect(Collectors.toList());
-       if (notNormalWords != null && !notNormalWords.isEmpty()) {
-           logger.warn("found not normal words: {}", notNormalWords.stream().map(item -> "'" + item + "'").collect(Collectors.joining(",")));
+       if (logger.isInfoEnabled()) {
+           List<String> notNormalWords = resultMap.keySet().stream().filter(keyword -> notNormalKeyword(keyword)).collect(Collectors.toList());
+           if (notNormalWords != null && !notNormalWords.isEmpty()) {
+               logger.info("[{} check][pageNo={}]found not normal words: {}", flowName(), pageNo, notNormalWords.stream().map(item -> "'" + item + "'").collect(Collectors.joining(",")));
+           }
        }
        return resultMap;
... ...
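
The restructuring above follows the usual guarded-logging pattern: the stream filtering and string joining are only worth doing when INFO is enabled, so the whole block sits behind logger.isInfoEnabled(). A minimal, self-contained sketch of that pattern (SLF4J-style API, which the project's logger calls suggest; the length-based filter below is a stand-in for the project's notNormalKeyword() check, which is not shown here):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class GuardedLoggingDemo {
    private static final Logger logger = LoggerFactory.getLogger(GuardedLoggingDemo.class);

    static void logSuspiciousWords(List<String> keywords) {
        // The level check keeps the filtering and joining work from running at all
        // when INFO logging is disabled.
        if (logger.isInfoEnabled()) {
            String joined = keywords.stream()
                    .filter(word -> word.length() > 20)   // stand-in for notNormalKeyword()
                    .map(word -> "'" + word + "'")
                    .collect(Collectors.joining(","));
            if (!joined.isEmpty()) {
                logger.info("found not normal words: {}", joined);
            }
        }
    }

    public static void main(String[] args) {
        logSuspiciousWords(Arrays.asList("phone", "aVeryLongSuspiciousKeywordThatLooksBroken"));
    }
}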