Authored by FengRuwei

Merge branch 'master' of http://git.yoho.cn/ops/monitor-service

Showing 31 changed files with 449 additions and 291 deletions
... ... @@ -2,7 +2,6 @@ package com.monitor.cmdb.ctrl;
import com.monitor.cmdb.service.IRedisMonitorService;
import com.monitor.model.response.BaseResponse;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
... ... @@ -29,11 +28,11 @@ public class RedisMonitorCtrl {
public BaseResponse<Object> getRedisMonitors() throws Exception {
log.info("into getRedisInfo");
// query the redis monitor list
String response = redisMonitorService.getRedisMonitors();
if (StringUtils.isBlank(response)) {
Object response = redisMonitorService.getRedisMonitors();
if (null==response) {
return null;
}
log.info("getRedisInfo success and value {}", response);
log.info("getRedisInfo success and data {}", response);
return new BaseResponse<Object>(response);
}
... ...
... ... @@ -35,7 +35,7 @@ public class ZkMonitorCtrl {
if (response == null || CollectionUtils.isEmpty(response)) {
return null;
}
log.info("getHostInfos success and total={}", response);
log.info("getZkMonitorRecords success and data={}", response);
return new BaseResponse<Object>(response);
}
... ...
... ... @@ -37,19 +37,23 @@ public class RedisMonitorServiceImpl implements IRedisMonitorService {
buff.append("'<chart charttopmargin=\"0\" chartBottomMargin=\"0\" chartleftmargin=\"0\" chartrightmargin=\"0\" bordercolor=\"#FFFFFF\" border=\"0\" borderAlpha=\"0\" borderThickness=\"0\" canvasBorderThickness=\"0\" canvasBorderColor=\"#FFFFFF\" showFormBtn=\"0\">\\n\\\n");
buff.append("<dataset plotborderAlpha=\"0\" >\\n\\\n");
for(RedisMonitor info : list){
String color="62D0FE";
if(info.getIsFailed()==0){
color="FF0000";
}
if(info.getLevel()==0){
buff.append("<set x=\""+width1+"\" y=\"80\" width=\"120\" height=\"40\" name=\""+info.getNodeFrom()+"\" color=\"62D0FE\" id=\""+info.getNodeTo()+"\" tooltext= \""+info.getParamMonitor()+"\" />\\n\\\n");
buff.append("<set x=\""+width1+"\" y=\"80\" width=\"120\" height=\"40\" name=\""+info.getNodeFrom()+"\" color=\""+color+"\" id=\""+info.getNodeTo()+"\" tooltext= \""+info.getParamMonitor()+"\" />\\n\\\n");
width1+=18;
}else if(info.getLevel()==1){
if(info.getNodeFrom().equals("Qcloud")){
buff.append("<set x=\""+width21+"\" y=\"70\" width=\"120\" height=\"40\" name=\""+info.getNodeFrom()+"\" color=\"62D0FE\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
buff.append("<set x=\""+width21+"\" y=\"70\" width=\"120\" height=\"40\" name=\""+info.getNodeTo()+"\" color=\""+color+"\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
width21+=8;
}else{
buff.append("<set x=\""+width2+"\" y=\"70\" width=\"120\" height=\"40\" name=\""+info.getNodeFrom()+"\" color=\"62D0FE\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
buff.append("<set x=\""+width2+"\" y=\"70\" width=\"120\" height=\"40\" name=\""+info.getNodeTo()+"\" color=\""+color+"\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
width2+=8;
}
}else if(info.getLevel()==2){
buff.append("<set x=\""+width3+"\" y=\"55\" width=\"55\" height=\"40\" name=\""+info.getNodeFrom()+"\" color=\"62D0FE\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
buff.append("<set x=\""+width3+"\" y=\"55\" width=\"55\" height=\"40\" name=\""+info.getNodeTo() + "\" color=\""+color+"\" id=\""+info.getNodeTo()+"\" />\\n\\\n");
width3+=4;
}
}
... ...
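The new color selection maps a node's isFailed flag onto the FusionCharts node color; a minimal equivalent helper, for illustration only (the method name nodeColor is hypothetical and not part of this change):

// Failed nodes (isFailed == 0) are drawn red, healthy ones keep the default light blue.
private static String nodeColor(RedisMonitor info) {
    return info.getIsFailed() == 0 ? "FF0000" : "62D0FE";
}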
... ... @@ -107,6 +107,12 @@
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
</dependency>
<dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
</dependency>
</dependencies>
</project>
\ No newline at end of file
... ...
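The new commons-net dependency supplies the Apache Commons Net TelnetClient that the TelnetUtils helper added below is built on.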
package com.monitor.common.util;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.net.telnet.TelnetClient;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class TelnetUtils {
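// Helper for reading the stats text a service exposes on a raw TCP port (used below to pull twemproxy stats from port 22222).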
public final static void main(String[] args) throws Exception {
String string=getResult("192.168.102.222",22222);
JSONObject response=JSONObject.parseObject(string);
System.out.println(string);
System.out.println(response);
}
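// Connects to ip:port, reads until the remote side closes the connection, and returns everything read as a String; returns null on any error.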
public static String getResult(String ip, int port) {
TelnetClient telnet = null;
InputStream in = null;
ByteArrayOutputStream baos = null;
try {
telnet = new TelnetClient();
telnet.connect(ip, port);
in = telnet.getInputStream();
baos = new ByteArrayOutputStream();
int i = -1;
while ((i = in.read()) != -1) {
baos.write(i);
}
return baos.toString();
} catch (Exception e) {
return null;
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (baos != null) {
try {
baos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (telnet != null) {
try {
telnet.disconnect();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
... ...
... ... @@ -38,7 +38,7 @@ public class ZkMapper extends InfluxDBQuery implements IZkMapper {
@Override
public void insert(ZkInfo zkInfo) {
log.info("insert zkInfo param ip is {},isLive {}",zkInfo.getHostIp(),zkInfo.getIsLive());
Point point = Point.measurement(InfluxDBContants.ZOOKEEPER_ALARM)
.addField("hostIp", zkInfo.getHostIp())
.addField("cloudType", zkInfo.getCloudType())
... ...
... ... @@ -45,7 +45,7 @@ public interface InterVar {
String NGINXPREFIX = "(.*)nginx(.*)";
Integer API_LIMIT = 100;
Integer API_LIMIT = 5000;
String SCANINTERVAL = "2m";
... ... @@ -58,4 +58,12 @@ public interface InterVar {
String QC_TYPE = "qc";
String LOCK="lock";
String ALARM_NGINX_API="Nginx Api 非200响应";
String ALARM_NGINX_SERVICE="Nginx Service 非200响应";
String ALARM_NGINX_ERROR="Nginx Error";
}
... ...
... ... @@ -19,5 +19,5 @@ public class ErrorStaModel {
HashMap<String, String> logIpPercentMap;
List<ErrorSeriesModel> apiSeriesModelList = new ArrayList<>();
List<ErrorSeriesModel> errorSeriesModelList = new ArrayList<>();
}
... ...
... ... @@ -72,26 +72,29 @@ public class NginxService {
private NginxView buildNginxView(MObjectInfo mObjectInfo) {
synchronized (InterVar.LOCK.intern())
{
String ip = mObjectInfo.getMoHostIp();
String name = mObjectInfo.getMoName();
int id = mObjectInfo.getMoId();
NginxView view = InterVar.NGINX_VIEW_CONCURRENT_HASH_MAP.get(ip);
if (null == view) {
view = new NginxView();
}
view.setIp(ip);
view.setName(name);
view.setMoId(id);
return view;
}
}
@RequestMapping(value = "/apiPie")
... ...
package com.monitor.middleware.nginx.task;
/**
* Created by yoho on 2016/6/22.
*/
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.influxdb.dto.QueryResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.text.MessageFormat;
/**
* Every 3 minutes, count the number of monitored items in the current window and raise an alarm when it exceeds the threshold
*/
@EnableScheduling
@Component
public class NginxAlarmTask {
@Autowired
InfluxComp influxComp;
/*@Scheduled(cron = "0 0/3 * * * ? ")*/
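// Note: the cron trigger above is commented out and doTask() is left empty in this commit; the class is a placeholder.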
public void doTask() {
}
}
package com.monitor.middleware.nginx.task;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.task.job.BaseJob;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.EnableScheduling;
... ... @@ -21,15 +24,18 @@ public class NginxCalTask {
@Autowired
InfluxComp qcInfluxComp;
@Autowired
public AlarmMsgComp alarmMsgComp;
@Scheduled(fixedRate = 2*60*1000L)
public void call() {
try {
InterVar.NGINX_VIEW_CONCURRENT_HASH_MAP.clear();
InterVar.EXECUTOR_SERVICE.submit(new BaseJob(awsInfluxComp, InterVar.AWS_TYPE));
InterVar.EXECUTOR_SERVICE.submit(new BaseJob(alarmMsgComp,awsInfluxComp, InterVar.AWS_TYPE));
InterVar.EXECUTOR_SERVICE.submit(new BaseJob(qcInfluxComp, InterVar.QC_TYPE));
InterVar.EXECUTOR_SERVICE.submit(new BaseJob(alarmMsgComp,qcInfluxComp, InterVar.QC_TYPE));
}
catch (Exception e)
{
... ...
package com.monitor.middleware.nginx.task.job;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.nginx.task.job.oneJob.ApiJob;
import com.monitor.middleware.nginx.task.job.oneJob.BaseLogJob;
import com.monitor.middleware.nginx.task.job.oneJob.ErrorLogJob;
import com.monitor.middleware.nginx.task.job.oneJob.ServiceJob;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import com.monitor.middleware.rabbitmq.constant.InterVar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Callable;
... ... @@ -15,6 +20,7 @@ import java.util.concurrent.Callable;
//per-cloud statistics task (one job per cloud: AWS / QCloud)
public class BaseJob implements Callable {
public static final Logger DEBUG = LoggerFactory.getLogger(BaseJob.class);
private BaseLogJob apiLogJob;
... ... @@ -22,30 +28,29 @@ public class BaseJob implements Callable {
private ErrorLogJob errorLogJob;
public BaseJob(InfluxComp influxComp, String type) {
public BaseJob(AlarmMsgComp alarmMsgComp, InfluxComp influxComp, String type) {
this.apiLogJob = new ApiJob(influxComp, type);
this.apiLogJob = new ApiJob(alarmMsgComp, influxComp, type);
this.serviceLogJob = new ServiceJob(influxComp, type);
this.serviceLogJob = new ServiceJob(alarmMsgComp, influxComp, type);
this.errorLogJob = new ErrorLogJob(influxComp, type);
this.errorLogJob = new ErrorLogJob(alarmMsgComp, influxComp, type);
}
@Override
public Object call() throws Exception {
public Object call() {
try {
InterVar.EXECUTOR_SERVICE.submit(this.apiLogJob);
InterVar.EXECUTOR_SERVICE.submit(this.serviceLogJob);
InterVar.EXECUTOR_SERVICE.submit(this.errorLogJob);
}
catch (Exception e)
{
e.printStackTrace();
}
} catch (Exception e) {
DEBUG.error("Failed to execute nginx monitor task , error {}", e);
}
return null;
}
... ...
package com.monitor.middleware.nginx.task.job.oneJob;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by yoho on 2016/6/22.
*/
public class ApiJob extends BaseLogJob {
public static final Logger DEBUG = LoggerFactory.getLogger(ApiJob.class);
private String type;
public ApiJob(InfluxComp influxComp, String type) {
private AlarmMsgComp alarmMsgComp;
public ApiJob(AlarmMsgComp alarmMsgComp, InfluxComp influxComp, String type) {
super(influxComp, InterVar.APIACCESSMEASURE, type);
this.type = type;
}
public void calNginxView() {
for (ApiSeriesModel apiSeriesModel : this.apiSeriesModelList) {
this.alarmMsgComp = alarmMsgComp;
}
String key = apiSeriesModel.getLogIp();
private void updateNginxView() {
synchronized (InterVar.LOCK.intern()) {
for (ApiSeriesModel apiSeriesModel : this.apiSeriesModelList) {
synchronized (key.intern()) {
String key = apiSeriesModel.getLogIp();
if (StringUtils.isNotBlank(key)) {
... ... @@ -52,25 +58,26 @@ public class ApiJob extends BaseLogJob {
super.doTask();
calNginxView();
updateNginxView();
updateNiginxInfoView();
saveResult();
doAlarm();
}
public void saveResult() {
private void updateNiginxInfoView() {
if (StringUtils.equals(InterVar.AWS_TYPE, this.type)) {
InterVar.AWS_NGINX_API_LIST.clear();
for(ApiStaModel apiStaModel:this.resultList)
{
for (ApiStaModel apiStaModel : this.resultList) {
InterVar.AWS_NGINX_API_LIST.add(apiStaModel);
}
} else {
InterVar.QC_NGINX_API_LIST.clear();
for(ApiStaModel apiStaModel:this.resultList)
{
for (ApiStaModel apiStaModel : this.resultList) {
InterVar.QC_NGINX_API_LIST.add(apiStaModel);
}
}
... ... @@ -78,17 +85,14 @@ public class ApiJob extends BaseLogJob {
}
/* public static void main(String[] args) {
InfluxComp influxComp = new InfluxComp("http://172.16.6.104:8086", "root", "root");
influxComp.init();
private void doAlarm() {
if (this.apiSeriesModelList.size() > InterVar.API_LIMIT) {
ErrorLogJob apiJob=new ErrorLogJob(influxComp,InterVar.AWS_TYPE);
String msg = buildAlarmMsg();
try {
apiJob.call();
} catch (Exception e) {
e.printStackTrace();
alarmMsgComp.doAlarm(InterVar.ALARM_NGINX_API,msg);
}
}*/
}
}
... ...
... ... @@ -7,6 +7,8 @@ import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.DecimalFormat;
import java.util.*;
... ... @@ -17,6 +19,7 @@ import java.util.concurrent.ConcurrentHashMap;
* Created by yoho on 2016/6/28.
*/
public class BaseLogJob implements Callable {
public static final Logger DEBUG = LoggerFactory.getLogger(BaseLogJob.class);
private static ConcurrentHashMap<String, Integer> KINDEXMAPS = new ConcurrentHashMap<>();
... ... @@ -61,7 +64,7 @@ public class BaseLogJob implements Callable {
private List<ApiStaModel> sortMap(HashMap<String, ApiStaModel> map) {
List<ApiStaModel> sortedList = new ArrayList<>();
List<Map.Entry<String, ApiStaModel>> entryList = new ArrayList<>(map.entrySet());
... ... @@ -83,15 +86,14 @@ public class BaseLogJob implements Callable {
private void queryData() {
String sql = "select * from " + measureName + " where time > now() - " + InterVar.SCANINTERVAL;
DEBUG.debug("Start to execute query command : {}", sql);
QueryResult result = influxComp.doQuery(sql, InterVar.DBNAME);
//parse result to api series model
buildApiModel(apiSeriesModelList, result.getResults().get(0).getSeries());
}
public void calNignxView(List<ApiSeriesModel> apiSeriesModelList) {
DEBUG.info("Success to query {} items from {}", apiSeriesModelList.size(), measureName);
}
//calculate the per-api share, broken down by log ip and by error code
... ... @@ -110,15 +112,15 @@ public class BaseLogJob implements Callable {
}
public HashMap<String, String> calMapPercent(int total, HashMap<String, Integer> countMap) {
private HashMap<String, String> calMapPercent(int total, HashMap<String, Integer> countMap) {
HashMap<String, String> percentMap = new HashMap<>();
for (Map.Entry<String, Integer> oneCount : countMap.entrySet()) {
double percent = 100*oneCount.getValue() / (double) total;
double percent = 100 * oneCount.getValue() / (double) total;
percentMap.put(oneCount.getKey(), format.format(percent)+"%");
percentMap.put(oneCount.getKey(), format.format(percent) + "%");
}
return percentMap;
... ... @@ -133,9 +135,9 @@ public class BaseLogJob implements Callable {
oneEntry.getValue().setCount(count);
double percent = 100*count / (double) total;
double percent = 100 * count / (double) total;
oneEntry.getValue().setTotalPercent(format.format(percent)+"%");
oneEntry.getValue().setTotalPercent(format.format(percent) + "%");
}
}
... ... @@ -229,97 +231,7 @@ public class BaseLogJob implements Callable {
}
/* public static void main(String[] args) {
//String sql = "select * from cluster_info where time > now() - 2d";
InfluxComp influxComp = new InfluxComp("http://172.16.6.104:8086", "root", "root");
influxComp.init();
Random random=new Random();
Point point=Point.measurement(InterVar.SERACCESSMEASURE)
.addField("http_host","api.yoho.cn")
.addField("log_file","api.yoho.cn_access.log")
.addField("log_ip","172.16.6.104")
.addField("request_method","GET")
.addField("request_status","499")
.addField("request_url","/haha HTTP/1.1")
.addField("upstream","172.16.6.105:8800")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();
String request_url="/?app_version=4.0.0&client_secret=b7eaa613a3ccfda51b697964a276bcba&client_type=android&gender=&limit=60&method=app.SpaceOrders.push&os_version=android5.0%3ASM-G9006V&page=1&screen_size=1080x1920&type=1&v=7&yh_channel=1 HTTP/1.1";
Point point1=Point.measurement(InterVar.SERACCESSMEASURE)
.addField("http_host","api.yoho.cn")
.addField("log_file","api.yoho.cn_access.log")
.addField("log_ip","172.16.6.104")
.addField("request_method","GET")
.addField("request_status","500")
.addField("request_url",request_url)
.addField("upstream","172.16.6.105:8800")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();
BatchPoints points=BatchPoints.database(InterVar.DBNAME).retentionPolicy("default").build();
points.point(point);
points.point(point1);
influxComp.getInfluxDBClient().write(points);
*//*Point ponit=Point.measurement(InterVar.ERRORMEASURE)
.addField("err_msg","Lua error 45646576456")
.addField("err_severity","error")
.addField("log_ip","172.16.6.104")
.addField("log_time","2016/06/29 20:40:25")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();
Point ponit1=Point.measurement(InterVar.ERRORMEASURE)
.addField("err_msg","Lua error 1231231")
.addField("err_severity","error")
.addField("log_ip","172.16.6.104")
.addField("log_time","2016/06/29 20:40:25")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();
BatchPoints batchPoints=BatchPoints.database(InterVar.DBNAME).retentionPolicy("default").build();
batchPoints.point(ponit);
batchPoints.point(ponit1);
influxComp.getInfluxDBClient().write(batchPoints);
*//**//**//**//* influxComp.getInfluxDBClient().createDatabase(InterVar.DBNAME);
*//**//**//**//**//**//**//**//**//**//**//**//**//**//**//**//*
QueryResult result = influxComp.doQuery(sql, "rabbitmq_info");
List<String> columns = result.getResults().get(0).getSeries().get(0).getColumns();
System.out.println(columns);
List<Object> values = result.getResults().get(0).getSeries().get(0).getValues().get(0);
System.out.println(values);
String v = result.getResults().get(0).getSeries().get(0).getValues().get(0).get(1).toString();
System.out.println(v);*//**//**//**//**//**//**//**//**//**//**//**//**//**//**//**//*
System.out.println(new ApiJob().queryRequest("/?app_version=4.6.0.1606220001&client_secret=d039fbc04704d0679eb59c8c82593ddb&client_type=iphone&is_read=N&method=app.inbox.getTotal&os_version=9.3.2&screen_size=375x667&uid=3930397&v=7 HTTP/1.1"));
System.out.println(new ApiJob().queryRequest("/ HTTP/1.1"));*//*
}*/
protected String queryRequest(String request_url) {
private String queryRequest(String request_url) {
if (request_url.contains("&method=")) {
String[] afterUrl = request_url.split("&method=", 2);
... ... @@ -348,6 +260,18 @@ public class BaseLogJob implements Callable {
}
}
protected String buildAlarmMsg() {
StringBuilder builder = new StringBuilder();
builder.append("{");
builder.append("云:").append(type).append(";");
builder.append("总计:").append(this.apiSeriesModelList.size()).append(";");
builder.append("top接口:");
builder.append(this.resultList.get(0).getApiKey()).append(" : ").append(this.resultList.get(0).getApiSeriesModelList().size()).append(";");
builder.append("}");
return builder.toString();
}
@Override
public Object call() throws Exception {
doTask();
... ...
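For illustration only, with type "aws", 6000 api entries in the two-minute scan window, and a top api key "app.SpaceOrders.push" counted 1200 times (all values hypothetical), buildAlarmMsg would return a string of the form {云:aws;总计:6000;top接口:app.SpaceOrders.push : 1200;}.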
package com.monitor.middleware.nginx.task.job.oneJob;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.nginx.common.ErrorMapValueCompare;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ErrorSeriesModel;
import com.monitor.middleware.nginx.model.ErrorStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.springframework.util.LinkedCaseInsensitiveMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.DecimalFormat;
import java.util.*;
... ... @@ -19,22 +22,27 @@ import java.util.concurrent.Callable;
* Created by yoho on 2016/6/29.
*/
public class ErrorLogJob implements Callable {
public static final Logger DEBUG = LoggerFactory.getLogger(ErrorLogJob.class);
private InfluxComp influxComp;
private String type;
private AlarmMsgComp alarmMsgComp;
public static final HashMap<String, Integer> KINDEMAPS = new HashMap<>();
private List<ErrorSeriesModel> errorSeriesModelList = new ArrayList<>();
private DecimalFormat format = new DecimalFormat("0.00");
public ErrorLogJob(InfluxComp influxComp, String type) {
public ErrorLogJob(AlarmMsgComp alarmMsgComp, InfluxComp influxComp, String type) {
this.influxComp = influxComp;
this.type = type;
this.alarmMsgComp = alarmMsgComp;
}
@Override
... ... @@ -47,10 +55,16 @@ public class ErrorLogJob implements Callable {
private void doTask() {
QueryResult result = this.influxComp.doQuery("select * from " + InterVar.ERRORMEASURE + " where time > now() - " + InterVar.SCANINTERVAL, InterVar.DBNAME);
String sql = "select * from " + InterVar.ERRORMEASURE + " where time > now() - " + InterVar.SCANINTERVAL;
DEBUG.debug("Start to execute query command : {}", sql);
QueryResult result = this.influxComp.doQuery(sql, InterVar.DBNAME);
buildErrorModel(result.getResults().get(0).getSeries());
DEBUG.info("Success to query {} items from {}", errorSeriesModelList.size(), InterVar.ERRORMEASURE);
calNginxView();
HashMap<String, ErrorStaModel> countMap = classByType();
... ... @@ -73,16 +87,17 @@ public class ErrorLogJob implements Callable {
InterVar.QC_NGINX_ERROR_LIST.addAll(list);
}
}
doAlarm();
}
public void calNginxView() {
for (ErrorSeriesModel errorSeriesModel : this.errorSeriesModelList) {
private void calNginxView() {
synchronized (InterVar.LOCK.intern()) {
for (ErrorSeriesModel errorSeriesModel : this.errorSeriesModelList) {
String key = errorSeriesModel.getLogIp();
String key = errorSeriesModel.getLogIp();
synchronized (key.intern()) {
if (StringUtils.isNotBlank(key)) {
if (!InterVar.NGINX_VIEW_CONCURRENT_HASH_MAP.containsKey(key)) {
... ... @@ -101,7 +116,7 @@ public class ErrorLogJob implements Callable {
}
}
public List<ErrorStaModel> sortMap(HashMap<String, ErrorStaModel> countMap) {
private List<ErrorStaModel> sortMap(HashMap<String, ErrorStaModel> countMap) {
List<ErrorStaModel> sortedList = new ArrayList<>();
... ... @@ -139,7 +154,7 @@ public class ErrorLogJob implements Callable {
ErrorStaModel errStaModel = countMap.get(errorSeriesModel.getType());
errStaModel.getApiSeriesModelList().add(errorSeriesModel);
errStaModel.getErrorSeriesModelList().add(errorSeriesModel);
int count = errStaModel.getCount() + 1;
... ... @@ -148,19 +163,19 @@ public class ErrorLogJob implements Callable {
return countMap;
}
public void calTotalPercent(int total, HashMap<String, ErrorStaModel> countMap) {
private void calTotalPercent(int total, HashMap<String, ErrorStaModel> countMap) {
for (Map.Entry<String, ErrorStaModel> entry : countMap.entrySet()) {
double percent = 100*entry.getValue().getApiSeriesModelList().size() / (double) total;
double percent = 100 * entry.getValue().getErrorSeriesModelList().size() / (double) total;
entry.getValue().setTotalPercent(format.format(percent)+"%");
entry.getValue().setTotalPercent(format.format(percent) + "%");
}
}
public void calLogIpPercent(ErrorStaModel errorStaModel) {
private void calLogIpPercent(ErrorStaModel errorStaModel) {
List<ErrorSeriesModel> errorSeriesModelList = errorStaModel.getApiSeriesModelList();
List<ErrorSeriesModel> errorSeriesModelList = errorStaModel.getErrorSeriesModelList();
int total = errorSeriesModelList.size();
... ... @@ -181,9 +196,9 @@ public class ErrorLogJob implements Callable {
for (Map.Entry<String, Integer> oneEntry : logIpMap.entrySet()) {
double percent = 100*oneEntry.getValue() / (double) total;
double percent = 100 * oneEntry.getValue() / (double) total;
percentmap.put(oneEntry.getKey(), format.format(percent)+"%");
percentmap.put(oneEntry.getKey(), format.format(percent) + "%");
}
errorStaModel.setLogIpPercentMap(percentmap);
... ... @@ -241,4 +256,33 @@ public class ErrorLogJob implements Callable {
}
}
private String buildAlarmMsg() {
StringBuilder builder = new StringBuilder();
builder.append("{");
builder.append("云:").append(type).append(";");
builder.append("总计:").append(this.errorSeriesModelList.size()).append(";");
List<ErrorStaModel> operList = null;
if (StringUtils.equals(type, InterVar.AWS_TYPE)) {
operList = InterVar.AWS_NGINX_ERROR_LIST;
} else {
operList = InterVar.QC_NGINX_ERROR_LIST;
}
for (ErrorStaModel errorStaModel : operList) {
builder.append(errorStaModel.getType()).append(":").append(errorStaModel.getErrorSeriesModelList().size());
}
builder.append("}");
return builder.toString();
}
private void doAlarm() {
if (this.errorSeriesModelList.size() > InterVar.API_LIMIT) {
String msg = buildAlarmMsg();
alarmMsgComp.doAlarm(InterVar.ALARM_NGINX_ERROR,msg);
}
}
}
... ...
package com.monitor.middleware.nginx.task.job.oneJob;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
... ... @@ -17,19 +20,22 @@ public class ServiceJob extends BaseLogJob {
private String type;
public ServiceJob(InfluxComp influxComp, String type) {
private AlarmMsgComp alarmMsgComp;
public ServiceJob(AlarmMsgComp alarmMsgComp, InfluxComp influxComp, String type) {
super(influxComp, InterVar.SERACCESSMEASURE, type);
this.type = type;
}
public void calNginxView() {
this.alarmMsgComp =alarmMsgComp;
}
for (ApiSeriesModel apiSeriesModel : apiSeriesModelList) {
private void calNginxView() {
synchronized (InterVar.LOCK.intern()) {
for (ApiSeriesModel apiSeriesModel : apiSeriesModelList) {
String key = apiSeriesModel.getLogIp();
String key = apiSeriesModel.getLogIp();
synchronized (key.intern()) {
if (StringUtils.isNotBlank(key)) {
if (!InterVar.NGINX_VIEW_CONCURRENT_HASH_MAP.containsKey(key)) {
... ... @@ -47,7 +53,7 @@ public class ServiceJob extends BaseLogJob {
}
public void saveResult() {
private void saveResult() {
if (StringUtils.equals(InterVar.AWS_TYPE, this.type)) {
InterVar.AWS_NGINX_SERVICE_LIST.clear();
... ... @@ -74,5 +80,17 @@ public class ServiceJob extends BaseLogJob {
calNginxView();
saveResult();
doAlarm();
}
private void doAlarm() {
if (this.apiSeriesModelList.size() > InterVar.API_LIMIT) {
String msg = buildAlarmMsg();
alarmMsgComp.doAlarm(InterVar.ALARM_NGINX_SERVICE, msg);
}
}
}
... ...
package com.monitor.middleware.rabbitmq.component;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Created by yoho on 2016/7/6.
*/
@Component
public class AlarmMsgComp {
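// Thin wrapper that sends an alarm SMS (subject + content) to the ops developer list configured in SnsMobileConfig.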
@Autowired
AlarmMsgService alarmMsgService;
@Autowired
SnsMobileConfig snsMobileConfig;
public void doAlarm(String subject, String content) {
alarmMsgService.sendSms(subject, content, snsMobileConfig.getOpsManagerDeveloper());
}
}
... ...
... ... @@ -74,4 +74,6 @@ public interface InterVar {
String AWS_PREFIX = "172.";
String QC_PREFIX = "10.";
String ALARM_QUEUE="Rabbitmq Queue Alert";
}
... ...
... ... @@ -2,6 +2,10 @@ package com.monitor.middleware.rabbitmq.task;
import com.model.MObjectInfo;
import com.model.RabbitAlertInfo;
import com.monitor.cmdb.service.IMObjectInfoService;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import com.monitor.middleware.rabbitmq.constant.InterVar;
import com.monitor.middleware.rabbitmq.task.job.ClusterViewJob;
... ... @@ -41,6 +45,12 @@ public class RabbitMonitTask {
@Autowired
InfluxComp influxComp;
@Autowired
IMObjectInfoService imObjectInfoService;
@Autowired
AlarmMsgComp alarmMsgComp;
@Scheduled(fixedRate = 3 * 60 * 1000L)
public void doTask() {
synchronized (InterVar.LOCK.intern()) {
... ... @@ -50,7 +60,7 @@ public class RabbitMonitTask {
EXECUTOR_SERVICE.submit(new OneJob(new OverViewJob(influxComp, entry.getKey())));
EXECUTOR_SERVICE.submit(new OneJob(new QueueViewJob(influxComp, entry.getKey(), rabbitAlertMapper)));
EXECUTOR_SERVICE.submit(new OneJob(new QueueViewJob(influxComp, entry.getKey(), rabbitAlertMapper,imObjectInfoService,alarmMsgComp)));
}
}
... ...
package com.monitor.middleware.rabbitmq.task.job;
import com.monitor.middleware.rabbitmq.constant.InterVar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
... ... @@ -12,6 +14,8 @@ import java.util.concurrent.*;
*/
public class OneJob implements Runnable {
public static final Logger DEBUG = LoggerFactory.getLogger(OneJob.class);
private Callable oneJob;
public OneJob(Callable oneJob) {
... ... @@ -25,12 +29,16 @@ public class OneJob implements Runnable {
try {
future.get(InterVar.MONIT_TIMEOUT, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
DEBUG.error("Failed to execute rabbit monitor task , error {}", e);
} catch (ExecutionException e) {
e.printStackTrace();
DEBUG.error("Failed to execute rabbit monitor task , error {}", e);
} catch (TimeoutException e) {
e.printStackTrace();
DEBUG.error("Failed to execute rabbit monitor task , error {}", e);
}
}
}
... ...
... ... @@ -45,9 +45,6 @@ public class OverViewJob implements Callable {
String url = MessageFormat.format(InterVar.URLFORMAT, moInfo.getMoHostIp(), InterVar.port) + InterVar.OVERVIEW_URL;
String testUrl = MessageFormat.format(InterVar.URLFORMAT, moInfo.getMoHostIp(), InterVar.port) + InterVar.TEST_URL;
if (null == moInfo) {
return;
... ... @@ -83,15 +80,7 @@ public class OverViewJob implements Callable {
overViewModel.setMessages(oneView.getQueue_totals().getTotalMsg());
String testJson = RestComp.buildClient().getForObject(testUrl, String.class);
if (StringUtils.isBlank(testJson) || !StringUtils.equals(testJson, "{\"status\":\"ok\"}")) {
overViewModel.setState(0);
} else {
overViewModel.setState(1);
}
overViewModel.setState(1);
} catch (IOException e) {
... ... @@ -101,6 +90,8 @@ public class OverViewJob implements Callable {
} catch (RestClientException e) {
DEBUG.error("Failed to execute restful request... error {} ", e);
overViewModel.setState(0);
}
if (null != oneView) {
... ...
... ... @@ -3,6 +3,10 @@ package com.monitor.middleware.rabbitmq.task.job;
import com.model.MObjectInfo;
import com.model.RabbitAlertInfo;
import com.monitor.cmdb.service.IMObjectInfoService;
import com.monitor.common.config.SnsMobileConfig;
import com.monitor.common.service.AlarmMsgService;
import com.monitor.middleware.rabbitmq.component.AlarmMsgComp;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import com.monitor.middleware.rabbitmq.component.RestComp;
import com.monitor.middleware.rabbitmq.constant.InterVar;
... ... @@ -14,6 +18,7 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.support.SimpleTriggerContext;
import org.springframework.web.client.RestTemplate;
import javax.annotation.Resource;
... ... @@ -31,19 +36,27 @@ import java.util.concurrent.Callable;
public class QueueViewJob implements Callable {
private final static Logger DEBUG = LoggerFactory.getLogger(QueueViewJob.class);
RabbitAlertMapper rabbitAlertMapper;
private RabbitAlertMapper rabbitAlertMapper;
private IMObjectInfoService moService;
private AlarmMsgComp alarmMsgComp;
private InfluxComp influxComp;
private int moId;
public QueueViewJob(InfluxComp influxComp,int moId, RabbitAlertMapper rabbitAlertMapper) {
public QueueViewJob(InfluxComp influxComp, int moId, RabbitAlertMapper rabbitAlertMapper, IMObjectInfoService imObjectInfoService, AlarmMsgComp alarmMsgComp) {
this.influxComp=influxComp;
this.influxComp = influxComp;
this.moId = moId;
this.rabbitAlertMapper = rabbitAlertMapper;
this.moService=imObjectInfoService;
this.alarmMsgComp=alarmMsgComp;
}
public void doTask() {
... ... @@ -83,7 +96,7 @@ public class QueueViewJob implements Callable {
if (null != oneView) {
influxComp.doWriteView(oneView,InterVar.DBNAME);
influxComp.doWriteView(oneView, InterVar.DBNAME);
InterVar.queueViewMaps.put(moId, oneView);
}
... ... @@ -105,6 +118,12 @@ public class QueueViewJob implements Callable {
if (queueInfo.getMessages() > alert) {
//alert
String msg = buildAlarmMsg(queueInfo);
alarmMsgComp.doAlarm(InterVar.ALARM_QUEUE,msg);
//ui alert
InterVar.overViewMaps.get(this.moId).setState(0);
}
}
}
... ... @@ -132,4 +151,28 @@ public class QueueViewJob implements Callable {
doTask();
return null;
}
private String buildAlarmMsg(QueueInfo queueInfo) {
StringBuilder builder = new StringBuilder();
builder.append("{");
String ip = moService.queryMObjectInfo(this.moId).getMoHostIp();
builder.append("宿主机:").append(ip).append(";");
builder.append("队列名:").append(queueInfo.getName()).append(";");
builder.append("总计:").append(queueInfo.getMessages()).append(";");
builder.append("等待:").append(queueInfo.getMessages_ready()).append(";");
builder.append("未确认:").append(queueInfo.getMessages_unacknowledged()).append(";");
builder.append("}");
return builder.toString();
}
}
... ...
... ... @@ -5,6 +5,7 @@ import com.model.MObjectInfo;
import com.model.RedisMonitor;
import com.monitor.common.util.HttpRestClient;
import com.monitor.common.util.RedisInfoUtil;
import com.monitor.common.util.TelnetUtils;
import com.monitor.middleware.redis.service.IRedisMonitorHandleService;
import com.monitor.mysql.mapper.MObjectInfoMapper;
import com.monitor.mysql.mapper.RedisMonitorMapper;
... ... @@ -27,6 +28,7 @@ public class RedisMonitorHandleServiceImpl implements IRedisMonitorHandleService
Logger log = LoggerFactory.getLogger(RedisMonitorHandleServiceImpl.class);
@Autowired
HttpRestClient httpRestClient;
... ... @@ -42,8 +44,8 @@ public class RedisMonitorHandleServiceImpl implements IRedisMonitorHandleService
*1. Handle twemproxy
***********************************************************************/
List<RedisMonitor> redisInfoList=new ArrayList<RedisMonitor>();
redisInfoList.add(new RedisMonitor("AWS","AWS",0,null));
redisInfoList.add(new RedisMonitor("Qcloud","Qcloud",0,null));
redisInfoList.add(new RedisMonitor("AWS","AWS",0,1,null));
redisInfoList.add(new RedisMonitor("Qcloud","Qcloud",0,1,null));
RedisMonitor redisMonitor=null;
//List<MObjectInfo> mlist= mObjectInfoMapper.getMoInfosByAlias("com_twemproxy");
MObjectInfo a=new MObjectInfo();
... ... @@ -61,29 +63,37 @@ public class RedisMonitorHandleServiceImpl implements IRedisMonitorHandleService
List<String> ipList=null;
for(MObjectInfo obj:mlist){
paramMonitor=new StringBuffer();
JSONObject response=httpRestClient.defaultGet("http://" + obj.getMoHostIp() + ":22222/", JSONObject.class, null);
//JSONObject response=httpRestClient.defaultPost("http://192.168.102.222:22222", null, JSONObject.class);
if(null != response){
int total_connections=(Integer)response.get("total_connections");
int curr_connections=(Integer)response.get("curr_connections");
if(total_connections>0){
paramMonitor.append("总连接数:"+total_connections);
}
if (total_connections > 0) {
paramMonitor.append("当前接数:"+curr_connections);
}
//inspect the redis instances behind the proxy
JSONObject alpha=response.getJSONObject("alpha");
ipList=new ArrayList<String>();
for (Map.Entry<String, Object> entry : alpha.entrySet()) {
String key=entry.getKey();
if(key.indexOf(":")>1){
ipList.add(key);
String result= TelnetUtils.getResult(obj.getMoHostIp(),22222);
redisMonitor = new RedisMonitor();
if(StringUtils.isNotBlank(result)){
//fetch succeeded, so set the flag back to 1
JSONObject response=JSONObject.parseObject(result);
if(null != response){
int total_connections=(Integer)response.get("total_connections");
int curr_connections=(Integer)response.get("curr_connections");
if(total_connections>0){
paramMonitor.append("总连接数:"+total_connections);
}
if (total_connections > 0) {
paramMonitor.append("当前接数:"+curr_connections);
}
//inspect the redis instances behind the proxy
JSONObject alpha=response.getJSONObject("alpha");
ipList=new ArrayList<String>();
for (Map.Entry<String, Object> entry : alpha.entrySet()) {
String key=entry.getKey();
if(key.indexOf(":")>1){
ipList.add(key);
}
}
tMap.put(obj.getMoHostIp(),ipList);
redisMonitor.setIsFailed(1);
redisMonitor.setParamMonitor(paramMonitor.toString());
}
tMap.put(obj.getMoHostIp(),ipList);
}else{
redisMonitor.setIsFailed(0);
redisMonitor.setParamMonitor("状态:宕机;");
}
redisMonitor = new RedisMonitor();
if(StringUtils.isNotBlank(obj.getMoName()) && obj.getMoName().contains("aws")){
redisMonitor.setNodeFrom("AWS");
}else if(obj.getMoName().contains("qq")){
... ... @@ -91,10 +101,7 @@ public class RedisMonitorHandleServiceImpl implements IRedisMonitorHandleService
}
redisMonitor.setNodeTo(obj.getMoHostIp());
redisMonitor.setLevel(1);
redisMonitor.setParamMonitor(paramMonitor.toString());
redisInfoList.add(redisMonitor);
//obj.setParamMonitor(paramMonitor.toString());
//redisMonitorMapper.updateByPrimaryKey(obj);
}
/**********************************************************************
... ... @@ -112,25 +119,27 @@ public class RedisMonitorHandleServiceImpl implements IRedisMonitorHandleService
String[] ipConfig=ipStr.split(":");
Map<String,Object> result=null;
if(ipConfig.length==2){
int isFailed=0;
result= RedisInfoUtil.getRedisInfo(ipConfig[0], Integer.valueOf(ipConfig[1]));
paramMonitor=new StringBuffer();
if(null==result){
paramMonitor.append("状态:宕机;");
}else{
isFailed=1;
String role=(String)result.get("role");
paramMonitor.append("角色:"+role+";");
paramMonitor.append("用量:"+result.get("used_memory_rss")+";");
paramMonitor.append("内存碎片比率:" + result.get("mem_fragmentation_ratio") + ";");
// paramMonitor.append(":"+isSlave+";");
}
redisInfoList.add(new RedisMonitor(key,ipStr,3,paramMonitor.toString()));
redisInfoList.add(new RedisMonitor(key,ipStr,2,isFailed,paramMonitor.toString()));
}
}
}
}
/**********************************************************************
*2. Store the monitored RedisInfo data
*3. Store the monitored RedisInfo data
***********************************************************************/
if(!CollectionUtils.isEmpty(redisInfoList)){
redisMonitorMapper.deleteAllRedisMonitor();
... ...
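For reference, the stats text that TelnetUtils.getResult pulls from port 22222 is JSON of roughly this shape (addresses and counts are illustrative; the code above only reads total_connections, curr_connections, and the host:port keys nested under the pool object, named alpha here as in the code):

{"total_connections": 1523, "curr_connections": 37, "alpha": {"192.168.10.11:6379": {...}, "192.168.10.12:6379": {...}}}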
... ... @@ -2,7 +2,6 @@ package com.monitor.middleware.redis.task;
import com.monitor.middleware.redis.service.IRedisMonitorHandleService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
... ... @@ -12,7 +11,7 @@ public class RedisMonitorTask {
@Autowired
private IRedisMonitorHandleService redisMonitorService;
@Scheduled(fixedRate=20000)
//@Scheduled(fixedRate=20000)
//@Scheduled(cron="0 0/15 * * * ? ")
public void redisMonitor(){
redisMonitorService.redisMonitor();
... ...
... ... @@ -58,7 +58,8 @@ public class ZkMonitorHandleServiceImpl implements IZkMonitorHandleService {
for(MObjectInfo obj:mlist){
zk=new ZkInfo();
String ip=obj.getMoHostIp();
int result=checkConnection1(ip);
int result=checkConnection(ip);
log.info("get zookeeper connection is value {}",result);
if(result==0){
alarmList.add(obj);
}
... ... @@ -73,32 +74,34 @@ public class ZkMonitorHandleServiceImpl implements IZkMonitorHandleService {
}
/**********************************************************************
*2. Handle zookeeper alarms
*2. Handle zookeeper alarms and count how many of the machines are down
***********************************************************************/
if(CollectionUtils.isEmpty(alarmList)){
return;
}
StringBuffer alarmMsg=new StringBuffer();
for(MObjectInfo alarmObj:alarmList){
alarmMsg.append(alarmObj.getMoHostIp()).append(",");
}
alarmMsgService.sendSms("zookeeper",alarmMsg.toString(), snsMobileConfig.getOpsManagerDeveloper());
String alarmMsg="[有货监控]您好,zookeeper挂了"+alarmList.size()+"台,请及时查看!";
alarmMsgService.sendSms("zookeeper",alarmMsg, snsMobileConfig.getOpsManagerDeveloper());
log.info("task end...");
}
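// Returns 1 if a Curator client can connect to ip:2181 and list the root znode, 0 if the connection or query fails.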
private int checkConnection1(String ip){
private int checkConnection(String ip){
RetryPolicy retryPolicy=new RetryOneTime(1000);
CuratorFramework client =CuratorFrameworkFactory.newClient(ip+":2181", 5*1000, 5*1000, retryPolicy);
CuratorFramework client =null;
try {
client=CuratorFrameworkFactory.newClient(ip+":2181", 5*1000, 5*1000, retryPolicy);
client.start();
List<String> children = client.getChildren().forPath("/");
log.info("get zookeeper ip {} success",ip);
} catch (Exception e) {
log.error("get zookeeper children failed param ip {}.",ip,e);
return 0;
}finally {
client.close();
if(client!=null){
client.close();
}
}
return 1;
}
}
... ...
... ... @@ -4,6 +4,7 @@ import com.monitor.middleware.zookeeper.service.IZkMonitorHandleService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
/**
... ... @@ -18,10 +19,9 @@ public class ZookeeperMonitorTask {
IZkMonitorHandleService zkMonitorService;
//@Scheduled(fixedRate=20000)
//@Scheduled(cron="0 0/15 * * * ? ")
@Scheduled(cron="0 0/2 * * * ? ")
public void zookeeperMonitor() {
zkMonitorService.zookeeperMonitor();
}
}
... ...
... ... @@ -4,6 +4,7 @@ import lombok.Data;
@Data
public class RedisMonitor {
private int id;
private String nodeFrom;
... ... @@ -11,18 +12,24 @@ public class RedisMonitor {
private int level;
private int isFailed;
private String paramMonitor;
private String createTime;
private String updateTime;
public RedisMonitor() {
super();
}
public RedisMonitor(String nodeFrom,String nodeTo,int level,String paramMonitor) {
public RedisMonitor(String nodeFrom,String nodeTo,int level,int isFailed,String paramMonitor) {
super();
this.nodeFrom = nodeFrom;
this.nodeTo = nodeTo;
this.level = level;
this.isFailed=isFailed;
this.paramMonitor=paramMonitor;
}
... ...
... ... @@ -7,11 +7,12 @@
<result column="node_to" property="nodeTo" jdbcType="VARCHAR" />
<result column="level" property="level" jdbcType="TINYINT" />
<result column="param_monitor" property="paramMonitor" jdbcType="VARCHAR" />
<result column="is_failed" property="isFailed" jdbcType="TINYINT" />
<result column="create_time" property="createTime" jdbcType="TIMESTAMP" />
<result column="update_time" property="updateTime" jdbcType="TIMESTAMP" />
</resultMap>
<sql id="Base_Column_List" >
id, node_from, node_to, level, param_monitor, create_time, update_time
id, node_from, node_to, level,is_failed, param_monitor, create_time, update_time
</sql>
<select id="selectByPrimaryKey" resultMap="BaseResultMap" parameterType="java.lang.Long" >
select
... ... @@ -25,10 +26,10 @@
</delete>
<insert id="insert" parameterType="com.model.RedisMonitor" >
insert into redis_monitor (id, node_from, node_to,
level, param_monitor, create_time,
level,is_failed, param_monitor, create_time,
update_time)
values (#{id,jdbcType=BIGINT}, #{nodeFrom,jdbcType=VARCHAR}, #{nodeTo,jdbcType=VARCHAR},
#{level,jdbcType=TINYINT}, #{paramMonitor,jdbcType=VARCHAR}, #{createTime,jdbcType=TIMESTAMP},
#{level,jdbcType=TINYINT},#{isFailed,jdbcType=TINYINT}, #{paramMonitor,jdbcType=VARCHAR}, #{createTime,jdbcType=TIMESTAMP},
#{updateTime,jdbcType=TIMESTAMP})
</insert>
<insert id="insertSelective" parameterType="com.model.RedisMonitor" >
... ... @@ -46,6 +47,9 @@
<if test="level != null" >
level,
</if>
<if test="isFailed != null" >
is_failed,
</if>
<if test="paramMonitor != null" >
param_monitor,
</if>
... ... @@ -69,6 +73,9 @@
<if test="level != null" >
#{level,jdbcType=TINYINT},
</if>
<if test="isFailed != null" >
#{isFailed,jdbcType=TINYINT},
</if>
<if test="paramMonitor != null" >
#{paramMonitor,jdbcType=VARCHAR},
</if>
... ... @@ -92,6 +99,9 @@
<if test="level != null" >
level = #{level,jdbcType=TINYINT},
</if>
<if test="isFailed != null" >
is_failed = #{isFailed,jdbcType=TINYINT},
</if>
<if test="paramMonitor != null" >
param_monitor = #{paramMonitor,jdbcType=VARCHAR},
</if>
... ... @@ -109,6 +119,7 @@
set node_from = #{nodeFrom,jdbcType=VARCHAR},
node_to = #{nodeTo,jdbcType=VARCHAR},
level = #{level,jdbcType=TINYINT},
is_failed = #{isFailed,jdbcType=TINYINT},
param_monitor = #{paramMonitor,jdbcType=VARCHAR},
create_time = #{createTime,jdbcType=TIMESTAMP},
update_time = #{updateTime,jdbcType=TIMESTAMP}
... ... @@ -121,11 +132,11 @@
<insert id="bachInsertRedisMonitor" parameterType="com.model.RedisMonitor">
insert into redis_monitor
(node_from, node_to,level, param_monitor)
(node_from, node_to,level,is_failed,param_monitor)
values
<foreach collection="list" item="item" index="index"
separator=",">
(#{item.nodeFrom},#{item.nodeTo},#{item.level},#{item.paramMonitor})
(#{item.nodeFrom},#{item.nodeTo},#{item.level},#{item.isFailed},#{item.paramMonitor})
</foreach>
</insert>
... ...
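Since every statement in this mapper now selects, inserts, or updates is_failed, the redis_monitor table itself must carry a matching is_failed TINYINT column; the DDL for that column is not part of this change.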
... ... @@ -8,7 +8,7 @@ influxdb.connect.timeout=15;15;15
influxdb.read.timeout=40;40;40
influxdb.write.timeout=20;20;20
aws.influxdb.url=http://172.31.31.170:8086
aws.influxdb.url=http://127.0.0.1:8086
aws.influxdb.user=root
aws.influxdb.pwd=root
... ...
... ... @@ -16,7 +16,7 @@
<appender name="WARN" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${catalina.home}/logs/warn.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
<level>WARN</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- daily rollover -->
... ...
... ... @@ -2,14 +2,13 @@
influxdb.num=2
influxdb.name=test;alarm
influxdb.ip=http://192.168.102.162:8086;http://123.206.79.151:18086
#influxdb.ip=http://192.168.102.162:8086;http://192.168.99.100:32768
influxdb.ip=http://192.168.102.162:8086;http://172.16.6.104:8086
influxdb.user=yoho;root
influxdb.pwd=Yoho_9646;root
influxdb.connect.timeout=15;15
influxdb.read.timeout=40;40
influxdb.write.timeout=20;20
aws.influxdb.url=http://123.206.79.151:8086
aws.influxdb.url=http://172.16.6.104:8086
aws.influxdb.user=root
aws.influxdb.pwd=root
\ No newline at end of file
... ...