Authored by simba

Merge branch 'master' of http://git.yoho.cn/ops/monitor-service

Showing 17 changed files with 249 additions and 168 deletions
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:task="http://www.springframework.org/schema/task"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-4.0.xsd http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
http://www.springframework.org/schema/beans/spring-beans-4.0.xsd">
<bean id="javaapi_ConnectionManager" class="org.apache.http.impl.conn.PoolingHttpClientConnectionManager">
<property name="maxTotal" value="40" />
... ... @@ -21,8 +21,8 @@
<bean id="javaapi_clientHttpRequestFactory"
class="org.springframework.http.client.HttpComponentsClientHttpRequestFactory">
<constructor-arg ref="javaapi_httpClient" />
<property name="connectTimeout" value="800" />
<property name="readTimeout" value="1200" />
<property name="connectTimeout" value="3000" />
<property name="readTimeout" value="3000" />
</bean>
<bean id="javaapiRestTemplate" class="org.springframework.web.client.RestTemplate">
... ...
... ... @@ -11,6 +11,6 @@ import java.util.Map;
/**
 * Orders map entries by their {@link ApiStaModel} count in descending order
 * (largest count first), so the highest-traffic APIs sort to the front of
 * the pie/tab views.
 *
 * NOTE(review): the source was a diff dump containing both the old ascending
 * and new descending return lines; the descending variant is kept here.
 */
public class ApiMapValueCompare implements Comparator<Map.Entry<String, ApiStaModel>> {
    @Override
    public int compare(Map.Entry<String, ApiStaModel> o1, Map.Entry<String, ApiStaModel> o2) {
        // Long.compare returns 0 on ties, preserving the Comparator contract
        // (sgn(compare(a,b)) == -sgn(compare(b,a))); the original ternary
        // never returned 0 and was asymmetric for equal counts, which can
        // break TreeMap/sort stability. Arguments are swapped for descending
        // order; int counts widen to long safely.
        return Long.compare(o2.getValue().getCount(), o1.getValue().getCount());
    }
}
... ...
... ... @@ -11,6 +11,6 @@ import java.util.Map;
/**
 * Orders map entries by their {@link ErrorStaModel} count in descending order
 * (most frequent error type first), mirroring {@code ApiMapValueCompare}.
 *
 * NOTE(review): the source was a diff dump containing both the old ascending
 * and new descending return lines; the descending variant is kept here.
 */
public class ErrorMapValueCompare implements Comparator<Map.Entry<String, ErrorStaModel>> {
    @Override
    public int compare(Map.Entry<String, ErrorStaModel> o1, Map.Entry<String, ErrorStaModel> o2) {
        // Long.compare returns 0 on ties, keeping the Comparator contract
        // intact; the original ternary never returned 0 and was asymmetric
        // for equal counts. Arguments are swapped for descending order.
        return Long.compare(o2.getValue().getCount(), o1.getValue().getCount());
    }
}
... ...
... ... @@ -6,6 +6,7 @@ import com.monitor.middleware.nginx.model.ErrorStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
... ... @@ -20,21 +21,19 @@ public interface InterVar {
ConcurrentHashMap<String, NginxView> NGINX_VIEW_CONCURRENT_HASH_MAP = new ConcurrentHashMap<>();
CopyOnWriteArrayList<ApiStaModel> AWS_NGINX_API_LIST=new CopyOnWriteArrayList<>();
ConcurrentHashMap<String, ApiStaModel> AWS_NGINX_API_HASH_MAP = new ConcurrentHashMap<>();
CopyOnWriteArrayList<ApiStaModel> AWS_NGINX_SERVICE_LIST=new CopyOnWriteArrayList<>();
ConcurrentHashMap<String, ApiStaModel> Aws_NGINX_SERVICE_HASH_MAP = new ConcurrentHashMap<>();
CopyOnWriteArrayList<ApiStaModel> QC_NGINX_API_LIST=new CopyOnWriteArrayList<>();
CopyOnWriteArrayList<ApiStaModel> QC_NGINX_SERVICE_LIST=new CopyOnWriteArrayList<>();
ConcurrentHashMap<String, ApiStaModel> QC_NGINX_API_HASH_MAP = new ConcurrentHashMap<>();
CopyOnWriteArrayList<ErrorStaModel> AWS_NGINX_ERROR_LIST=new CopyOnWriteArrayList<>();
ConcurrentHashMap<String, ApiStaModel> QC_NGINX_SERVICE_HASH_MAP = new ConcurrentHashMap<>();
CopyOnWriteArrayList<ErrorStaModel> QC_NGINX_ERROR_LIST=new CopyOnWriteArrayList<>();
ConcurrentHashMap<String, ErrorStaModel> AWS_NGINX_ERROR_HASH_MAP = new ConcurrentHashMap<>();
ConcurrentHashMap<String, ErrorStaModel> QC_NGINX_ERROR_HASH_MAP = new ConcurrentHashMap<>();
String DBNAME = "nginx_access_error_report";
... ...
package com.monitor.middleware.nginx.model.http;
import com.monitor.model.page.PageRequest;
import lombok.Data;
/**
* Created by yoho on 2016/6/30.
*/
@Data
public class NginxTypeRequest {
public class NginxTypeRequest extends PageRequest {
String type;
}
... ...
... ... @@ -11,6 +11,8 @@ import com.monitor.middleware.nginx.model.http.NginxTabView;
import com.monitor.middleware.nginx.model.http.NginxTypeRequest;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import com.monitor.middleware.rabbitmq.model.NodeInfo;
import com.monitor.model.page.PageResponse;
import com.monitor.model.response.BaseResponse;
import com.sun.javafx.image.BytePixelSetter;
import org.apache.commons.lang.StringUtils;
... ... @@ -97,34 +99,34 @@ public class NginxService {
BaseResponse response = new BaseResponse();
Map<String, ApiStaModel> operMap = null;
List<ApiStaModel> operList = null;
if (StringUtils.equals(InterVar.AWS_TYPE, request.getType())) {
operMap = InterVar.AWS_NGINX_API_HASH_MAP;
operList = InterVar.AWS_NGINX_API_LIST;
} else {
operMap = InterVar.QC_NGINX_API_HASH_MAP;
operList = InterVar.QC_NGINX_API_LIST;
}
List<NginxPieView> nginxPieViewList = new ArrayList<>();
List<Map.Entry<String, ApiStaModel>> apiNginxList = new ArrayList<>(operMap.entrySet());
buildNginxPieView(nginxPieViewList, apiNginxList);
buildNginxPieView(nginxPieViewList, operList);
response.setData(nginxPieViewList);
return response;
}
private void buildNginxPieView(List<NginxPieView> nginxPieViewList, List<Map.Entry<String, ApiStaModel>> apiNginxList) {
private void buildNginxPieView(List<NginxPieView> nginxPieViewList, List<ApiStaModel> apiNginxList) {
if (apiNginxList.size() > 5) {
for (int i = 0; i < 4; i++) {
NginxPieView pieView = new NginxPieView();
pieView.setName(apiNginxList.get(i).getValue().getApiKey());
pieView.setName(apiNginxList.get(i).getApiKey());
pieView.setCount(apiNginxList.get(i).getValue().getCount());
pieView.setCount(apiNginxList.get(i).getCount());
nginxPieViewList.add(pieView);
}
... ... @@ -132,7 +134,7 @@ public class NginxService {
int count = 0;
for (int i = 4; i < apiNginxList.size(); i++) {
count += apiNginxList.get(i).getValue().getCount();
count += apiNginxList.get(i).getCount();
}
NginxPieView lastPieView = new NginxPieView();
... ... @@ -141,15 +143,15 @@ public class NginxService {
lastPieView.setCount(count);
nginxPieViewList.add(lastPieView);
} else {
for (int i = 0; i < apiNginxList.size(); i++) {
NginxPieView pieView = new NginxPieView();
pieView.setName(apiNginxList.get(i).getValue().getApiKey());
pieView.setName(apiNginxList.get(i).getApiKey());
pieView.setCount(apiNginxList.get(i).getValue().getCount());
pieView.setCount(apiNginxList.get(i).getCount());
nginxPieViewList.add(pieView);
}
... ... @@ -161,19 +163,17 @@ public class NginxService {
public BaseResponse queryServiceInfo(@RequestBody NginxTypeRequest request) {
BaseResponse response = new BaseResponse();
Map<String, ApiStaModel> operMap = null;
List<ApiStaModel> operList = null;
if (StringUtils.equals(InterVar.AWS_TYPE, request.getType())) {
operMap = InterVar.Aws_NGINX_SERVICE_HASH_MAP;
operList = InterVar.AWS_NGINX_SERVICE_LIST;
} else {
operMap = InterVar.QC_NGINX_SERVICE_HASH_MAP;
operList = InterVar.QC_NGINX_SERVICE_LIST;
}
List<NginxPieView> nginxPieViewList = new ArrayList<>();
List<Map.Entry<String, ApiStaModel>> apiNginxList = new ArrayList<>(operMap.entrySet());
buildNginxPieView(nginxPieViewList, apiNginxList);
buildNginxPieView(nginxPieViewList, operList);
response.setData(nginxPieViewList);
... ... @@ -184,34 +184,33 @@ public class NginxService {
public BaseResponse queryErrorInfo(@RequestBody NginxTypeRequest request) {
BaseResponse response = new BaseResponse();
Map<String, ErrorStaModel> operMap = null;
List<ErrorStaModel> operList = null;
if (StringUtils.equals(InterVar.AWS_TYPE, request.getType())) {
operMap = InterVar.AWS_NGINX_ERROR_HASH_MAP;
operList = InterVar.AWS_NGINX_ERROR_LIST;
} else {
operMap = InterVar.QC_NGINX_ERROR_HASH_MAP;
operList = InterVar.QC_NGINX_ERROR_LIST;
}
List<NginxPieView> nginxPieViewList = new ArrayList<>();
List<Map.Entry<String, ErrorStaModel>> apiNginxList = new ArrayList<>(operMap.entrySet());
if (InterVar.AWS_NGINX_API_HASH_MAP.size() > 5) {
if (operList.size() > 5) {
for (int i = 0; i < 4; i++) {
NginxPieView pieView = new NginxPieView();
pieView.setName(apiNginxList.get(i).getValue().getType());
pieView.setName(operList.get(i).getType());
pieView.setCount(apiNginxList.get(i).getValue().getCount());
pieView.setCount(operList.get(i).getCount());
nginxPieViewList.add(pieView);
}
int count = 0;
for (int i = 4; i < apiNginxList.size(); i++) {
count += apiNginxList.get(i).getValue().getCount();
for (int i = 4; i < operList.size(); i++) {
count += operList.get(i).getCount();
}
NginxPieView lastPieView = new NginxPieView();
... ... @@ -223,13 +222,13 @@ public class NginxService {
} else {
for (int i = 0; i < apiNginxList.size(); i++) {
for (int i = 0; i < operList.size(); i++) {
NginxPieView pieView = new NginxPieView();
pieView.setName(apiNginxList.get(i).getValue().getType());
pieView.setName(operList.get(i).getType());
pieView.setCount(apiNginxList.get(i).getValue().getCount());
pieView.setCount(operList.get(i).getCount());
nginxPieViewList.add(pieView);
}
... ... @@ -251,39 +250,63 @@ public class NginxService {
@RequestMapping(value = "/apiTab")
public BaseResponse queryApiTab(@RequestBody NginxTypeRequest request) {
BaseResponse response = new BaseResponse();
BaseResponse baseResponse=new BaseResponse();
List<NginxTabView> tabViews = new ArrayList<>();
Map<String, ApiStaModel> operMap = null;
List<ApiStaModel> operList = null;
if (StringUtils.equals(request.getType(), InterVar.AWS_TYPE)) {
operMap = InterVar.AWS_NGINX_API_HASH_MAP;
operList = InterVar.AWS_NGINX_API_LIST;
} else {
operMap = InterVar.QC_NGINX_API_HASH_MAP;
operList = InterVar.QC_NGINX_API_LIST;
}
buildNginxTabView(tabViews, operMap);
buildNginxTabView(tabViews, operList);
response.setData(tabViews);
List<NginxTabView> selectTabViews = new ArrayList<>();
return response;
int start = (request.getCurrentPage() - 1) * request.getPageSize();
int end = (request.getCurrentPage() * request.getPageSize()) - 1;
int realCount = end < tabViews.size() ? request.getPageSize() : tabViews.size() - start;
for (int i = 0; i < realCount; i++) {
selectTabViews.add(tabViews.get(start + i));
}
PageResponse<NginxTabView> response = new PageResponse<>();
response.setCurrentPage(request.getCurrentPage());
response.setTotal(tabViews.size());
response.setPageSize(request.getPageSize());
response.setTotalPage(tabViews.size() / request.getPageSize() + 1);
response.setRows(selectTabViews);
baseResponse.setData(response);
return baseResponse;
}
private void buildNginxTabView(List<NginxTabView> tabViews, Map<String, ApiStaModel> operMap) {
for (Map.Entry<String, ApiStaModel> apiStaModelEntry : operMap.entrySet()) {
private void buildNginxTabView(List<NginxTabView> tabViews, List<ApiStaModel> operList) {
for (ApiStaModel apiStaModelEntry : operList) {
NginxTabView tabView = new NginxTabView();
tabView.setName(apiStaModelEntry.getKey());
tabView.setName(apiStaModelEntry.getApiKey());
tabView.setCount(apiStaModelEntry.getValue().getCount());
tabView.setCount(apiStaModelEntry.getCount());
String logIpPercent = StringUtils.EMPTY;
for (Map.Entry<String, String> entry : apiStaModelEntry.getValue().getLogIpPercentMap().entrySet()) {
for (Map.Entry<String, String> entry : apiStaModelEntry.getLogIpPercentMap().entrySet()) {
logIpPercent += " " + entry.getKey() + " : " + entry.getValue() + " / ";
}
... ... @@ -292,14 +315,14 @@ public class NginxService {
String codePercent = StringUtils.EMPTY;
for (Map.Entry<String, String> entry : apiStaModelEntry.getValue().getCodePercentMap().entrySet()) {
for (Map.Entry<String, String> entry : apiStaModelEntry.getCodePercentMap().entrySet()) {
codePercent += " " + entry.getKey() + " : " + entry.getValue() + " / ";
}
tabView.setCodePercent(codePercent);
tabView.setTotalPercent(apiStaModelEntry.getValue().getTotalPercent());
tabView.setTotalPercent(apiStaModelEntry.getTotalPercent());
tabViews.add(tabView);
}
... ... @@ -308,25 +331,51 @@ public class NginxService {
@RequestMapping(value = "/serviceTab")
public BaseResponse queryServiceTab(@RequestBody NginxTypeRequest request) {
BaseResponse response = new BaseResponse();
BaseResponse baseResponse = new BaseResponse();
List<NginxTabView> tabViews = new ArrayList<>();
Map<String, ApiStaModel> operMap = null;
List<ApiStaModel> operList = null;
if (StringUtils.equals(request.getType(), InterVar.AWS_TYPE)) {
operMap = InterVar.Aws_NGINX_SERVICE_HASH_MAP;
operList = InterVar.AWS_NGINX_SERVICE_LIST;
} else {
operMap = InterVar.QC_NGINX_SERVICE_HASH_MAP;
operList = InterVar.QC_NGINX_SERVICE_LIST;
}
buildNginxTabView(tabViews, operMap);
buildNginxTabView(tabViews, operList);
response.setData(tabViews);
return response;
List<NginxTabView> selectTabViews = new ArrayList<>();
int start = (request.getCurrentPage() - 1) * request.getPageSize();
int end = (request.getCurrentPage() * request.getPageSize()) - 1;
int realCount = end < tabViews.size() ? request.getPageSize() : tabViews.size() - start;
for (int i = 0; i < realCount; i++) {
selectTabViews.add(tabViews.get(start + i));
}
PageResponse<NginxTabView> response = new PageResponse<>();
response.setCurrentPage(request.getCurrentPage());
response.setTotal(tabViews.size());
response.setPageSize(request.getPageSize());
response.setTotalPage(tabViews.size() / request.getPageSize() + 1);
response.setRows(selectTabViews);
baseResponse.setData(response);
return baseResponse;
}
... ... @@ -336,34 +385,34 @@ public class NginxService {
List<NginxTabView> tabViews = new ArrayList<>();
Map<String, ErrorStaModel> operMap = null;
List< ErrorStaModel> operList = null;
if (StringUtils.equals(request.getType(), InterVar.AWS_TYPE)) {
operMap = InterVar.AWS_NGINX_ERROR_HASH_MAP;
operList = InterVar.AWS_NGINX_ERROR_LIST;
} else {
operMap = InterVar.QC_NGINX_ERROR_HASH_MAP;
operList = InterVar.QC_NGINX_ERROR_LIST;
}
for (Map.Entry<String, ErrorStaModel> apiStaModelEntry : operMap.entrySet()) {
for (ErrorStaModel errorStaModel : operList) {
NginxTabView tabView = new NginxTabView();
tabView.setName(apiStaModelEntry.getKey());
tabView.setName(errorStaModel.getType());
tabView.setCount(apiStaModelEntry.getValue().getCount());
tabView.setCount(errorStaModel.getCount());
String logIpPercent = StringUtils.EMPTY;
for (Map.Entry<String, String> entry : apiStaModelEntry.getValue().getLogIpPercentMap().entrySet()) {
for (Map.Entry<String, String> entry : errorStaModel.getLogIpPercentMap().entrySet()) {
logIpPercent += " " + entry.getKey() + " : " + entry.getValue() + " / ";
}
tabView.setLogIpPercent(logIpPercent);
tabView.setTotalPercent(apiStaModelEntry.getValue().getTotalPercent());
tabView.setTotalPercent(errorStaModel.getTotalPercent());
tabViews.add(tabView);
}
... ...
... ... @@ -2,11 +2,13 @@ package com.monitor.middleware.nginx.task.job.oneJob;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import java.util.List;
import java.util.Map;
/**
* Created by yoho on 2016/6/22.
... ... @@ -58,13 +60,19 @@ public class ApiJob extends BaseLogJob {
public void saveResult() {
if (StringUtils.equals(InterVar.AWS_TYPE, this.type)) {
InterVar.AWS_NGINX_API_HASH_MAP.clear();
InterVar.AWS_NGINX_API_LIST.clear();
InterVar.AWS_NGINX_API_HASH_MAP.putAll(this.resultMap);
for(ApiStaModel apiStaModel:this.resultList)
{
InterVar.AWS_NGINX_API_LIST.add(apiStaModel);
}
} else {
InterVar.QC_NGINX_API_HASH_MAP.clear();
InterVar.QC_NGINX_API_LIST.clear();
InterVar.QC_NGINX_API_HASH_MAP.putAll(this.resultMap);
for(ApiStaModel apiStaModel:this.resultList)
{
InterVar.QC_NGINX_API_LIST.add(apiStaModel);
}
}
}
... ...
... ... @@ -6,17 +6,12 @@ import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import org.aspectj.lang.annotation.After;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.QueryResult;
import org.jboss.netty.util.internal.StringUtil;
import java.text.DecimalFormat;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
* Created by yoho on 2016/6/28.
... ... @@ -28,7 +23,7 @@ public class BaseLogJob implements Callable {
//aws qc different
private InfluxComp influxComp;
protected Map<String, ApiStaModel> resultMap;
protected List<ApiStaModel> resultList;
protected List<ApiSeriesModel> apiSeriesModelList = new ArrayList<>();
... ... @@ -60,13 +55,13 @@ public class BaseLogJob implements Callable {
calClassPercent(oneSta.getValue());
}
resultMap = sortMap(apiStaMap);
resultList = sortMap(apiStaMap);
}
private Map<String, ApiStaModel> sortMap(HashMap<String, ApiStaModel> map) {
private List<ApiStaModel> sortMap(HashMap<String, ApiStaModel> map) {
Map<String, ApiStaModel> sortedMap = new LinkedHashMap<>();
List<ApiStaModel> sortedList = new ArrayList<>();
List<Map.Entry<String, ApiStaModel>> entryList = new ArrayList<>(map.entrySet());
... ... @@ -79,9 +74,9 @@ public class BaseLogJob implements Callable {
while (iterable.hasNext()) {
oneEntry = iterable.next();
sortedMap.put(oneEntry.getKey(), oneEntry.getValue());
sortedList.add(oneEntry.getValue());
}
return sortedMap;
return sortedList;
}
... ... @@ -121,9 +116,9 @@ public class BaseLogJob implements Callable {
for (Map.Entry<String, Integer> oneCount : countMap.entrySet()) {
double percent = oneCount.getValue() / (double) total;
double percent = 100*oneCount.getValue() / (double) total;
percentMap.put(oneCount.getKey(), format.format(percent));
percentMap.put(oneCount.getKey(), format.format(percent)+"%");
}
return percentMap;
... ... @@ -138,9 +133,9 @@ public class BaseLogJob implements Callable {
oneEntry.getValue().setCount(count);
double percent = count / (double) total;
double percent = 100*count / (double) total;
oneEntry.getValue().setTotalPercent(format.format(percent));
oneEntry.getValue().setTotalPercent(format.format(percent)+"%");
}
}
... ... @@ -234,27 +229,27 @@ public class BaseLogJob implements Callable {
}
/* public static void main(String[] args) {
String sql = "select * from cluster_info where time > now() - 2d";
/* public static void main(String[] args) {
//String sql = "select * from cluster_info where time > now() - 2d";
InfluxComp influxComp = new InfluxComp("http://172.16.6.104:8086", "root", "root");
influxComp.init();
Random random=new Random();
*//* Point point=Point.measurement(InterVar.SERACCESSMEASURE)
Point point=Point.measurement(InterVar.SERACCESSMEASURE)
.addField("http_host","api.yoho.cn")
.addField("log_file","api.yoho.cn_access.log")
.addField("log_ip","172.16.6.104")
.addField("request_method","GET")
.addField("request_status","499")
.addField("request_url","/ HTTP/1.1")
.addField("request_url","/haha HTTP/1.1")
.addField("upstream","172.16.6.105:8800")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();
String request_url="/?app_version=4.0.0&client_secret=b7eaa613a3ccfda51b697964a276bcba&client_type=android&gender=&limit=60&method=app.SpaceOrders.get&os_version=android5.0%3ASM-G9006V&page=1&screen_size=1080x1920&type=1&v=7&yh_channel=1 HTTP/1.1";
String request_url="/?app_version=4.0.0&client_secret=b7eaa613a3ccfda51b697964a276bcba&client_type=android&gender=&limit=60&method=app.SpaceOrders.push&os_version=android5.0%3ASM-G9006V&page=1&screen_size=1080x1920&type=1&v=7&yh_channel=1 HTTP/1.1";
Point point1=Point.measurement(InterVar.SERACCESSMEASURE)
... ... @@ -266,10 +261,15 @@ public class BaseLogJob implements Callable {
.addField("request_url",request_url)
.addField("upstream","172.16.6.105:8800")
.time(System.currentTimeMillis() * 1000000 + random.nextInt(999999), TimeUnit.NANOSECONDS)
.build();*//*
.build();
BatchPoints points=BatchPoints.database(InterVar.DBNAME).retentionPolicy("default").build();
points.point(point);
points.point(point1);
Point ponit=Point.measurement(InterVar.ERRORMEASURE)
influxComp.getInfluxDBClient().write(points);
*//*Point ponit=Point.measurement(InterVar.ERRORMEASURE)
.addField("err_msg","Lua error 45646576456")
.addField("err_severity","error")
.addField("log_ip","172.16.6.104")
... ... @@ -291,11 +291,14 @@ public class BaseLogJob implements Callable {
batchPoints.point(ponit1);
influxComp.getInfluxDBClient().createDatabase(InterVar.DBNAME);
influxComp.getInfluxDBClient().write(batchPoints);
*//**//**//**//* influxComp.getInfluxDBClient().createDatabase(InterVar.DBNAME);
influxComp.getInfluxDBClient().write(batchPoints);
*//**//**//**//**//**//**//**//**//**//**//**//**//**//**//**//*
*//* QueryResult result = influxComp.doQuery(sql, "rabbitmq_info");
QueryResult result = influxComp.doQuery(sql, "rabbitmq_info");
List<String> columns = result.getResults().get(0).getSeries().get(0).getColumns();
... ... @@ -307,10 +310,10 @@ public class BaseLogJob implements Callable {
String v = result.getResults().get(0).getSeries().get(0).getValues().get(0).get(1).toString();
System.out.println(v);*//*
System.out.println(v);*//**//**//**//**//**//**//**//**//**//**//**//**//**//**//**//*
*//* System.out.println(new ApiJob().queryRequest("/?app_version=4.6.0.1606220001&client_secret=d039fbc04704d0679eb59c8c82593ddb&client_type=iphone&is_read=N&method=app.inbox.getTotal&os_version=9.3.2&screen_size=375x667&uid=3930397&v=7 HTTP/1.1"));
System.out.println(new ApiJob().queryRequest("/?app_version=4.6.0.1606220001&client_secret=d039fbc04704d0679eb59c8c82593ddb&client_type=iphone&is_read=N&method=app.inbox.getTotal&os_version=9.3.2&screen_size=375x667&uid=3930397&v=7 HTTP/1.1"));
System.out.println(new ApiJob().queryRequest("/ HTTP/1.1"));*//*
}*/
... ...
... ... @@ -47,7 +47,7 @@ public class ErrorLogJob implements Callable {
private void doTask() {
QueryResult result = this.influxComp.doQuery("select * from " + InterVar.ERRORMEASURE + " where time > now() - "+InterVar.SCANINTERVAL, InterVar.DBNAME);
QueryResult result = this.influxComp.doQuery("select * from " + InterVar.ERRORMEASURE + " where time > now() - " + InterVar.SCANINTERVAL, InterVar.DBNAME);
buildErrorModel(result.getResults().get(0).getSeries());
... ... @@ -62,16 +62,16 @@ public class ErrorLogJob implements Callable {
calLogIpPercent(entry.getValue());
}
Map<String, ErrorStaModel> map = sortMap(countMap);
List<ErrorStaModel> list = sortMap(countMap);
if (StringUtils.equals(InterVar.AWS_TYPE, this.type)) {
InterVar.AWS_NGINX_ERROR_HASH_MAP.clear();
InterVar.AWS_NGINX_ERROR_LIST.clear();
InterVar.AWS_NGINX_ERROR_HASH_MAP.putAll(map);
InterVar.AWS_NGINX_ERROR_LIST.addAll(list);
} else {
InterVar.QC_NGINX_ERROR_HASH_MAP.clear();
InterVar.QC_NGINX_ERROR_LIST.clear();
InterVar.QC_NGINX_ERROR_HASH_MAP.putAll(map);
InterVar.QC_NGINX_ERROR_LIST.addAll(list);
}
}
... ... @@ -101,8 +101,9 @@ public class ErrorLogJob implements Callable {
}
}
public Map<String, ErrorStaModel> sortMap(HashMap<String, ErrorStaModel> countMap) {
Map<String, ErrorStaModel> sortedMap = new LinkedHashMap<>();
public List<ErrorStaModel> sortMap(HashMap<String, ErrorStaModel> countMap) {
List<ErrorStaModel> sortedList = new ArrayList<>();
List<Map.Entry<String, ErrorStaModel>> allList = new ArrayList<>(countMap.entrySet());
... ... @@ -116,10 +117,10 @@ public class ErrorLogJob implements Callable {
errorStaModelEntry = iterator.next();
sortedMap.put(errorStaModelEntry.getKey(), errorStaModelEntry.getValue());
sortedList.add(errorStaModelEntry.getValue());
}
return sortedMap;
return sortedList;
}
private HashMap<String, ErrorStaModel> classByType() {
... ... @@ -150,9 +151,9 @@ public class ErrorLogJob implements Callable {
public void calTotalPercent(int total, HashMap<String, ErrorStaModel> countMap) {
for (Map.Entry<String, ErrorStaModel> entry : countMap.entrySet()) {
double percent = entry.getValue().getApiSeriesModelList().size() / (double) total;
double percent = 100*entry.getValue().getApiSeriesModelList().size() / (double) total;
entry.getValue().setTotalPercent(format.format(percent));
entry.getValue().setTotalPercent(format.format(percent)+"%");
}
}
... ... @@ -180,9 +181,9 @@ public class ErrorLogJob implements Callable {
for (Map.Entry<String, Integer> oneEntry : logIpMap.entrySet()) {
double percent = oneEntry.getValue() / (double) total;
double percent = 100*oneEntry.getValue() / (double) total;
percentmap.put(oneEntry.getKey(), format.format(percent));
percentmap.put(oneEntry.getKey(), format.format(percent)+"%");
}
errorStaModel.setLogIpPercentMap(percentmap);
... ... @@ -206,8 +207,7 @@ public class ErrorLogJob implements Callable {
for (List<Object> oneRow : oneSeries.getValues()) {
if(!StringUtils.equals(type,queryLocation(oneRow.get(KINDEMAPS.get("log_ip")).toString())))
{
if (!StringUtils.equals(type, queryLocation(oneRow.get(KINDEMAPS.get("log_ip")).toString()))) {
continue;
}
ErrorSeriesModel errorSeriesModel = new ErrorSeriesModel();
... ...
... ... @@ -2,10 +2,13 @@ package com.monitor.middleware.nginx.task.job.oneJob;
import com.monitor.middleware.nginx.constant.InterVar;
import com.monitor.middleware.nginx.model.ApiSeriesModel;
import com.monitor.middleware.nginx.model.ApiStaModel;
import com.monitor.middleware.nginx.model.http.NginxView;
import com.monitor.middleware.rabbitmq.component.InfluxComp;
import org.apache.commons.lang.StringUtils;
import java.util.Map;
/**
* Created by yoho on 2016/6/28.
... ... @@ -47,17 +50,20 @@ public class ServiceJob extends BaseLogJob {
public void saveResult() {
if (StringUtils.equals(InterVar.AWS_TYPE, this.type)) {
InterVar.AWS_NGINX_SERVICE_LIST.clear();
InterVar.Aws_NGINX_SERVICE_HASH_MAP.clear();
InterVar.Aws_NGINX_SERVICE_HASH_MAP.putAll(this.resultMap);
for (ApiStaModel apiStaModel : this.resultList) {
InterVar.AWS_NGINX_SERVICE_LIST.add(apiStaModel);
}
} else {
InterVar.QC_NGINX_SERVICE_LIST.clear();
InterVar.QC_NGINX_SERVICE_HASH_MAP.clear();
InterVar.QC_NGINX_SERVICE_HASH_MAP.putAll(this.resultMap);
for (ApiStaModel apiStaModel : this.resultList) {
InterVar.QC_NGINX_SERVICE_LIST.add(apiStaModel);
}
}
}
... ...
... ... @@ -22,7 +22,8 @@ import java.util.concurrent.TimeUnit;
* Created by yoho on 2016/6/21.
*/
@Component
public class InfluxComp {
public class InfluxComp {
public static final Logger DEBUG = LoggerFactory.getLogger(InfluxComp.class);
... ... @@ -37,6 +38,11 @@ public class InfluxComp {
private InfluxDB influxDBClient;
/* public InfluxComp(String url, String name, String pwd) {
this.influxUrl = url;
this.influxUser = name;
this.influxPwd = pwd;
}*/
@PostConstruct
public void init() {
... ... @@ -52,18 +58,18 @@ public class InfluxComp {
influxDBClient = InfluxDBFactory.connect(this.influxUrl, this.influxUser, this.influxPwd, new OkClient(okHttpClient));
}
public void doWriteView(PointView pointView) {
public void doWriteView(PointView pointView, String DBName) {
try {
if (null != pointView) {
influxDBClient.createDatabase(InterVar.DBNAME);
if (null != pointView) {
influxDBClient.write(pointView.toPoint());
}
} catch (Exception e) {
influxDBClient.createDatabase(DBName);
DEBUG.error("Failed to write point to influx {}", pointView.toPoint());
influxDBClient.write(pointView.toPoint());
}
} catch (Exception e) {
DEBUG.error("Failed to write point to influx {}", pointView.toPoint());
}
}
... ...
... ... @@ -28,9 +28,7 @@ import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.*;
/**
* Created by yoho on 2016/6/21.
... ... @@ -107,6 +105,8 @@ public class RabbitmqService {
List<QueueInfo> queuesList = oneView.getQueueView();
sortQueue(queuesList);
List<QueueInfo> selectedList = new ArrayList<>();
int start = (request.getCurrentPage() - 1) * request.getPageSize();
... ... @@ -138,6 +138,17 @@ public class RabbitmqService {
return baseResponse;
}
private void sortQueue(List<QueueInfo> queuesList) {
Collections.sort(queuesList, new Comparator<QueueInfo>() {
@Override
public int compare(QueueInfo o1, QueueInfo o2) {
return o1.getMessages() > o2.getMessages() ? -1 : 1;
}
});
}
@RequestMapping(value = "/allRabbitMq")
public BaseResponse queryAllRabbit(@RequestBody QueryRequest request) {
BaseResponse response = new BaseResponse();
... ...
... ... @@ -69,7 +69,7 @@ public class ClusterViewJob implements Callable {
if (null != oneView) {
influxComp.doWriteView(oneView);
influxComp.doWriteView(oneView,InterVar.DBNAME);
InterVar.clusterViewMaps.put(moId, oneView);
}
... ...
... ... @@ -105,7 +105,7 @@ public class OverViewJob implements Callable {
if (null != oneView) {
influxComp.doWriteView(oneView);
influxComp.doWriteView(oneView,InterVar.DBNAME);
InterVar.overViewMaps.put(moId, overViewModel);
}
... ...
... ... @@ -83,7 +83,7 @@ public class QueueViewJob implements Callable {
if (null != oneView) {
influxComp.doWriteView(oneView);
influxComp.doWriteView(oneView,InterVar.DBNAME);
InterVar.queueViewMaps.put(moId, oneView);
}
... ...
... ... @@ -13,42 +13,46 @@
<!-- 日志最大的文件大小 10MB-->
<property name="maxFileSize" value="10MB"/>
<!-- WARN级别日志 appender -->
<appender name="WARN" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/Data/tomcat_logs/monitor/warn-log</file>
<!-- 过滤器,过滤掉 TRACE 和 DEBUG 和 INFO 级别的日志 -->
<file>${catalina.home}/logs/warn.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>WARN</level>
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 按天回滚 daily -->
<fileNamePattern>${catalina.home}/logs/%d{yyyy-MM-dd}/warn-log.log</fileNamePattern>
<!-- 日志最大的保存天数 -->
<!-- daily rollover -->
<fileNamePattern>
${catalina.home}/logs/warn.%d{yyyy-MM-dd}.%i.log.gz
</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<!-- or whenever the file size reaches 100MB -->
<maxFileSize>${maxFileSize}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!-- keep 30 days' worth of history -->
<maxHistory>${maxHistory}</maxHistory>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>${maxFileSize}</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
<pattern>%-1relative - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}:%line -- %msg%n</pattern>
</encoder>
</appender>
<!-- DEBUG级别日志 appender -->
<appender name="DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/Data/tomcat_logs/monitor/debug-log</file>
<file>${catalina.home}/logs/debug.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 按天回滚 daily -->
<fileNamePattern>${catalina.home}/logs/%d{yyyy-MM-dd}/debug-log.log</fileNamePattern>
<fileNamePattern>
${catalina.home}/logs/debug.%d{yyyy-MM-dd}.%i.log.gz
</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<!-- or whenever the file size reaches 100MB -->
<maxFileSize>${maxFileSize}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!-- 日志最大的历史 60天 -->
<maxHistory>${maxHistory}</maxHistory>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>${maxFileSize}</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
</encoder>
... ...
# ******************** send sms common configs ********************
sendsms.url=http://221.179.180.158:9009/HttpQuickProcess_utf-8/submitMessageAll
sendsms.username=yoho
sendsms.pwd=EMKVpwoJ
sendsms.username=yohoyw
sendsms.pwd=NCftHmJ9
sendsms.notice.productid=8
#---------------国都短信业务api参数----------------------
sendsms.guodu.Url=http://221.179.180.158:9007/QxtSms/QxtFirewall
sendsms.guodu.OperID=yoho
sendsms.guodu.OperPass=EMKVpwoJ
#---------------国都短信业务api参数----------------------
base_mobile=15905144483,18751986615,18652008443
#------------------------------------------支付回调手机号------------------------------------------------
... ...