PageAnalyzeSink.java
package com.yoho.trace.online.sink;

import com.yoho.trace.anaylzer.model.ApiTraceResult;
import com.yoho.trace.store.HBasePool;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.Calendar;

/**
 * Flink sink that writes per-page/per-API trace aggregates into two HBase
 * tables: one keyed by page for page-level lookups, one keyed by API as a
 * reverse index. Input elements are ("<pageId>-<api>", ApiTraceResult) pairs.
 *
 * Created by mingdan.ge on 2019/11/8.
 */
public class PageAnalyzeSink extends RichSinkFunction<Tuple2<String, ApiTraceResult>> {
    private Connection conn;
    private HTable table1;
    private HTable table2;
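    // Delimiter of the incoming key ("<pageId>-<api>") and separator in the
    // row keys built in invoke(); pageId itself therefore must not contain "-".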
    private static final String SPLIT_STR = "-";

    @Override
    public void open(Configuration parameters) throws Exception {
        conn = HBasePool.getConnection();

        table1 = (HTable) conn.getTable(TableName.valueOf("trace_page_analyze_minutes"));
        table1.setWriteBufferSize(1024 * 1024 * 20);
        table1.setAutoFlush(false, true); // buffer puts client-side instead of committing one by one

        table2 = (HTable) conn.getTable(TableName.valueOf("trace_api_source_analyze_minutes"));
        table2.setWriteBufferSize(1024 * 1024 * 20);
        table2.setAutoFlush(false, true); // buffer puts client-side instead of committing one by one
    }

    @Override
    public void invoke(Tuple2<String, ApiTraceResult> value, SinkFunction.Context context) throws Exception {
        // Incoming key (f0) has the form "<pageId>-<api>"; split it once.
        String[] keyParts = value.f0.split(SPLIT_STR);
        String pageId = keyParts[0];
        String api = keyParts[1];

        long now = System.currentTimeMillis();
        // Row key "<pageId>-<timestamp>-<api>" keeps a page's rows contiguous and
        // time-ordered, so a prefix scan covers one page over a time range.
        String rowKey1 = pageId + SPLIT_STR + now + SPLIT_STR + api;
//        logger.info("rowKey is {}", rowKey1);
        Put put1 = new Put(Bytes.toBytes(rowKey1));
        put1.addColumn(Bytes.toBytes("data"), Bytes.toBytes("times"), Bytes.toBytes(value.f1.getCallTimes()));
        put1.addColumn(Bytes.toBytes("data"), Bytes.toBytes("duration"), Bytes.toBytes(value.f1.getDuration() / value.f1.getCallTimes()));
        put1.addColumn(Bytes.toBytes("data"), Bytes.toBytes("total_duration"), Bytes.toBytes(value.f1.getDuration()));
        if(StringUtils.isNotEmpty(value.f1.getRegion())){
            put1.addColumn(Bytes.toBytes("data"), Bytes.toBytes("region"), Bytes.toBytes(value.f1.getRegion()));
        }

        // Reverse index: "<api>-<timestamp>-<pageId>" allows scanning the same
        // data by API instead of by page.
        String rowKey2 = api + SPLIT_STR + now + SPLIT_STR + pageId;
        Put put2 = new Put(Bytes.toBytes(rowKey2));
        put2.addColumn(Bytes.toBytes("data"), Bytes.toBytes("times"), Bytes.toBytes(value.f1.getCallTimes()));
        if(StringUtils.isNotEmpty(value.f1.getRegion())){
            put2.addColumn(Bytes.toBytes("data"), Bytes.toBytes("region"), Bytes.toBytes(value.f1.getRegion()));
        }

        table1.put(put1);
        table2.put(put2);
    }
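
    // Note: with autoFlush disabled, puts sit in the client-side write buffer
    // until it fills (20 MB here) or the tables are closed, so rows buffered at
    // the time of a failure are lost. A common fix is to flush on every Flink
    // checkpoint; a minimal sketch, assuming this class also implemented
    // org.apache.flink.streaming.api.checkpoint.CheckpointedFunction:
    //
    //     @Override
    //     public void snapshotState(FunctionSnapshotContext context) throws Exception {
    //         table1.flushCommits(); // drain buffered puts before the checkpoint completes
    //         table2.flushCommits();
    //     }
    //
    //     @Override
    //     public void initializeState(FunctionInitializationContext context) {
    //         // no managed state to restore
    //     }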

    @Override
    public void close() throws Exception {
        // Closing the tables flushes any puts still held in the write buffers.
        // Close each table separately so a failure on one doesn't skip the other.
        try {
            table1.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            table2.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        super.close();
        conn.close();
    }
}
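
/**
 * Usage sketch (illustrative, not part of the original file): wires the sink
 * into a minimal job. The sample key/value below and ApiTraceResult's no-arg
 * constructor are assumptions.
 */
class PageAnalyzeSinkUsageSketch {
    public static void main(String[] args) throws Exception {
        org.apache.flink.streaming.api.environment.StreamExecutionEnvironment env =
                org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getExecutionEnvironment();

        // Key format matches what PageAnalyzeSink.invoke() expects: "<pageId>-<api>".
        env.fromElements(Tuple2.of("page100-gateway.demoApi", new ApiTraceResult()))
           .addSink(new PageAnalyzeSink())
           .name("page-analyze-hbase-sink");

        env.execute("page-analyze-sink-demo");
    }
}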