/*
 * InfluxReporter based on yammer metrics-2.2.0: built to monitor our
 * data-stream components by pushing measurement data into InfluxDB over HTTP.
 * Shared here for fellow engineers.
 */
package com.bestv.metrics.report;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.*;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.reporting.AbstractPollingReporter;
import com.yammer.metrics.stats.Snapshot;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Created by XXXX on 2017/4/20.
*/
/**
* 继承 AbstractPollingReporter,实现influx输出格式及httpreport方式
*/
/**
 * A polling reporter that periodically converts every metric in a
 * {@link MetricsRegistry} into InfluxDB points and ships the whole batch to an
 * InfluxDB server over HTTP.
 *
 * <p>Each metric becomes one point whose measurement name is the metric name,
 * tagged with {@code group}, {@code type}, {@code host} and
 * {@code metric_type}; an even-sized dot-separated scope is expanded into
 * additional key/value tags (see {@link #buildMetricsPoint}).
 */
public class InfluxReporter extends AbstractPollingReporter implements MetricProcessor<InfluxReporter.Context> {

    private static final Logger LOG = LoggerFactory.getLogger(InfluxReporter.class);
    private static final MetricPredicate DEFAULT_METRIC_PREDICATE = MetricPredicate.ALL;

    /** Points accumulated during one polling run; rebuilt at the start of each {@link #run()}. */
    private BatchPoints batchPoints;
    /** InfluxDB HTTP endpoint passed in by the caller, e.g. "http://host:8086". */
    private String hostport;
    /** Local host name, attached to every point as the "host" tag. */
    private String tag_host;
    private InfluxDB influxDBclient;
    private Clock clock;
    private final MetricPredicate predicate;
    private Context context;

    /**
     * Enables the reporter for the default metrics registry, writing to
     * InfluxDB with the specified period.
     *
     * @param period   the period between successive outputs
     * @param hostPort the InfluxDB HTTP endpoint, e.g. "http://host:8086"
     * @param unit     the time unit of {@code period}
     */
    public static void enable(long period, String hostPort, TimeUnit unit) {
        enable(Metrics.defaultRegistry(), period, hostPort, unit);
    }

    /**
     * Enables the reporter for the given metrics registry, writing to InfluxDB
     * with the specified period and unrestricted output.
     *
     * @param metricsRegistry the metrics registry
     * @param period          the period between successive outputs
     * @param hostPort        the InfluxDB HTTP endpoint
     * @param unit            the time unit of {@code period}
     */
    public static void enable(MetricsRegistry metricsRegistry, long period, String hostPort, TimeUnit unit) {
        final InfluxReporter reporter = new InfluxReporter(metricsRegistry,
                hostPort,
                MetricPredicate.ALL);
        reporter.start(period, unit);
    }

    /**
     * Convenience constructor for the default metrics registry.
     *
     * @param hostPort the InfluxDB HTTP endpoint
     */
    public InfluxReporter(String hostPort) {
        this(Metrics.defaultRegistry(), hostPort, MetricPredicate.ALL);
    }

    /**
     * Creates a new {@link AbstractPollingReporter} instance.
     *
     * @param metricsRegistry the registry whose metrics are reported
     * @param hostPort        the InfluxDB HTTP endpoint
     * @param predicate       filter deciding which metrics are reported
     */
    public InfluxReporter(MetricsRegistry metricsRegistry, String hostPort, MetricPredicate predicate) {
        super(metricsRegistry, "influx-reporter");
        this.hostport = hostPort;
        this.influxDBclient = InfluxDBFactory.connect(hostPort);
        this.clock = Clock.defaultClock();
        // Automatically resolve the local host name once, reused for every point.
        this.tag_host = HostUtils.getHostName();
        this.predicate = predicate;
        this.context = new Context() {
            @Override
            public long getTime() {
                return InfluxReporter.this.clock.time();
            }
        };
    }

    @Override
    public void run() {
        try {
            // The BatchPoints parameters are largely irrelevant here: the data
            // is ultimately serialized and sent over HTTP by the client.
            this.batchPoints = BatchPoints
                    .database("Metrics")
                    .retentionPolicy("autogen")
                    .consistency(InfluxDB.ConsistencyLevel.ALL)
                    .build();
            printRegularMetrics(context);
            this.influxDBclient.write(batchPoints);
        } catch (Exception e) {
            // FIX: pass the throwable as the final argument (no "{}") so SLF4J
            // logs the full stack trace instead of just the exception toString.
            LOG.error("Cannot send metrics to InfluxDB", e);
        }
    }

    /** Walks every metric group in the registry and processes each matching metric. */
    private void printRegularMetrics(final Context context) {
        for (Map.Entry<String, SortedMap<MetricName, Metric>> entry : getMetricsRegistry().groupedMetrics(DEFAULT_METRIC_PREDICATE).entrySet()) {
            for (Map.Entry<MetricName, Metric> subEntry : entry.getValue().entrySet()) {
                final MetricName metricName = subEntry.getKey();
                final Metric metric = subEntry.getValue();
                if (predicate.matches(metricName, metric)) {
                    try {
                        metric.processWith(this, metricName, context);
                    } catch (Exception e) {
                        // One broken metric must not abort the whole batch.
                        LOG.error("Error printing regular metrics:", e);
                    }
                }
            }
        }
    }

    @Override
    public void processGauge(MetricName name, Gauge<?> gauge, Context context) throws Exception {
        Point.Builder pointbuilder = buildMetricsPoint(name, context);
        // FIX: tag value was misspelled "gague". Note this changes the stored
        // metric_type tag for gauges; existing dashboards may need updating.
        pointbuilder.tag("metric_type", "gauge");
        Object fieldValue = gauge.value();
        String fieldName = "value";
        // Long/Integer are widened to floating point so the field keeps a
        // single InfluxDB schema type regardless of the gauge's numeric type.
        if (fieldValue instanceof Float) {
            pointbuilder.addField(fieldName, (Float) fieldValue);
        } else if (fieldValue instanceof Double) {
            pointbuilder.addField(fieldName, (Double) fieldValue);
        } else if (fieldValue instanceof Long) {
            pointbuilder.addField(fieldName, ((Long) fieldValue).floatValue());
        } else if (fieldValue instanceof Integer) {
            pointbuilder.addField(fieldName, ((Integer) fieldValue).floatValue());
        } else if (fieldValue instanceof String) {
            pointbuilder.addField(fieldName, (String) fieldValue);
        } else {
            // Unsupported gauge value type: skip the point entirely.
            return;
        }
        batchPoints.point(pointbuilder.build());
    }

    @Override
    public void processCounter(MetricName metricName, Counter counter, Context context) throws Exception {
        Point.Builder pointbuilder = buildMetricsPoint(metricName, context);
        pointbuilder.tag("metric_type", "counter");
        pointbuilder.addField("count", counter.count());
        batchPoints.point(pointbuilder.build());
    }

    @Override
    public void processMeter(MetricName metricName, Metered meter, Context context) throws Exception {
        Point.Builder pointbuilder = buildMetricsPoint(metricName, context);
        pointbuilder.tag("metric_type", "meter");
        pointbuilder.tag("eventType", meter.eventType());
        pointbuilder.addField("count", meter.count());
        pointbuilder.addField("meanRate", meter.meanRate());
        pointbuilder.addField("1MinuteRate", meter.oneMinuteRate());
        pointbuilder.addField("5MinuteRate", meter.fiveMinuteRate());
        pointbuilder.addField("15MinuteRate", meter.fifteenMinuteRate());
        batchPoints.point(pointbuilder.build());
    }

    @Override
    public void processHistogram(MetricName metricName, Histogram histogram, Context context) throws Exception {
        final Snapshot snapshot = histogram.getSnapshot();
        Point.Builder pointbuilder = buildMetricsPoint(metricName, context);
        pointbuilder.tag("metric_type", "histogram");
        pointbuilder.addField("max", histogram.max());
        pointbuilder.addField("mean", histogram.mean());
        pointbuilder.addField("min", histogram.min());
        // FIX: was histogram.max() — the stddev field held the maximum.
        pointbuilder.addField("stddev", histogram.stdDev());
        pointbuilder.addField("sum", histogram.sum());
        pointbuilder.addField("median", snapshot.getMedian());
        pointbuilder.addField("p75", snapshot.get75thPercentile());
        pointbuilder.addField("p95", snapshot.get95thPercentile());
        pointbuilder.addField("p98", snapshot.get98thPercentile());
        pointbuilder.addField("p99", snapshot.get99thPercentile());
        pointbuilder.addField("p999", snapshot.get999thPercentile());
        batchPoints.point(pointbuilder.build());
    }

    @Override
    public void processTimer(MetricName metricName, Timer timer, Context context) throws Exception {
        final Snapshot snapshot = timer.getSnapshot();
        Point.Builder pointbuilder = buildMetricsPoint(metricName, context);
        pointbuilder.tag("metric_type", "timer");
        pointbuilder.addField("count", timer.count());
        pointbuilder.addField("meanRate", timer.meanRate());
        pointbuilder.addField("1MinuteRate", timer.oneMinuteRate());
        pointbuilder.addField("5MinuteRate", timer.fiveMinuteRate());
        pointbuilder.addField("15MinuteRate", timer.fifteenMinuteRate());
        pointbuilder.addField("max", timer.max());
        pointbuilder.addField("mean", timer.mean());
        pointbuilder.addField("min", timer.min());
        // FIX: was timer.max() — same copy-paste bug as the histogram path.
        pointbuilder.addField("stddev", timer.stdDev());
        pointbuilder.addField("sum", timer.sum());
        pointbuilder.addField("median", snapshot.getMedian());
        pointbuilder.addField("p75", snapshot.get75thPercentile());
        pointbuilder.addField("p95", snapshot.get95thPercentile());
        pointbuilder.addField("p98", snapshot.get98thPercentile());
        pointbuilder.addField("p99", snapshot.get99thPercentile());
        pointbuilder.addField("p999", snapshot.get999thPercentile());
        batchPoints.point(pointbuilder.build());
    }

    /**
     * Builds the common Point.Builder for one metric: measurement name,
     * timestamp, and the group/type/host tags.
     *
     * <p>If the metric has a scope with an even number of dot-separated
     * segments, those segments are treated as alternating tag key/value pairs;
     * otherwise the whole scope goes into a single "scope" tag.
     */
    private Point.Builder buildMetricsPoint(MetricName metricName, Context context) {
        // Measurement names should follow the naming convention (use a prefix).
        Point.Builder pointbuilder = Point.measurement(metricName.getName())
                .time(context.getTime(), TimeUnit.MILLISECONDS)
                .tag("group", metricName.getGroup())
                .tag("type", metricName.getType())
                // Host tag resolved automatically in the constructor.
                .tag("host", tag_host);
        // Extension area: expand the scope into extra tags.
        if (metricName.hasScope()) {
            final String scope = metricName.getScope();
            final List<String> scopes = Arrays.asList(scope.split("\\."));
            if (scopes.size() % 2 == 0) {
                final Iterator<String> it = scopes.iterator();
                while (it.hasNext()) {
                    // FIX: pull key and value into named locals instead of two
                    // next() calls inside one expression — same behavior, but
                    // no longer silently dependent on argument evaluation order.
                    final String tagKey = it.next();
                    final String tagValue = it.next();
                    pointbuilder.tag(tagKey, tagValue);
                }
            } else {
                pointbuilder.tag("scope", scope);
            }
        }
        return pointbuilder;
    }

    /** Supplies the timestamp used for every point in a polling run. */
    public interface Context {
        long getTime();
    }
}