/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode.metrics;

import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.lib.Interns;

import java.io.IOException;

/**
 * Helper that publishes an {@link FSDatasetMBean}'s storage and cache
 * statistics through the metrics2 framework, allowing the bean to act as a
 * metrics source.
 */
public class DataNodeMetricHelper {

  /** Utility class with only static members; not meant to be instantiated. */
  private DataNodeMetricHelper() {
  }

  /**
   * Adds a metrics record for the given dataset bean to the collector so the
   * bean can act as a metrics2 source. The record is named after the bean's
   * concrete class and carries its capacity, usage, volume-failure and cache
   * statistics as gauges, plus the storage ID as a tag.
   *
   * @param collector metrics collector the record is added to
   * @param beanClass the bean that currently implements the metric functions
   * @param context a string that identifies the context
   * @throws IOException if {@code beanClass} is null
   */
  public static void getMetrics(MetricsCollector collector,
                                FSDatasetMBean beanClass, String context)
      throws IOException {

    if (beanClass == null) {
      throw new IOException("beanClass cannot be null");
    }

    // The record is keyed by the concrete implementation's class name.
    String className = beanClass.getClass().getName();

    collector.addRecord(className)
        .setContext(context)
        .addGauge(Interns.info("Capacity", "Total storage capacity"),
            beanClass.getCapacity())
        .addGauge(Interns.info("DfsUsed", "Total bytes used by dfs datanode"),
            beanClass.getDfsUsed())
        .addGauge(Interns.info("Remaining", "Total bytes of free storage"),
            beanClass.getRemaining())
        .add(new MetricsTag(Interns.info("StorageInfo", "Storage ID"),
            beanClass.getStorageInfo()))
        .addGauge(Interns.info("NumFailedVolumes", "Number of failed Volumes" +
            " in the data Node"), beanClass.getNumFailedVolumes())
        .addGauge(Interns.info("LastVolumeFailureDate", "Last Volume failure in" +
            " milliseconds from epoch"), beanClass.getLastVolumeFailureDate())
        .addGauge(Interns.info("EstimatedCapacityLostTotal", "Total capacity lost"
            + " due to volume failure"), beanClass.getEstimatedCapacityLostTotal())
        .addGauge(Interns.info("CacheUsed", "Datanode cache used in bytes"),
            beanClass.getCacheUsed())
        .addGauge(Interns.info("CacheCapacity", "Datanode cache capacity"),
            beanClass.getCacheCapacity())
        .addGauge(Interns.info("NumBlocksCached", "Datanode number" +
            " of blocks cached"), beanClass.getNumBlocksCached())
        .addGauge(Interns.info("NumBlocksFailedToCache", "Datanode number of " +
            "blocks failed to cache"), beanClass.getNumBlocksFailedToCache())
        .addGauge(Interns.info("NumBlocksFailedToUnCache", "Datanode number of" +
            " blocks failed in cache eviction"),
            beanClass.getNumBlocksFailedToUncache());

  }

}