[AMBARI-24180] Ambari metrics Service Check fails. (#1615)
author avijayanhwx <avijayan@hortonworks.com>
Tue, 26 Jun 2018 20:17:08 +0000 (13:17 -0700)
committer GitHub <noreply@github.com>
Tue, 26 Jun 2018 20:17:08 +0000 (13:17 -0700)
* [AMBARI-24180] Ambari metrics Service Check fails post EU. Error - 401 Authentication required in response.

* [AMBARI-24180] Ambari metrics Service Check fails.

* [AMBARI-24180] Ambari metrics Service Check fails. - 3

ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/HBaseTimelineMetricsService.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricStore.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/TimelineMetricClusterAggregator.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/TimelineMetricHostAggregator.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/aggregators/TimelineMetricReadHelper.java
ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/TimelineWebServices.java
ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TestTimelineMetricStore.java
ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricStoreWatcherTest.java

index 43a468c..d768d0d 100644 (file)
@@ -396,8 +396,10 @@ public class HBaseTimelineMetricsService extends AbstractService implements Time
     return metricsFunctions;
   }
 
-  public void putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
+  public TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
+    TimelinePutResponse response = new TimelinePutResponse();
     hBaseAccessor.insertMetricRecordsWithMetadata(metricMetadataManager, metrics, true);
+    return response;
   }
 
   @Override
index 182748d..0946511 100644 (file)
@@ -1105,6 +1105,9 @@ public class PhoenixHBaseAccessor {
     throws SQLException, IOException {
     if (condition.getPrecision().equals(Precision.SECONDS)) {
       TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
+      if (metric == null) {
+        return;
+      }
       if (f != null && f.getSuffix() != null) { //Case : Requesting "._rate" for precision data
         metric.setMetricName(metric.getMetricName() + f.getSuffix());
       }
index a723a2c..9589272 100644 (file)
@@ -69,6 +69,16 @@ public interface TimelineMetricStore {
   TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException;
 
   /**
+   * Stores metric information to the timeline store without any buffering of data.
+   *
+   * @param metrics An {@link TimelineMetrics}.
+   * @return An {@link org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse}.
+   * @throws SQLException, IOException
+   */
+  TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException;
+
+
+  /**
    * Store container metric into the timeline tore
    */
   TimelinePutResponse putContainerMetrics(List<ContainerMetric> metrics)
index 2ea5309..357e0ba 100644 (file)
@@ -98,7 +98,9 @@ public class TimelineMetricClusterAggregator extends AbstractTimelineAggregator
 
     while (rs.next()) {
       TimelineClusterMetric currentMetric = readHelper.fromResultSet(rs);
-
+      if (currentMetric == null) {
+        continue;
+      }
       MetricClusterAggregate currentHostAggregate =
         isClusterPrecisionInputTable ?
           readHelper.getMetricClusterAggregateFromResultSet(rs) :
index c9998d9..ba93792 100644 (file)
@@ -160,6 +160,10 @@ public class TimelineMetricClusterAggregatorSecond extends AbstractTimelineAggre
         // If rows belong to same host combine them before slicing. This
         // avoids issues across rows that belong to same hosts but get
         // counted as coming from different ones.
+        if (nextMetric == null) {
+          continue;
+        }
+        
         if (metric.equalsExceptTime(nextMetric)) {
           metric.addMetricValues(nextMetric.getMetricValues());
         } else {
index 6f2351b..a9ee385 100644 (file)
@@ -92,6 +92,9 @@ public class TimelineMetricHostAggregator extends AbstractTimelineAggregator {
     while (rs.next()) {
       TimelineMetric currentMetric =
         readHelper.getTimelineMetricKeyFromResultSet(rs);
+      if (currentMetric == null) {
+        continue;
+      }
       MetricHostAggregate currentHostAggregate =
         readHelper.getMetricHostAggregateFromResultSet(rs);
 
index 541cb46..9dfe4f7 100644 (file)
@@ -53,6 +53,9 @@ public class TimelineMetricReadHelper {
   public TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
     throws SQLException, IOException {
     TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+    if (metric == null) {
+      return null;
+    }
     TreeMap<Long, Double> sortedByTimeMetrics = PhoenixHBaseAccessor.readMetricFromJSON(rs.getString("METRICS"));
     metric.setMetricValues(sortedByTimeMetrics);
     return metric;
@@ -110,6 +113,9 @@ public class TimelineMetricReadHelper {
 
     byte[] uuid = rs.getBytes("UUID");
     TimelineMetric metric = metadataManagerInstance.getMetricFromUuid(uuid);
+    if (metric == null) {
+      return null;
+    }
     if (ignoreInstance) {
       metric.setInstanceId(null);
     }
@@ -147,7 +153,9 @@ public class TimelineMetricReadHelper {
 
     byte[] uuid = rs.getBytes("UUID");
     TimelineMetric timelineMetric = metadataManagerInstance.getMetricFromUuid(uuid);
-
+    if (timelineMetric == null) {
+      return null;
+    }
     return new TimelineClusterMetric(
       timelineMetric.getMetricName(),
       timelineMetric.getAppId(),
index 3bcbaf6..b3378be 100644 (file)
@@ -48,6 +48,7 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.ambari.metrics.core.timeline.TimelineMetricServiceSummary;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -77,6 +78,7 @@ public class TimelineWebServices {
   private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);
   
   private TimelineMetricStore timelineMetricStore;
+  private static final String SMOKETEST_METRIC_APP_ID = "amssmoketestfake";
 
   @Inject
   public TimelineWebServices(TimelineMetricStore timelineMetricStore) {
@@ -149,7 +151,11 @@ public class TimelineWebServices {
           TimelineUtils.dumpTimelineRecordtoJSON(metrics, true));
       }
 
-      return timelineMetricStore.putMetrics(metrics);
+      if (CollectionUtils.isNotEmpty(metrics.getMetrics()) && metrics.getMetrics().get(0).getAppId().equals(SMOKETEST_METRIC_APP_ID)) {
+        return timelineMetricStore.putMetricsSkipCache(metrics);
+      } else {
+        return timelineMetricStore.putMetrics(metrics);
+      }
 
     } catch (Exception e) {
       LOG.error("Error saving metrics.", e);
index 24b6fef..cec8afa 100644 (file)
@@ -82,6 +82,11 @@ public class TestTimelineMetricStore implements TimelineMetricStore {
   }
 
   @Override
+  public TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
+    return new TimelinePutResponse();
+  }
+
+  @Override
   public TimelinePutResponse putContainerMetrics(List<ContainerMetric> metrics)
       throws SQLException, IOException {
     return new TimelinePutResponse();
index eb64198..83e1651 100644 (file)
@@ -50,9 +50,9 @@ public class TimelineMetricStoreWatcherTest {
   public void testRunPositive() throws Exception {
     HBaseTimelineMetricsService metricStore = createNiceMock(HBaseTimelineMetricsService.class);
 
-    metricStore.putMetricsSkipCache(anyObject(TimelineMetrics.class));
-    expectLastCall().once();
-
+    expect(metricStore.putMetricsSkipCache(anyObject(TimelineMetrics.class)))
+      .andReturn(new TimelinePutResponse());
+    
     // metric found
     expect(metricStore.getTimelineMetrics(EasyMock.<List<String>>anyObject(),
       EasyMock.<List<String>>anyObject(), anyObject(String.class),