YARN-6905 Multiple HBaseTimelineStorage test failures due to missing FastNumberFormat (Contributed by Haibo Chen)

Vrushali C 2017-08-14 11:40:27 -07:00
parent d72124a442
commit 608a06cca5
5 changed files with 86 additions and 7 deletions
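
In short, the patch swaps every ApplicationId#toString() call in this module for a new local helper, HBaseTimelineStorageUtils.convertApplicationIdToString(). As the javadoc in the hunks below explains, toString() in hadoop-yarn-api 3.0 relies on FastNumberFormat, a class that is missing from the hadoop-common 2.5.1 jar this module mixes in, which is what breaks the HBase storage tests. Each call site changes roughly like this (an illustrative sketch, not copied from any one file; clusterTimestamp is a placeholder):

  // before: ApplicationId#toString() needs FastNumberFormat, which is
  // absent from hadoop-common 2.5.1, so the HBase storage tests fail
  String before = ApplicationId.newInstance(clusterTimestamp, 1).toString();

  // after: build the same "application_<clusterTimestamp>_<nnnn>" string
  // locally with java.text.NumberFormat (see HBaseTimelineStorageUtils below)
  String after = HBaseTimelineStorageUtils.convertApplicationIdToString(
      ApplicationId.newInstance(clusterTimestamp, 1));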

TestHBaseTimelineStorageApps.java

@@ -69,6 +69,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.application.Applica
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;
@@ -493,7 +494,8 @@ public class TestHBaseTimelineStorageApps {
     event.addInfo(expKey, expVal);
     final TimelineEntity entity = new ApplicationEntity();
-    entity.setId(ApplicationId.newInstance(0, 1).toString());
+    entity.setId(HBaseTimelineStorageUtils.convertApplicationIdToString(
+        ApplicationId.newInstance(0, 1)));
     entity.addEvent(event);
     TimelineEntities entities = new TimelineEntities();

TestHBaseTimelineStorageEntities.java

@@ -62,6 +62,7 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefi
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;
@@ -202,8 +203,9 @@ public class TestHBaseTimelineStorageEntities {
     String flow = "some_flow_name";
     String flowVersion = "AB7822C10F1111";
     long runid = 1002345678919L;
-    String appName = ApplicationId.newInstance(System.currentTimeMillis() +
-        9000000L, 1).toString();
+    String appName = HBaseTimelineStorageUtils.convertApplicationIdToString(
+        ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1)
+    );
     hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
     hbi.stop();
@@ -399,8 +401,8 @@ public class TestHBaseTimelineStorageEntities {
     String flow = "other_flow_name";
     String flowVersion = "1111F01C2287BA";
     long runid = 1009876543218L;
-    String appName = ApplicationId.newInstance(System.currentTimeMillis() +
-        9000000L, 1).toString();
+    String appName = HBaseTimelineStorageUtils.convertApplicationIdToString(
+        ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1));
     byte[] startRow =
         new EntityRowKeyPrefix(cluster, user, flow, runid, appName)
             .getRowKeyPrefix();
@@ -487,7 +489,9 @@ public class TestHBaseTimelineStorageEntities {
     event.addInfo(expKey, expVal);
     final TimelineEntity entity = new ApplicationEntity();
-    entity.setId(ApplicationId.newInstance(0, 1).toString());
+    entity.setId(
+        HBaseTimelineStorageUtils.convertApplicationIdToString(
+            ApplicationId.newInstance(0, 1)));
     entity.addEvent(event);
     TimelineEntities entities = new TimelineEntities();

AppIdKeyConverter.java

@@ -82,7 +82,8 @@ public final class AppIdKeyConverter implements KeyConverter<String> {
         Bytes.toLong(appIdBytes, 0, Bytes.SIZEOF_LONG));
     int seqId = HBaseTimelineStorageUtils.invertInt(
         Bytes.toInt(appIdBytes, Bytes.SIZEOF_LONG, Bytes.SIZEOF_INT));
-    return ApplicationId.newInstance(clusterTs, seqId).toString();
+    return HBaseTimelineStorageUtils.convertApplicationIdToString(
+        ApplicationId.newInstance(clusterTs, seqId));
   }

   /**

HBaseTimelineStorageUtils.java

@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
@@ -32,6 +33,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.io.IOException;
+import java.text.NumberFormat;
 import java.util.List;
 import java.util.Map;
@@ -240,4 +242,35 @@ public final class HBaseTimelineStorageUtils {
     long dayTimestamp = ts - (ts % MILLIS_ONE_DAY);
     return dayTimestamp;
   }
+
+  private static final ThreadLocal<NumberFormat> APP_ID_FORMAT =
+      new ThreadLocal<NumberFormat>() {
+        @Override
+        public NumberFormat initialValue() {
+          NumberFormat fmt = NumberFormat.getInstance();
+          fmt.setGroupingUsed(false);
+          fmt.setMinimumIntegerDigits(4);
+          return fmt;
+        }
+      };
+
+  /**
+   * A utility method that converts ApplicationId to string without using
+   * FastNumberFormat in order to avoid the incompatibility issue caused
+   * by mixing hadoop-common 2.5.1 and hadoop-yarn-api 3.0 in this module.
+   * This is a work-around implementation as discussed in YARN-6905.
+   *
+   * @param appId application id
+   * @return the string representation of the given application id
+   *
+   */
+  public static String convertApplicationIdToString(ApplicationId appId) {
+    StringBuilder sb = new StringBuilder(64);
+    sb.append(ApplicationId.appIdStrPrefix);
+    sb.append("_");
+    sb.append(appId.getClusterTimestamp());
+    sb.append('_');
+    sb.append(APP_ID_FORMAT.get().format(appId.getId()));
+    return sb.toString();
+  }
 }
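
Two notes on the helper added above: java.text.NumberFormat is not thread-safe, hence the per-thread instance via ThreadLocal, and setMinimumIntegerDigits(4) with grouping disabled reproduces the zero-padded sequence number that ApplicationId#toString() would emit through FastNumberFormat. A quick sketch of the expected output (illustrative values, not taken from the patch):

  String id = HBaseTimelineStorageUtils.convertApplicationIdToString(
      ApplicationId.newInstance(1234567890123L, 7));
  // id should be "application_1234567890123_0007"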

TestCustomApplicationIdConversion.java (new file)

@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.timelineservice.storage.common;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test for HBaseTimelineStorageUtils.convertApplicationIdToString(),
+ * a custom conversion from ApplicationId to String that avoids the
+ * incompatibility issue caused by mixing hadoop-common 2.5.1 and
+ * hadoop-yarn-api 3.0. See YARN-6905.
+ */
+public class TestCustomApplicationIdConversion {
+  @Test
+  public void testConvertAplicationIdToString() {
+    ApplicationId applicationId = ApplicationId.newInstance(0, 1);
+    String applicationIdStr =
+        HBaseTimelineStorageUtils.convertApplicationIdToString(applicationId);
+    Assert.assertEquals(applicationId,
+        ApplicationId.fromString(applicationIdStr));
+  }
+}
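
The test above only verifies the round trip, i.e. that the generated string parses back into the same ApplicationId via ApplicationId.fromString(). A stricter check that pins down the exact text could look like the following (a hypothetical extra assertion, not part of this patch):

  Assert.assertEquals("application_0_0001",
      HBaseTimelineStorageUtils.convertApplicationIdToString(
          ApplicationId.newInstance(0, 1)));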