HADOOP-12240. Fix tests requiring native library to be skipped in non-native profile. Contributed by Masatake Iwasaki.
(cherry picked from commit 90bda9c611)
parent f95e3c3091
commit 90c5bf0605
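
This change replaces per-test calls to Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded()) with a shared GenericTestUtils.assumeNativeCodeLoaded() helper, so tests that depend on libhadoop are reported as skipped through JUnit's Assume mechanism instead of failing when the build runs without the native profile. As a rough illustration, the following is a minimal sketch of how a native-dependent test could guard itself with the new helper; the class and test names are hypothetical and not part of this patch:

import org.apache.hadoop.test.GenericTestUtils;
import org.junit.BeforeClass;
import org.junit.Test;

// Hypothetical example, not part of this commit: a test class whose cases
// only make sense when the native library (libhadoop) has been loaded.
public class TestNativeOnlyFeature {

  @BeforeClass
  public static void init() {
    // When the native library is not loaded, the assumption inside the
    // helper fails and JUnit marks every test in this class as skipped.
    GenericTestUtils.assumeNativeCodeLoaded();
  }

  @Test(timeout = 30000)
  public void testNativeCodePath() {
    // ... exercise functionality that requires the native library ...
  }
}

Centralizing the probe in GenericTestUtils keeps the native-library check in one place, so individual tests no longer need to import NativeCodeLoader or Assume directly, as the hunks below show.
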
@@ -478,6 +478,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-10615. FileInputStream in JenkinsHash#main() is never closed.
     (Chen He via ozawa)
 
+    HADOOP-12240. Fix tests requiring native library to be skipped in non-native
+    profile. (Masatake Iwasaki via ozawa)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

@@ -19,19 +19,18 @@ package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.BeforeClass;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assume.assumeTrue;
 
 public class TestCryptoStreamsWithOpensslAesCtrCryptoCodec
     extends TestCryptoStreams {
 
   @BeforeClass
   public static void init() throws Exception {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    GenericTestUtils.assumeNativeCodeLoaded();
     Configuration conf = new Configuration();
     conf.set(
         CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY,

@@ -34,6 +34,7 @@ import org.apache.hadoop.io.SequenceFile.Writer.Option;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;

@@ -140,6 +141,7 @@ public class TestSequenceFileAppend {
 
   @Test(timeout = 30000)
   public void testAppendRecordCompression() throws Exception {
+    GenericTestUtils.assumeNativeCodeLoaded();
 
     Path file = new Path(ROOT_PATH, "testseqappendblockcompr.seq");
     fs.delete(file, true);

@@ -173,6 +175,7 @@ public class TestSequenceFileAppend {
 
   @Test(timeout = 30000)
   public void testAppendBlockCompression() throws Exception {
+    GenericTestUtils.assumeNativeCodeLoaded();
 
     Path file = new Path(ROOT_PATH, "testseqappendblockcompr.seq");
     fs.delete(file, true);

@@ -247,6 +250,8 @@ public class TestSequenceFileAppend {
 
   @Test(timeout = 30000)
   public void testAppendSort() throws Exception {
+    GenericTestUtils.assumeNativeCodeLoaded();
+
     Path file = new Path(ROOT_PATH, "testseqappendSort.seq");
     fs.delete(file, true);
 

@@ -35,6 +35,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Layout;

@@ -43,6 +44,7 @@ import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.log4j.WriterAppender;
 import org.junit.Assert;
+import org.junit.Assume;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 

@@ -410,4 +412,11 @@ public abstract class GenericTestUtils {
       }
     }
   }
+
+  /**
+   * Skip test if native code is not loaded.
+   */
+  public static void assumeNativeCodeLoaded() {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+  }
 }