HDFS-3753. Tests don't run with native libraries. Contributed by Colin Patrick McCabe
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1393188 13f79535-47bb-0310-9956-ffa450edef68
parent b9131749aa
commit 04988bb77b
dev-support/test-patch.sh
@@ -626,7 +626,7 @@ runTests () {
   echo ""
 
   echo "$MVN clean install -Pnative -D${PROJECT_NAME}PatchProcess"
-  $MVN clean install -Pnative -D${PROJECT_NAME}PatchProcess
+  $MVN clean install -Pnative -Drequire.test.libhadoop -D${PROJECT_NAME}PatchProcess
   if [[ $? != 0 ]] ; then
     ### Find and format names of failed tests
     failed_tests=`find . -name 'TEST*.xml' | xargs $GREP -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-| |g" | sed -e "s|\.xml||g"`
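With -Drequire.test.libhadoop on the precommit build, the new native-loader tests below fail outright instead of silently passing when libhadoop.so cannot be loaded; previously a broken native build could go unnoticed because the tests simply ran without it. The same check can be reproduced locally by running the command the script echoes: mvn clean install -Pnative -Drequire.test.libhadoop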
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.NativeCodeLoader;
+
+public class TestNativeCodeLoader {
+  static final Log LOG = LogFactory.getLog(TestNativeCodeLoader.class);
+
+  private static boolean requireTestJni() {
+    String rtj = System.getProperty("require.test.libhadoop");
+    if (rtj == null) return false;
+    if (rtj.compareToIgnoreCase("false") == 0) return false;
+    return true;
+  }
+
+  @Test
+  public void testNativeCodeLoaded() {
+    if (requireTestJni() == false) {
+      LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
+      return;
+    }
+    if (!NativeCodeLoader.isNativeCodeLoaded()) {
+      fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
+          "libhadoop.so was not loaded.");
+    }
+    LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
+  }
+}
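By default the new test is a no-op: without -Drequire.test.libhadoop (or with it set to "false"), requireTestJni() returns false and the test passes without checking anything, so only the -Pnative precommit run enforces native loading. As a hedged illustration only (TestNativeCodeLoaderSketch is a hypothetical class, not part of this commit), the same gate could be expressed with JUnit 4's Assume, which would report an optional native check as skipped rather than silently passed:

package org.apache.hadoop.util;

import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class TestNativeCodeLoaderSketch {
  private static boolean requireTestJni() {
    // Same semantics as the committed test: any value other than an
    // explicit "false" (including the empty string produced by a bare
    // -Drequire.test.libhadoop) means native testing is required.
    String rtj = System.getProperty("require.test.libhadoop");
    return rtj != null && !rtj.equalsIgnoreCase("false");
  }

  @Test
  public void testNativeCodeLoaded() {
    // Marks the test skipped (rather than passed) when not required.
    assumeTrue(requireTestJni());
    assertTrue("libhadoop.so was not loaded",
        NativeCodeLoader.isNativeCodeLoaded());
  }
}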
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -53,6 +53,9 @@ Release 2.0.3-alpha - Unreleased
     HDFS-3992. Method org.apache.hadoop.hdfs.TestHftpFileSystem.tearDown()
     sometimes throws NPEs. (Ivan A. Veselovsky via atm)
 
+    HDFS-3753. Tests don't run with native libraries.
+    (Colin Patrick McCabe via eli)
+
 Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHdfsNativeCodeLoader.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.NativeCodeLoader;
+
+public class TestHdfsNativeCodeLoader {
+  static final Log LOG = LogFactory.getLog(TestHdfsNativeCodeLoader.class);
+
+  private static boolean requireTestJni() {
+    String rtj = System.getProperty("require.test.libhadoop");
+    if (rtj == null) return false;
+    if (rtj.compareToIgnoreCase("false") == 0) return false;
+    return true;
+  }
+
+  @Test
+  public void testNativeCodeLoaded() {
+    if (requireTestJni() == false) {
+      LOG.info("TestNativeCodeLoader: libhadoop.so testing is not required.");
+      return;
+    }
+    if (!NativeCodeLoader.isNativeCodeLoaded()) {
+      String LD_LIBRARY_PATH = System.getenv().get("LD_LIBRARY_PATH");
+      if (LD_LIBRARY_PATH == null) LD_LIBRARY_PATH = "";
+      fail("TestNativeCodeLoader: libhadoop.so testing was required, but " +
+          "libhadoop.so was not loaded. LD_LIBRARY_PATH = " + LD_LIBRARY_PATH);
+    }
+    LOG.info("TestHdfsNativeCodeLoader: libhadoop.so is loaded.");
+  }
+}
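This HDFS copy is nearly identical to the hadoop-common test above; it exists so that the hadoop-hdfs surefire run, which gets its own LD_LIBRARY_PATH from the pom.xml change below, is verified independently. Its one addition is diagnostic: on failure it prints the LD_LIBRARY_PATH the forked test JVM actually saw, which is the first thing to inspect when libhadoop.so fails to load.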
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java
@@ -115,6 +115,14 @@ public class TestShortCircuitLocalRead {
     stm.close();
   }
+
+  private static byte [] arrayFromByteBuffer(ByteBuffer buf) {
+    ByteBuffer alt = buf.duplicate();
+    alt.clear();
+    byte[] arr = new byte[alt.remaining()];
+    alt.get(arr);
+    return arr;
+  }
 
   /**
    * Verifies that reading a file with the direct read(ByteBuffer) api gives the expected set of bytes.
    */
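The arrayFromByteBuffer helper copies a buffer's entire contents into a heap array: duplicate() gives an independent position and limit so the buffer under test is not disturbed, clear() resets position to 0 and limit to capacity, and the bulk get() drains all of it. It is needed because the next hunk makes the buffer direct, and direct buffers have no accessible backing array.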
@@ -122,7 +130,7 @@ public class TestShortCircuitLocalRead {
       int readOffset) throws IOException {
     DFSDataInputStream stm = (DFSDataInputStream)fs.open(name);
 
-    ByteBuffer actual = ByteBuffer.allocate(expected.length - readOffset);
+    ByteBuffer actual = ByteBuffer.allocateDirect(expected.length - readOffset);
 
     IOUtils.skipFully(stm, readOffset);
 
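Allocating the buffer with allocateDirect exercises the read(ByteBuffer) path with a buffer that has no backing byte[], which is exactly why actual.array() has to be replaced by arrayFromByteBuffer(actual) in the following hunks. A minimal standalone sketch of the distinction (DirectBufferDemo is an illustrative name, not from this commit):

import java.nio.ByteBuffer;

public class DirectBufferDemo {
  public static void main(String[] args) {
    ByteBuffer heap = ByteBuffer.allocate(16);
    ByteBuffer direct = ByteBuffer.allocateDirect(16);
    System.out.println(heap.hasArray());    // true: wraps a heap byte[]
    System.out.println(direct.hasArray());  // false: native memory, no byte[]
    try {
      direct.array();                       // unsupported for direct buffers
    } catch (UnsupportedOperationException e) {
      // This is the failure mode arrayFromByteBuffer avoids by copying.
      System.out.println("direct buffers do not expose array()");
    }
  }
}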
@@ -136,7 +144,8 @@ public class TestShortCircuitLocalRead {
     // Read across chunk boundary
     actual.limit(Math.min(actual.capacity(), nread + 517));
     nread += stm.read(actual);
-    checkData(actual.array(), readOffset, expected, nread, "A few bytes");
+    checkData(arrayFromByteBuffer(actual), readOffset, expected, nread,
+        "A few bytes");
     //Now read rest of it
     actual.limit(actual.capacity());
     while (actual.hasRemaining()) {
@@ -147,7 +156,7 @@ public class TestShortCircuitLocalRead {
       }
       nread += nbytes;
     }
-    checkData(actual.array(), readOffset, expected, "Read 3");
+    checkData(arrayFromByteBuffer(actual), readOffset, expected, "Read 3");
     stm.close();
   }
 
hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -876,7 +876,7 @@
             <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
             <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
             <environmentVariables>
-              <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
+              <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${basedir}/../../hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/</LD_LIBRARY_PATH>
               <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
             </environmentVariables>
             <systemPropertyVariables>
@@ -894,6 +894,7 @@
               <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
               <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
               <java.security.egd>file:///dev/urandom</java.security.egd>
+              <require.test.libhadoop>${require.test.libhadoop}</require.test.libhadoop>
             </systemPropertyVariables>
             <includes>
               <include>**/Test*.java</include>
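Taken together, the two pom.xml hunks wire the native build into the HDFS test run: the extended LD_LIBRARY_PATH lets the forked test JVMs find a libhadoop.so built under hadoop-common's native target directory as well as HDFS's own, and the forwarded require.test.libhadoop entry is what makes System.getProperty("require.test.libhadoop") visible inside the tests. Without that systemPropertyVariables line, -Drequire.test.libhadoop on the Maven command line would set the property only in Maven's own JVM, not in surefire's forked test JVMs.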