diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 3c7709980af..d48479a9dc6 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -537,9 +537,6 @@ Trunk (Unreleased)
HADOOP-12553. [JDK8] Fix javadoc error caused by illegal tag. (aajisaka)
- HADOOP-11505. Various native parts use bswap incorrectly and unportably
- (Alan Burlison via aw)
-
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
index a8762d5db5e..63bb773fb2b 100644
--- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt
+++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -41,131 +41,6 @@ endif()
# Configure JNI.
include(HadoopJNI)
-#
-# Endian configuration, as per http://austingroupbugs.net/view.php?id=162#c665
-#
-
-# Work out the endianness, set header macro values.
-include(TestBigEndian)
-include(CheckIncludeFile)
-include(CheckSymbolExists)
-test_big_endian(_bigendian)
-if(_bigendian)
- set(HADOOP_BYTE_ORDER "HADOOP_BIG_ENDIAN")
-else()
- set(HADOOP_BYTE_ORDER "HADOOP_LITTLE_ENDIAN")
-endif()
-
-# Linux, NetBSD, FreeBSD and OpenBSD all provide htoXXX definitions in endian.h or sys/endian.h.
-check_include_file("endian.h" _endian_h)
-if (_endian_h)
- set(HADOOP_ENDIAN_H "endian.h")
-else()
- check_include_file("sys/endian.h" _sys_endian_h)
- if (_sys_endian_h)
- set(HADOOP_ENDIAN_H "sys/endian.h")
- endif()
-endif()
-if(DEFINED HADOOP_ENDIAN_H)
-check_symbol_exists("be64toh" ${HADOOP_ENDIAN_H} _be64toh)
- if( _be64toh)
- set(HADOOP_HTOBE16 "htobe16")
- set(HADOOP_HTOLE16 "htole16")
- set(HADOOP_BE16TOH "be16toh")
- set(HADOOP_LE16TOH "le16toh")
- set(HADOOP_HTOBE32 "htobe32")
- set(HADOOP_HTOLE32 "htole32")
- set(HADOOP_BE32TOH "be32toh")
- set(HADOOP_LE32TOH "le32toh")
- set(HADOOP_HTOBE64 "htobe64")
- set(HADOOP_HTOLE64 "htole64")
- set(HADOOP_BE64TOH "be64toh")
- set(HADOOP_LE64TOH "le64toh")
- set(_have_endian TRUE)
- unset(_be64toh)
- else()
- message(FATAL_ERROR "endian.h located but doesn't contain be64toh")
- endif()
-endif()
-
-# Solaris doesn't provide htoXXX, we have to provide alternatives.
-if(NOT _have_endian)
- check_include_file("sys/byteorder.h" _sys_byteorder_h)
- if(_sys_byteorder_h)
- set(HADOOP_ENDIAN_H "sys/byteorder.h")
- check_symbol_exists("BSWAP_64" ${HADOOP_ENDIAN_H} _bswap_64)
- endif()
- if(_sys_byteorder_h AND _bswap_64)
- if(_bigendian)
- set(HADOOP_HTOBE16 "")
- set(HADOOP_HTOLE16 "BSWAP_16")
- set(HADOOP_BE16TOH "")
- set(HADOOP_LE16TOH "BSWAP_16")
- set(HADOOP_HTOBE32 "")
- set(HADOOP_HTOLE32 "BSWAP_32")
- set(HADOOP_BE32TOH "")
- set(HADOOP_LE32TOH "BSWAP_32")
- set(HADOOP_HTOBE64 "")
- set(HADOOP_HTOLE64 "BSWAP_64")
- set(HADOOP_BE64TOH "")
- set(HADOOP_LE64TOH "BSWAP_64")
- else()
- set(HADOOP_HTOBE16 "BSWAP_16")
- set(HADOOP_HTOLE16 "")
- set(HADOOP_BE16TOH "BSWAP_16")
- set(HADOOP_LE16TOH "")
- set(HADOOP_HTOBE32 "BSWAP_32")
- set(HADOOP_HTOLE32 "")
- set(HADOOP_BE32TOH "BSWAP_32")
- set(HADOOP_LE32TOH "")
- set(HADOOP_HTOBE64 "BSWAP_64")
- set(HADOOP_HTOLE64 "")
- set(HADOOP_BE64TOH "BSWAP_64")
- set(HADOOP_LE64TOH "")
- endif()
- set(_have_endian TRUE)
- unset(_sys_byteorder_h)
- unset(_bswap_64)
- endif()
-endif()
-
-# OSX uses libkern/OSByteOrder.h and OSSwapXtoY.
-if(NOT _have_endian)
- check_include_file("libkern/OSByteOrder.h" _libkern_osbyteorder_h)
- if(_libkern_osbyteorder_h)
- set(HADOOP_ENDIAN_H "libkern/OSByteOrder.h")
- check_symbol_exists("OSSwapHostToLittleInt64" ${HADOOP_ENDIAN_H} _osswaphosttolittleint64)
- endif()
- if(_libkern_osbyteorder_h AND _osswaphosttolittleint64)
- set(HADOOP_HTOBE16 "OSSwapHostToBigInt16")
- set(HADOOP_HTOLE16 "OSSwapHostToLittleInt16")
- set(HADOOP_BE16TOH "OSSwapBigToHostInt16")
- set(HADOOP_LE16TOH "OSSwapLittleToHostInt16")
- set(HADOOP_HTOBE32 "OSSwapHostToBigInt32")
- set(HADOOP_HTOLE32 "OSSwapHostToLittleInt32")
- set(HADOOP_BE32TOH "OSSwapBigToHostInt32")
- set(HADOOP_LE32TOH "OSSwapLittleToHostInt32")
- set(HADOOP_HTOBE64 "OSSwapHostToBigInt64")
- set(HADOOP_HTOLE64 "OSSwapHostToLittleInt64")
- set(HADOOP_BE64TOH "OSSwapBigToHostInt64")
- set(HADOOP_LE64TOH "OSSwapLittleToHostInt64")
- set(_have_endian TRUE)
- unset(_libkern_osbyteorder_h)
- unset(_osswaphosttolittleint64)
- endif()
-endif()
-
-# Bail if we don't know the endian definitions for this platform.
-if(NOT _have_endian)
- message(FATAL_ERROR "Can't provide endianness definitions for this platform")
-endif()
-
-# Configure the hadoop_endian.h header file.
-configure_file(${CMAKE_SOURCE_DIR}/hadoop_endian.h.cmake ${CMAKE_BINARY_DIR}/hadoop_endian.h)
-unset(_bigendian)
-unset(_have_endian)
-unset(HADOOP_ENDIAN_H)
-
# Require zlib.
set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
hadoop_set_find_shared_library_version("1")
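The block removed above computed a set of HADOOP_* variables and fed them through configure_file() into hadoop_endian.h. The template itself does not appear in this diff; a minimal sketch of what it plausibly contained, reconstructed from the variable names (hypothetical and abridged to the 32/64-bit macros; this is not the actual removed file):

    /* hadoop_endian.h.cmake -- hypothetical reconstruction from the CMake
     * variables above.  16-bit variants are analogous. */
    #ifndef HADOOP_ENDIAN_H
    #define HADOOP_ENDIAN_H
    #include <@HADOOP_ENDIAN_H@>
    #define @HADOOP_BYTE_ORDER@ 1
    /* An empty substitution (e.g. HADOOP_BE32TOH in the big-endian Solaris
     * branch) collapses the macro into a plain parenthesized no-op. */
    #define hadoop_htobe32(x) @HADOOP_HTOBE32@(x)
    #define hadoop_htole32(x) @HADOOP_HTOLE32@(x)
    #define hadoop_be32toh(x) @HADOOP_BE32TOH@(x)
    #define hadoop_le32toh(x) @HADOOP_LE32TOH@(x)
    #define hadoop_htobe64(x) @HADOOP_HTOBE64@(x)
    #define hadoop_htole64(x) @HADOOP_HTOLE64@(x)
    #define hadoop_be64toh(x) @HADOOP_BE64TOH@(x)
    #define hadoop_le64toh(x) @HADOOP_LE64TOH@(x)
    #endif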
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
index 988ccf2eba6..b3bb69959b2 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
@@ -37,7 +37,6 @@
#include "crc32c_tables.h"
#include "bulk_crc32.h"
#include "gcc_optimizations.h"
-#include "hadoop_endian.h"
#define CRC_INITIAL_VAL 0xffffffff
@@ -164,7 +163,7 @@ static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length) {
for (li=0; li < running_length/8; li++) {
uint32_t term1;
uint32_t term2;
- crc ^= hadoop_htole32(*(uint32_t *)buf);
+ crc ^= *(uint32_t *)buf;
buf += 4;
term1 = CRC32C_T8_7[crc & 0x000000FF] ^
CRC32C_T8_6[(crc >> 8) & 0x000000FF];
@@ -172,10 +171,10 @@ static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length) {
crc = term1 ^
CRC32C_T8_5[term2 & 0x000000FF] ^
CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
- term1 = CRC32C_T8_3[hadoop_htole32(*(uint32_t *)buf) & 0x000000FF] ^
- CRC32C_T8_2[(hadoop_htole32(*(uint32_t *)buf) >> 8) & 0x000000FF];
+ term1 = CRC32C_T8_3[(*(uint32_t *)buf) & 0x000000FF] ^
+ CRC32C_T8_2[((*(uint32_t *)buf) >> 8) & 0x000000FF];
- term2 = hadoop_htole32((*(uint32_t *)buf)) >> 16;
+ term2 = (*(uint32_t *)buf) >> 16;
crc = crc ^
term1 ^
CRC32C_T8_1[term2 & 0x000000FF] ^
@@ -210,7 +209,7 @@ static uint32_t crc32_zlib_sb8(
for (li=0; li < running_length/8; li++) {
uint32_t term1;
uint32_t term2;
- crc ^= hadoop_htole32(*(uint32_t *)buf);
+ crc ^= *(uint32_t *)buf;
buf += 4;
term1 = CRC32_T8_7[crc & 0x000000FF] ^
CRC32_T8_6[(crc >> 8) & 0x000000FF];
@@ -218,10 +217,10 @@ static uint32_t crc32_zlib_sb8(
crc = term1 ^
CRC32_T8_5[term2 & 0x000000FF] ^
CRC32_T8_4[(term2 >> 8) & 0x000000FF];
- term1 = CRC32_T8_3[hadoop_htole32(*(uint32_t *)buf) & 0x000000FF] ^
- CRC32_T8_2[(hadoop_htole32(*(uint32_t *)buf) >> 8) & 0x000000FF];
+ term1 = CRC32_T8_3[(*(uint32_t *)buf) & 0x000000FF] ^
+ CRC32_T8_2[((*(uint32_t *)buf) >> 8) & 0x000000FF];
- term2 = hadoop_htole32(*(uint32_t *)buf) >> 16;
+ term2 = (*(uint32_t *)buf) >> 16;
crc = crc ^
term1 ^
CRC32_T8_1[term2 & 0x000000FF] ^
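Dropping hadoop_htole32 leaves the slicing-by-8 loops reading *(uint32_t *)buf directly, which matches the CRC lookup tables only on little-endian hosts. For comparison, a portable little-endian load, roughly what the removed macro provided on big-endian machines, can be sketched as:

    #include <stdint.h>

    /* Sketch only: read 4 bytes as a little-endian uint32_t regardless of
     * host byte order.  On little-endian hosts this compiles down to a
     * plain load, which is what the reverted code now assumes everywhere. */
    static inline uint32_t load_le32(const uint8_t *p) {
      return (uint32_t)p[0]         | ((uint32_t)p[1] << 8) |
             ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }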
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 99428b0d772..f878a949f01 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -90,7 +90,6 @@ include_directories(
${SRC}/src/util
${SRC}/src/lib
${SRC}/test
- ../../../../hadoop-common-project/hadoop-common/target/native
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_BINARY_DIR}
${JNI_INCLUDE_DIRS}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java
index df6570a28bd..1ec05dbc077 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java
@@ -42,8 +42,8 @@ import org.apache.hadoop.classification.InterfaceStability;
*
* int HivePlatform::HiveKeyComparator(const char * src, uint32_t srcLength,
* const char * dest, uint32_t destLength) {
- * uint32_t sl = hadoop_be32toh(*(uint32_t*)src);
- * uint32_t dl = hadoop_be32toh(*(uint32_t*)dest);
+ * uint32_t sl = bswap(*(uint32_t*)src);
+ * uint32_t dl = bswap(*(uint32_t*)dest);
* return NativeObjectFactory::BytesComparator(src + 4, sl, dest + 4, dl);
* }
*
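The updated javadoc example relies on the nativetask bswap()/bswap64() helpers. Their semantics, an unconditional byte reversal, can be sketched with compiler builtins (the builtins stand in for the tree's own implementation here):

    #include <stdint.h>

    /* Semantic sketch of the bswap()/bswap64() helpers used above,
     * expressed with GCC/Clang builtins rather than the tree's own code. */
    static inline uint32_t bswap_sketch(uint32_t v)   { return __builtin_bswap32(v); }
    static inline uint64_t bswap64_sketch(uint64_t v) { return __builtin_bswap64(v); }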
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index f1336efc17b..ba026f5f4fe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -40,6 +40,15 @@ enum NativeObjectType {
BatchHandlerType = 1,
};
+/**
+ * Endianness setting
+ */
+enum Endium {
+ LITTLE_ENDIUM = 0,
+ LARGE_ENDIUM = 1
+};
+
#define NATIVE_COMBINER "native.combiner.class"
#define NATIVE_PARTITIONER "native.partitioner.class"
#define NATIVE_MAPPER "native.mapper.class"
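The enum is consumed by MCollectorOutputHandler below, which hard-codes LARGE_ENDIUM. A hypothetical probe, not present in the tree, that maps the host's byte order onto this enum would look like:

    #include <stdint.h>

    enum Endium { LITTLE_ENDIUM = 0, LARGE_ENDIUM = 1 };  /* as in NativeTask.h */

    /* Hypothetical helper: detect the host's byte order in Endium terms. */
    static Endium hostEndium() {
      const uint16_t probe = 1;
      return (*(const uint8_t *)&probe == 1) ? LITTLE_ENDIUM : LARGE_ENDIUM;
    }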
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
index 7ce26f118ff..ce36239ff7b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
@@ -104,8 +104,8 @@ int32_t BlockDecompressStream::read(void * buff, uint32_t length) {
THROW_EXCEPTION(IOException, "readFully get incomplete data");
}
_compressedBytesRead += rd;
- sizes[0] = hadoop_be32toh(sizes[0]);
- sizes[1] = hadoop_be32toh(sizes[1]);
+ sizes[0] = bswap(sizes[0]);
+ sizes[1] = bswap(sizes[1]);
if (sizes[0] <= length) {
uint32_t len = decompressOneBlock(sizes[1], buff, sizes[0]);
if (len != sizes[0]) {
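Both swapped reads decode the codec stream's block framing: each block is preceded by two 4-byte lengths stored big-endian. The implied layout, as a hypothetical struct (not a type the tree defines):

    #include <stdint.h>

    /* Framing implied by the reads above: both fields travel big-endian
     * and are byte-swapped into host order on little-endian machines. */
    struct BlockHeader {
      uint32_t uncompressedLength;  /* sizes[0] */
      uint32_t compressedLength;    /* sizes[1] */
    };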
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc
index 23c6c46f47f..48c96b508c3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc
@@ -38,8 +38,8 @@ void Lz4CompressStream::compressOneBlock(const void * buff, uint32_t length) {
int ret = LZ4_compress((char*)buff, _tempBuffer + 8, length);
if (ret > 0) {
compressedLength = ret;
- ((uint32_t*)_tempBuffer)[0] = hadoop_be32toh(length);
- ((uint32_t*)_tempBuffer)[1] = hadoop_be32toh((uint32_t)compressedLength);
+ ((uint32_t*)_tempBuffer)[0] = bswap(length);
+ ((uint32_t*)_tempBuffer)[1] = bswap((uint32_t)compressedLength);
_stream->write(_tempBuffer, compressedLength + 8);
_compressedBytesWritten += (compressedLength + 8);
} else {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc
index 04380aca996..a0417e06f45 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc
@@ -37,8 +37,8 @@ void SnappyCompressStream::compressOneBlock(const void * buff, uint32_t length)
snappy_status ret = snappy_compress((const char*)buff, length, _tempBuffer + 8,
&compressedLength);
if (ret == SNAPPY_OK) {
- ((uint32_t*)_tempBuffer)[0] = hadoop_be32toh(length);
- ((uint32_t*)_tempBuffer)[1] = hadoop_be32toh((uint32_t)compressedLength);
+ ((uint32_t*)_tempBuffer)[0] = bswap(length);
+ ((uint32_t*)_tempBuffer)[1] = bswap((uint32_t)compressedLength);
_stream->write(_tempBuffer, compressedLength + 8);
_compressedBytesWritten += (compressedLength + 8);
} else if (ret == SNAPPY_INVALID_INPUT) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc
index b18d057efff..5f3863eb629 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc
@@ -15,7 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
#include "CombineHandler.h"
namespace NativeTask {
@@ -49,8 +48,8 @@ uint32_t CombineHandler::feedDataToJavaInWritableSerialization() {
if (_kvCached) {
uint32_t kvLength = _key.outerLength + _value.outerLength + KVBuffer::headerLength();
- outputInt(hadoop_be32toh(_key.outerLength));
- outputInt(hadoop_be32toh(_value.outerLength));
+ outputInt(bswap(_key.outerLength));
+ outputInt(bswap(_value.outerLength));
outputKeyOrValue(_key, _kType);
outputKeyOrValue(_value, _vType);
@@ -74,8 +73,8 @@ uint32_t CombineHandler::feedDataToJavaInWritableSerialization() {
} else {
firstKV = false;
//write final key length and final value length
- outputInt(hadoop_be32toh(_key.outerLength));
- outputInt(hadoop_be32toh(_value.outerLength));
+ outputInt(bswap(_key.outerLength));
+ outputInt(bswap(_value.outerLength));
outputKeyOrValue(_key, _kType);
outputKeyOrValue(_value, _vType);
@@ -102,7 +101,7 @@ void CombineHandler::outputKeyOrValue(SerializeInfo & KV, KeyValueType type) {
output(KV.buffer.data(), KV.buffer.length());
break;
case BytesType:
- outputInt(hadoop_be32toh(KV.buffer.length()));
+ outputInt(bswap(KV.buffer.length()));
output(KV.buffer.data(), KV.buffer.length());
break;
default:
@@ -203,8 +202,8 @@ void CombineHandler::write(char * buf, uint32_t length) {
uint32_t outputRecordCount = 0;
while (remain > 0) {
kv = (KVBuffer *)pos;
- kv->keyLength = hadoop_be32toh(kv->keyLength);
- kv->valueLength = hadoop_be32toh(kv->valueLength);
+ kv->keyLength = bswap(kv->keyLength);
+ kv->valueLength = bswap(kv->valueLength);
_writer->write(kv->getKey(), kv->keyLength, kv->getValue(), kv->valueLength);
outputRecordCount++;
remain -= kv->length();
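CombineHandler emits and consumes one record framing throughout: two big-endian uint32 length prefixes followed by the key and value bytes, which write() swaps in place before handing each record to the writer. Sketched as a hypothetical header struct:

    #include <stdint.h>

    /* Record framing used by the handler above (sketch):
     *   [keyLength:4, big-endian][valueLength:4, big-endian][key][value] */
    struct KVRecordHeader {
      uint32_t keyLength;
      uint32_t valueLength;
      /* key bytes, then value bytes, follow immediately */
    };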
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc
index 4921b3331bc..7e4ae448277 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc
@@ -30,7 +30,7 @@ using std::vector;
namespace NativeTask {
MCollectorOutputHandler::MCollectorOutputHandler()
- : _collector(NULL), _dest(NULL) {
+ : _collector(NULL), _dest(NULL), _endium(LARGE_ENDIUM) {
}
MCollectorOutputHandler::~MCollectorOutputHandler() {
@@ -73,9 +73,11 @@ void MCollectorOutputHandler::handleInput(ByteBuffer & in) {
THROW_EXCEPTION(IOException, "k/v meta information incomplete");
}
- kvBuffer->partitionId = hadoop_be32toh(kvBuffer->partitionId);
- kvBuffer->buffer.keyLength = hadoop_be32toh(kvBuffer->buffer.keyLength);
- kvBuffer->buffer.valueLength = hadoop_be32toh(kvBuffer->buffer.valueLength);
+ if (_endium == LARGE_ENDIUM) {
+ kvBuffer->partitionId = bswap(kvBuffer->partitionId);
+ kvBuffer->buffer.keyLength = bswap(kvBuffer->buffer.keyLength);
+ kvBuffer->buffer.valueLength = bswap(kvBuffer->buffer.valueLength);
+ }
uint32_t kvLength = kvBuffer->buffer.length();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.h
index 2e218061455..fe4635ff4a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.h
@@ -35,6 +35,8 @@ private:
// state info for large KV pairs
char * _dest;
+ Endium _endium;
+
public:
MCollectorOutputHandler();
virtual ~MCollectorOutputHandler();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h
index 09606d85437..4929426d9f8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h
@@ -115,7 +115,7 @@ public:
* read uint32_t big endian
*/
inline uint32_t read_uint32_be() {
- return hadoop_be32toh(read_uint32_le());
+ return bswap(read_uint32_le());
}
};
@@ -198,7 +198,7 @@ public:
}
inline void write_uint32_be(uint32_t v) {
- write_uint32_le(hadoop_be32toh(v));
+ write_uint32_le(bswap(v));
}
inline void write_uint64_le(uint64_t v) {
@@ -211,7 +211,7 @@ public:
}
inline void write_uint64_be(uint64_t v) {
- write_uint64_le(hadoop_be64toh(v));
+ write_uint64_le(bswap64(v));
}
inline void write_vlong(int64_t v) {
@@ -278,11 +278,12 @@ struct KVBuffer {
}
uint32_t length() {
- return keyLength + valueLength + SIZE_OF_KV_LENGTH;
+ return keyLength + valueLength + SIZE_OF_KEY_LENGTH + SIZE_OF_VALUE_LENGTH;
}
uint32_t lengthConvertEndium() {
- return hadoop_be32toh(keyLength) + hadoop_be32toh(valueLength) + SIZE_OF_KV_LENGTH;
+ long value = bswap64(*((long *)this));
+ return (value >> 32) + value + SIZE_OF_KEY_LENGTH + SIZE_OF_VALUE_LENGTH;
}
void fill(const void * key, uint32_t keylen, const void * value, uint32_t vallen) {
@@ -298,7 +299,7 @@ struct KVBuffer {
}
static uint32_t headerLength() {
- return SIZE_OF_KV_LENGTH;
+ return SIZE_OF_KEY_LENGTH + SIZE_OF_VALUE_LENGTH;
}
};
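The rewritten lengthConvertEndium() converts both adjacent 4-byte length fields with a single 8-byte swap: after bswap64, the high half holds the swapped keyLength and the low half the swapped valueLength, and the uint32_t return keeps only the low 32 bits of each term. A standalone check of the trick, assuming a little-endian 64-bit host as the nativetask code itself does:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    /* Verifies the single-bswap64 trick used by lengthConvertEndium():
     * one 64-bit byte swap converts two adjacent big-endian uint32 fields.
     * Assumes a little-endian host. */
    int main() {
      uint32_t be[2] = { __builtin_bswap32(3),    /* keyLength, big-endian */
                         __builtin_bswap32(5) };  /* valueLength, big-endian */
      uint64_t v;
      std::memcpy(&v, be, sizeof v);
      v = __builtin_bswap64(v);
      assert((uint32_t)(v >> 32) == 3);  /* swapped keyLength */
      assert((uint32_t)v == 5);          /* swapped valueLength */
      return 0;
    }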
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc
index cbe1b289f77..2d3e0b5da0f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc
@@ -60,7 +60,7 @@ bool IFileReader::nextPartition() {
if (4 != _stream->readFully(&chsum, 4)) {
THROW_EXCEPTION(IOException, "read ifile checksum failed");
}
- uint32_t actual = hadoop_be32toh(chsum);
+ uint32_t actual = bswap(chsum);
uint32_t expect = _source->getChecksum();
if (actual != expect) {
THROW_EXCEPTION_EX(IOException, "read ifile checksum not match, actual %x expect %x", actual,
@@ -130,7 +130,7 @@ void IFileWriter::endPartition() {
}
uint32_t chsum = _dest->getChecksum();
- chsum = hadoop_be32toh(chsum);
+ chsum = bswap(chsum);
_stream->write(&chsum, sizeof(chsum));
_stream->flush();
IFileSegment * info = &(_spillFileSegments[_spillFileSegments.size() - 1]);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h
index 414dc271e73..e397f90545c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h
@@ -74,7 +74,7 @@ public:
keyLen = WritableUtils::ReadVInt(kvbuff, len);
break;
case BytesType:
- keyLen = hadoop_be32toh(*(uint32_t*)kvbuff);
+ keyLen = bswap(*(uint32_t*)kvbuff);
len = 4;
break;
default:
@@ -89,7 +89,7 @@ public:
_valuePos = vbuff + len;
break;
case BytesType:
- _valueLen = hadoop_be32toh(*(uint32_t*)vbuff);
+ _valueLen = bswap(*(uint32_t*)vbuff);
_valuePos = vbuff + 4;
break;
default:
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc
index 5633fcf7a71..21857980f3a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc
@@ -317,8 +317,8 @@ int NativeObjectFactory::IntComparator(const char * src, uint32_t srcLength, con
uint32_t destLength) {
int result = (*src) - (*dest);
if (result == 0) {
- uint32_t from = hadoop_be32toh(*(uint32_t*)src);
- uint32_t to = hadoop_be32toh(*(uint32_t*)dest);
+ uint32_t from = bswap(*(uint32_t*)src);
+ uint32_t to = bswap(*(uint32_t*)dest);
if (from > to) {
return 1;
} else if (from == to) {
@@ -335,8 +335,8 @@ int NativeObjectFactory::LongComparator(const char * src, uint32_t srcLength, co
int result = (int)(*src) - (int)(*dest);
if (result == 0) {
- uint64_t from = hadoop_be64toh(*(uint64_t*)src);
- uint64_t to = hadoop_be64toh(*(uint64_t*)dest);
+ uint64_t from = bswap64(*(uint64_t*)src);
+ uint64_t to = bswap64(*(uint64_t*)dest);
if (from > to) {
return 1;
} else if (from == to) {
@@ -380,8 +380,8 @@ int NativeObjectFactory::FloatComparator(const char * src, uint32_t srcLength, c
THROW_EXCEPTION_EX(IOException, "float comparator, while src/dest lengt is not 4");
}
- uint32_t from = hadoop_be32toh(*(uint32_t*)src);
- uint32_t to = hadoop_be32toh(*(uint32_t*)dest);
+ uint32_t from = bswap(*(uint32_t*)src);
+ uint32_t to = bswap(*(uint32_t*)dest);
float * srcValue = (float *)(&from);
float * destValue = (float *)(&to);
@@ -401,8 +401,8 @@ int NativeObjectFactory::DoubleComparator(const char * src, uint32_t srcLength,
THROW_EXCEPTION_EX(IOException, "double comparator, while src/dest lengt is not 4");
}
- uint64_t from = hadoop_be64toh(*(uint64_t*)src);
- uint64_t to = hadoop_be64toh(*(uint64_t*)dest);
+ uint64_t from = bswap64(*(uint64_t*)src);
+ uint64_t to = bswap64(*(uint64_t*)dest);
double * srcValue = (double *)(&from);
double * destValue = (double *)(&to);
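The float and double comparators reinterpret the byte-swapped integer bits through pointer casts. A strict-aliasing-safe sketch of the same big-endian float decode (little-endian host assumed):

    #include <cstdint>
    #include <cstring>

    /* Sketch of the decode FloatComparator performs: byte-swap into host
     * order, then reinterpret the bits as float.  memcpy sidesteps the
     * strict-aliasing issue of the pointer casts in the original. */
    static float readFloatBE(const char *src) {
      uint32_t bits;
      std::memcpy(&bits, src, sizeof bits);
      bits = __builtin_bswap32(bits);
      float f;
      std::memcpy(&f, &bits, sizeof f);
      return f;
    }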
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc
index c1a36ced541..9cff52975e4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc
@@ -58,10 +58,10 @@ void SingleSpillInfo::writeSpillInfo(const std::string & filepath) {
appendBuffer.flush();
uint32_t chsum = dest.getChecksum();
#ifdef SPILLRECORD_CHECKSUM_UINT
- chsum = hadoop_be32toh(chsum);
+ chsum = bswap(chsum);
fout->write(&chsum, sizeof(uint32_t));
#else
- uint64_t wtchsum = hadoop_be64toh((uint64_t)chsum);
+ uint64_t wtchsum = bswap64((uint64_t)chsum);
fout->write(&wtchsum, sizeof(uint64_t));
#endif
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h
index 9c69f42c10c..57500b78a15 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h
@@ -41,7 +41,6 @@
#include