diff --git a/hadoop3.orig/Makefile b/hadoop3/Makefile --- a/hadoop3.orig/Makefile +++ b/hadoop3/Makefile @@ -0,0 +1,120 @@ +PORTNAME= hadoop +PORTVERSION= 3.2.0 +CATEGORIES= devel java +MASTER_SITES= https://archive.apache.org/dist/${PORTNAME}/common/${PORTNAME}-${PORTVERSION}/ \ + http://archive.apache.org/dist/tomcat/tomcat-10/v${TOMCAT_VERSION}/bin/:tomcat + +PKGNAMESUFFIX= 3 +DISTNAME= ${PORTNAME}-${PORTVERSION}-src +DISTFILES= ${DISTNAME}${EXTRACT_SUFX} \ + apache-tomcat-${TOMCAT_VERSION}.tar.gz:tomcat +DIST_SUBDIR= hadoop +EXTRACT_ONLY= ${DISTNAME}${EXTRACT_SUFX} + +MAINTAINER= freebsd@sysctl.cz +COMMENT= Apache Map/Reduce framework + +LICENSE= APACHE20 + +BUILD_DEPENDS= ant:devel/apache-ant \ + bash:shells/bash \ + cmake:devel/cmake \ + mvn:devel/maven \ + protobuf25>0:devel/protobuf25 +LIB_DEPENDS= libprotobuf.so:devel/protobuf25 \ + libsasl2.so:security/cyrus-sasl2 \ + libsnappy.so:archivers/snappy \ + libzstd.so:archivers/zstd +RUN_DEPENDS= bash:shells/bash \ + setsid:sysutils/setsid + +USES= compiler:c++11-lang cpe pkgconfig shebangfix ssl +CPE_VENDOR= apache +USE_JAVA= yes +JAVA_VERSION= 8+ +USE_LDCONFIG= yes + +SHEBANG_FILES= hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh \ + hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh \ + hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh \ + hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh \ + hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh \ + hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh + +CONFLICTS_INSTALL= yarn + +MAKE_ENV+= HADOOP_PROTOC_PATH=${LOCALBASE}/protobuf25/bin/protoc \ + JAVA_HOME=${JAVA_HOME} +MAKE_ARGS+= CXXFLAGS="${CXXFLAGS} -fPIC" + +OPTIONS_DEFINE= EXAMPLES + +TOMCAT_VERSION= 10.1.0-M17 +HADOOP_DIST= ${WRKSRC}/hadoop-dist/target/hadoop-${PORTVERSION} + +HADOOP_LOGDIR= /var/log/hadoop +HADOOP_RUNDIR= /var/run/hadoop + +HDFS_USER= hdfs +MAPRED_USER= mapred +HADOOP_GROUP= hadoop +USERS= ${HDFS_USER} ${MAPRED_USER} +GROUPS= ${HADOOP_GROUP} + 
+SUB_FILES= hadoop-layout.sh httpfs-env.sh kms-env.sh +USE_RC_SUBR= historyserver nodemanager resourcemanager webappproxyserver \ + datanode namenode secondarynamenode journalnode zkfc + +PLIST_SUB= HADOOP_GROUP="${HADOOP_GROUP}" \ + HADOOP_LOGDIR="${HADOOP_LOGDIR}" \ + HADOOP_RUNDIR="${HADOOP_RUNDIR}" \ + HDFS_USER="${HDFS_USER}" \ + MAPRED_USER="${MAPRED_USER}" \ + PORTVERSION="${PORTVERSION}" +SUB_LIST= HADOOP_GROUP="${HADOOP_GROUP}" \ + HADOOP_LOGDIR="${HADOOP_LOGDIR}" \ + HADOOP_RUNDIR="${HADOOP_RUNDIR}" \ + HDFS_USER="${HDFS_USER}" \ + JAVA_HOME="${JAVA_HOME}" \ + MAPRED_USER="${MAPRED_USER}" + +post-patch: + ${REINPLACE_CMD} -e "s#/bin/bash#${LOCALBASE}/bin/bash#" ${WRKSRC}/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java ${WRKSRC}/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java ${WRKSRC}/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java ${WRKSRC}/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java ${WRKSRC}/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh + ${RM} ${WRKSRC}/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h + +do-build: + ${MKDIR} ${WRKSRC}/hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads + ${MKDIR} ${WRKSRC}/hadoop-common-project/hadoop-kms/downloads + ${CP} ${DISTDIR}/${DIST_SUBDIR}/apache-tomcat-${TOMCAT_VERSION}.tar.gz ${WRKSRC}/hadoop-common-project/hadoop-kms/downloads/ + cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} ${LOCALBASE}/bin/mvn -Dmaven.javadoc.skip=true -Dmaven.clean.skip=true -Dmaven.repo.local=${WRKDIR}/m2 package \ + -Pdist,native -DskipTests -Drequire.snappy 
-Dsnappy.prefix=${LOCALBASE} -Drequire.openssl -Drequire.zstd -Dzstd.prefix=${LOCALBASE} + +post-build: + ${RM} ${HADOOP_DIST}/etc/hadoop/*.cmd + ${RM} ${HADOOP_DIST}/libexec/*.cmd ${HADOOP_DIST}/sbin/*.cmd + +do-install: + cd ${HADOOP_DIST}/bin && ${INSTALL_SCRIPT} hadoop hdfs mapred yarn ${STAGEDIR}${PREFIX}/bin/ + cd ${HADOOP_DIST} && ${COPYTREE_BIN} "libexec sbin" ${STAGEDIR}${PREFIX}/ "! -name *.cmd" + cd ${HADOOP_DIST}/include && ${INSTALL_DATA} *h ${STAGEDIR}${PREFIX}/include/ + cd ${HADOOP_DIST}/lib/native && ${INSTALL_DATA} *.a ${STAGEDIR}${PREFIX}/lib/ + cd ${HADOOP_DIST}/lib/native && ${INSTALL_DATA} libhadoop.so.1.0.0 ${STAGEDIR}${PREFIX}/lib/libhadoop.so.1.0.0 + ${LN} -sf libhadoop.so.1.0.0 ${STAGEDIR}${PREFIX}/lib/libhadoop.so + cd ${HADOOP_DIST}/lib/native && ${INSTALL_DATA} libnativetask.so.1.0.0 ${STAGEDIR}${PREFIX}/lib/libnativetask.so.1.0.0 + ${LN} -sf libnativetask.so.1.0.0 ${STAGEDIR}${PREFIX}/lib/libnativetask.so + cd ${WRKSRC}/hadoop-hdfs-project/hadoop-hdfs-native-client/target/target/usr/local/lib/ && ${INSTALL_DATA} libhdfs.so.0.0.0 ${STAGEDIR}${PREFIX}/lib/libhdfs.so.0.0.0 && ${LN} -sf libhdfs.so.0.0.0 ${STAGEDIR}${PREFIX}/lib/libhdfs.so + cd ${WRKSRC}/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/libhdfspp/ && ${INSTALL_DATA} libhdfspp.so.0.1.0 ${STAGEDIR}${PREFIX}/lib/libhdfspp.so.0.1.0 && ${LN} -sf libhdfspp.so.0.1.0 ${STAGEDIR}${PREFIX}/lib/libhdfspp.so + cd ${HADOOP_DIST}/share/hadoop && ${COPYTREE_SHARE} "*" ${STAGEDIR}${DATADIR}/ "! -name *-sources.jar -and ! 
-name sources" + ${MKDIR} ${STAGEDIR}${EXAMPLESDIR}/conf + cd ${HADOOP_DIST}/etc/hadoop && ${COPYTREE_SHARE} "*" ${STAGEDIR}${EXAMPLESDIR}/conf/ + ${INSTALL_DATA} ${WRKSRC}/hadoop-hdfs-project/hadoop-hdfs/target/classes/hdfs-default.xml ${WRKSRC}/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/classes/httpfs-default.xml ${WRKSRC}/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/target/classes/yarn-default.xml ${WRKSRC}/hadoop-common-project/hadoop-common/target/classes/core-default.xml ${WRKSRC}/hadoop-tools/hadoop-distcp/target/classes/distcp-default.xml ${WRKSRC}/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/target/classes/mapred-default.xml ${STAGEDIR}/${EXAMPLESDIR}/ + ${INSTALL_DATA} ${WRKDIR}/hadoop-layout.sh ${STAGEDIR}${PREFIX}/libexec/ + ${MKDIR} ${STAGEDIR}${ETCDIR} + ${INSTALL_DATA} ${WRKDIR}/httpfs-env.sh ${STAGEDIR}${ETCDIR} + ${INSTALL_DATA} ${WRKDIR}/kms-env.sh ${STAGEDIR}${ETCDIR} + ${INSTALL_DATA} ${HADOOP_DIST}/etc/hadoop/core-site.xml ${STAGEDIR}${ETCDIR} + ${INSTALL_DATA} ${HADOOP_DIST}/etc/hadoop/log4j.properties ${STAGEDIR}${ETCDIR} + ${MKDIR} ${STAGEDIR}${HADOOP_LOGDIR} + ${MKDIR} ${STAGEDIR}${HADOOP_RUNDIR} + +.include diff --git a/hadoop3.orig/distinfo b/hadoop3/distinfo --- a/hadoop3.orig/distinfo +++ b/hadoop3/distinfo @@ -0,0 +1,5 @@ +TIMESTAMP = 1663525778 +SHA256 (hadoop/hadoop-3.2.0-src.tar.gz) = c30d448d3712b518e892efdc189e7b3f81c4ce4b6532ebb981515f016f735568 +SIZE (hadoop/hadoop-3.2.0-src.tar.gz) = 30751465 +SHA256 (hadoop/apache-tomcat-10.1.0-M17.tar.gz) = 8d3ada5a826aa35074289469f4f1d4e02dfd8c344ee8e018d9bea5b262e2f785 +SIZE (hadoop/apache-tomcat-10.1.0-M17.tar.gz) = 11848876 diff --git a/hadoop3.orig/files/datanode.in b/hadoop3/files/datanode.in --- a/hadoop3.orig/files/datanode.in +++ b/hadoop3/files/datanode.in @@ -0,0 +1,47 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: datanode +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# datanode_enable (bool): Set to NO by default. 
+# Set it to YES to enable datanode. + +. /etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=datanode +rcvar=datanode_enable +pidfile=%%HADOOP_RUNDIR%%/hadoop-%%HDFS_USER%%-${name}.pid + +load_rc_config "${name}" + +: ${datanode_enable:=NO} +: ${datanode_user:=%%HDFS_USER%%} + +command="%%PREFIX%%/sbin/hadoop-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start datanode' + +start_postcmd="start_postcmd" +stop_cmd=datanode_stop +status_precmd=find_pid + +start_postcmd () { + rc_pid=$(check_pidfile ${pidfile} %%JAVA_HOME%%/bin/java) + if [ -n "$rc_pid" ]; then + protect -p $rc_pid + fi +} + +datanode_stop () { + su -m ${datanode_user} -c "${command} --config %%ETCDIR%% stop datanode" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/hadoop-layout.sh.in b/hadoop3/files/hadoop-layout.sh.in --- a/hadoop3.orig/files/hadoop-layout.sh.in +++ b/hadoop3/files/hadoop-layout.sh.in @@ -0,0 +1,14 @@ +export JAVA_HOME=${JAVA_HOME:-%%JAVA_HOME%%} +export HADOOP_PREFIX=%%PREFIX%% +export HADOOP_CONF_DIR=%%ETCDIR%% +export HADOOP_LOG_DIR=%%HADOOP_LOGDIR%% +export HADOOP_PID_DIR=%%HADOOP_RUNDIR%% +export HADOOP_IDENT_STRING=hdfs + +export YARN_LOG_DIR=%%HADOOP_LOGDIR%% +export YARN_PID_DIR=%%HADOOP_RUNDIR%% +export YARN_IDENT_STRING=yarn + +export HADOOP_MAPRED_LOG_DIR=%%HADOOP_LOGDIR%% +export HADOOP_MAPRED_PID_DIR=%%HADOOP_RUNDIR%% +export HADOOP_MAPRED_IDENT_STRING=mapred diff --git a/hadoop3.orig/files/historyserver.in b/hadoop3/files/historyserver.in --- a/hadoop3.orig/files/historyserver.in +++ b/hadoop3/files/historyserver.in @@ -0,0 +1,32 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: historyserver +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# historyserver_enable (bool): Set to NO by default. +# Set it to YES to enable historyserver. + +. 
/etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=historyserver +rcvar=historyserver_enable + +load_rc_config "${name}" + +: ${historyserver_enable:=NO} +: ${historyserver_user:=%%MAPRED_USER%%} + +command="%%PREFIX%%/sbin/mr-jobhistory-daemon.sh" +command_args='--config %%ETCDIR%% start historyserver' + +stop_cmd=historyserver_stop + +historyserver_stop () { + su -m ${historyserver_user} -c "${command} --config %%ETCDIR%% stop historyserver" +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/httpfs-env.sh.in b/hadoop3/files/httpfs-env.sh.in --- a/hadoop3.orig/files/httpfs-env.sh.in +++ b/hadoop3/files/httpfs-env.sh.in @@ -0,0 +1,4 @@ +# $FreeBSD$ + +export HTTPFS_LOG=/var/log/hadoop +export HTTPFS_TEMP=/var/tmp diff --git a/hadoop3.orig/files/journalnode.in b/hadoop3/files/journalnode.in --- a/hadoop3.orig/files/journalnode.in +++ b/hadoop3/files/journalnode.in @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: journalnode +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# journalnode_enable (bool): Set to NO by default. +# Set it to YES to enable journalnode. + +. 
/etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=journalnode +rcvar=journalnode_enable + +load_rc_config "${name}" + +: ${journalnode_enable:=NO} +: ${journalnode_user:=%%HDFS_USER%%} + +command="%%PREFIX%%/sbin/hadoop-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start journalnode' + +stop_cmd=journalnode_stop +status_precmd=find_pid + +journalnode_stop () { + su -m ${journalnode_user} -c "${command} --config %%ETCDIR%% stop journalnode" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/kms-env.sh.in b/hadoop3/files/kms-env.sh.in --- a/hadoop3.orig/files/kms-env.sh.in +++ b/hadoop3/files/kms-env.sh.in @@ -0,0 +1,4 @@ +# $FreeBSD$ + +export KMS_LOG=/var/log/hadoop +export KMS_TEMP=/var/tmp diff --git a/hadoop3.orig/files/namenode.in b/hadoop3/files/namenode.in --- a/hadoop3.orig/files/namenode.in +++ b/hadoop3/files/namenode.in @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: namenode +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# namenode_enable (bool): Set to NO by default. +# Set it to YES to enable namenode. + +. 
/etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=namenode +rcvar=namenode_enable + +load_rc_config "${name}" + +: ${namenode_enable:=NO} +: ${namenode_user:=%%HDFS_USER%%} + +command="%%PREFIX%%/sbin/hadoop-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start namenode' + +stop_cmd=namenode_stop +status_precmd=find_pid + +namenode_stop () { + su -m ${namenode_user} -c "${command} --config %%ETCDIR%% stop namenode" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/nodemanager.in b/hadoop3/files/nodemanager.in --- a/hadoop3.orig/files/nodemanager.in +++ b/hadoop3/files/nodemanager.in @@ -0,0 +1,47 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: nodemanager +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# nodemanager_enable (bool): Set to NO by default. +# Set it to YES to enable nodemanager. + +. /etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=nodemanager +rcvar=nodemanager_enable +pidfile=%%HADOOP_RUNDIR%%/yarn-yarn-${name}.pid + +load_rc_config "${name}" + +: ${nodemanager_enable:=NO} +: ${nodemanager_user:=%%MAPRED_USER%%} + +command="%%PREFIX%%/sbin/yarn-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start nodemanager' + +start_postcmd="start_postcmd" +stop_cmd=nodemanager_stop +status_precmd=find_pid + +start_postcmd () { + rc_pid=$(check_pidfile ${pidfile} %%JAVA_HOME%%/bin/java) + if [ -n "$rc_pid" ]; then + protect -p $rc_pid + fi +} + +nodemanager_stop () { + su -m ${nodemanager_user} -c "${command} --config %%ETCDIR%% stop nodemanager" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/patch-hadoop-common-project-hadoop-common-src-main-java-org-apache-hadoop-util-StringUtils.java 
b/hadoop3/files/patch-hadoop-common-project-hadoop-common-src-main-java-org-apache-hadoop-util-StringUtils.java --- a/hadoop3.orig/files/patch-hadoop-common-project-hadoop-common-src-main-java-org-apache-hadoop-util-StringUtils.java +++ b/hadoop3/files/patch-hadoop-common-project-hadoop-common-src-main-java-org-apache-hadoop-util-StringUtils.java @@ -0,0 +1,11 @@ +--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java.orig 2018-07-07 08:16:53 UTC ++++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java +@@ -713,7 +713,7 @@ public class StringUtils { + final String classname = clazz.getSimpleName(); + LOG.info(createStartupShutdownMessage(classname, hostname, args)); + +- if (SystemUtils.IS_OS_UNIX) { ++ if (true) { + try { + SignalLogger.INSTANCE.register(LOG); + } catch (Throwable t) { diff --git a/hadoop3.orig/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory.java b/hadoop3/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory.java --- a/hadoop3.orig/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory.java +++ b/hadoop3/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory.java @@ -0,0 +1,11 @@ +--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java.orig 2018-03-21 17:57:55 UTC ++++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java +@@ -54,7 +54,7 @@ public class SharedFileDescriptorFactory { + if (!NativeIO.isAvailable()) { + return "NativeIO is not available."; + } +- if (!SystemUtils.IS_OS_UNIX) { ++ if (false) { + return "The OS is not UNIX."; + } + return null; diff --git 
a/hadoop3.orig/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_util_Shell.java b/hadoop3/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_util_Shell.java --- a/hadoop3.orig/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_util_Shell.java +++ b/hadoop3/files/patch-hadoop-common-project_hadoop-common_src_main_java_org_apache_hadoop_util_Shell.java @@ -0,0 +1,29 @@ +--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java.orig 2019-04-21 12:19:26 UTC ++++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java +@@ -796,14 +796,14 @@ public abstract class Shell { + ShellCommandExecutor shexec = null; + boolean setsidSupported = true; + try { +- String[] args = {"setsid", "bash", "-c", "echo $$"}; ++ String[] args = {"ssid", "bash", "-c", "echo $$"}; + shexec = new ShellCommandExecutor(args); + shexec.execute(); + } catch (IOException ioe) { +- LOG.debug("setsid is not available on this machine. So not using it."); ++ LOG.debug("ssid is not available on this machine. So not using it."); + setsidSupported = false; + } catch (SecurityException se) { +- LOG.debug("setsid is not allowed to run by the JVM "+ ++ LOG.debug("ssid is not allowed to run by the JVM "+ + "security manager. So not using it."); + setsidSupported = false; + } catch (Error err) { +@@ -818,7 +818,7 @@ public abstract class Shell { + } + } finally { // handle the exit code + if (LOG.isDebugEnabled()) { +- LOG.debug("setsid exited with exit code " ++ LOG.debug("ssid exited with exit code " + + (shexec != null ? 
shexec.getExitCode() : "(null executor)")); + } + } diff --git a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_pom.xml b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_pom.xml --- a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_pom.xml +++ b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_pom.xml @@ -0,0 +1,14 @@ +--- hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml.orig 2018-10-19 09:04:13 UTC ++++ hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml +@@ -216,6 +216,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> + ${require.fuse} + ${require.valgrind} + 1 ++ OFF ++ protobuf ++ protoc ++ /usr/local/protobuf25/include ++ /usr/local/protobuf25/bin/protoc + ${require.libwebhdfs} + ${require.openssl} + ${openssl.prefix} diff --git a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_CMakeLists.txt b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_CMakeLists.txt --- a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_CMakeLists.txt +++ b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_CMakeLists.txt @@ -0,0 +1,59 @@ +--- hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt.orig 2018-10-31 07:05:58 UTC ++++ hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt +@@ -40,6 +40,8 @@ SET(CMAKE_PREFIX_PATH "${CMAKE_PREFIX_PATH};${CYRUS_SA + # Specify PROTOBUF_HOME so that find_package picks up the correct version + SET(CMAKE_PREFIX_PATH "${CMAKE_PREFIX_PATH};$ENV{PROTOBUF_HOME}") + ++include(FindProtobuf) ++include(FindThreads) + find_package(Doxygen) + find_package(OpenSSL REQUIRED) + find_package(Protobuf REQUIRED) +@@ -144,11 +146,11 @@ add_definitions(-DASIO_STANDALONE -DASIO_CPP11_DATE_TI + + # Disable optimizations if compiling 
debug + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0") +-set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0") ++set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ") + + if(UNIX) +-set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -pedantic -std=c++11 -g -fPIC -fno-strict-aliasing") +-set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -fPIC -fno-strict-aliasing") ++set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -pedantic -std=c++11 -g -fPIC -fno-strict-aliasing -L/usr/local/protobuf25/lib -I/usr/local/protobuf25/include -Wl,-rpath=/usr/local/protobuf25/lib") ++set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -fPIC -fno-strict-aliasing -L/usr/local/protobuf25/lib -I/usr/local/protobuf25/include -Wl,-rpath=/usr/local/protobuf25/lib") + endif() + + if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") +@@ -231,7 +233,6 @@ include_directories( SYSTEM + ${PROTOBUF_INCLUDE_DIRS} + ) + +- + add_subdirectory(third_party/gmock-1.7.0) + add_subdirectory(third_party/uriparser2) + add_subdirectory(lib) +@@ -257,20 +258,20 @@ if (HADOOP_BUILD) + hadoop_add_dual_library(hdfspp ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS}) + hadoop_target_link_dual_libraries(hdfspp + ${LIB_DL} +- ${PROTOBUF_LIBRARY} ++ ${PROTOBUF_LIBRARIES} + ${OPENSSL_LIBRARIES} + ${SASL_LIBRARIES} +- ${CMAKE_THREAD_LIBS_INIT} ++ Threads::Threads + ) + set_target_properties(hdfspp PROPERTIES SOVERSION ${LIBHDFSPP_VERSION}) + else (HADOOP_BUILD) + add_library(hdfspp_static STATIC ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS}) + target_link_libraries(hdfspp_static + ${LIB_DL} +- ${PROTOBUF_LIBRARY} ++ ${PROTOBUF_LIBRARIES} + ${OPENSSL_LIBRARIES} + ${SASL_LIBRARIES} +- ${CMAKE_THREAD_LIBS_INIT} ++ Threads::Threads + ) + if(BUILD_SHARED_HDFSPP) + add_library(hdfspp SHARED ${EMPTY_FILE_CC} ${LIBHDFSPP_ALL_OBJECTS}) diff --git a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_third__party_tr2_optional.hpp 
b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_third__party_tr2_optional.hpp --- a/hadoop3.orig/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_third__party_tr2_optional.hpp +++ b/hadoop3/files/patch-hadoop-hdfs-project_hadoop-hdfs-native-client_src_main_native_libhdfspp_third__party_tr2_optional.hpp @@ -0,0 +1,16 @@ +--- hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/third_party/tr2/optional.hpp.orig 2019-04-21 09:03:51 UTC ++++ hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/third_party/tr2/optional.hpp +@@ -199,10 +199,10 @@ template inline constexpr typename std::remo + #if defined NDEBUG + # define TR2_OPTIONAL_ASSERTED_EXPRESSION(CHECK, EXPR) (EXPR) + #elif defined __clang__ || defined __GNU_LIBRARY__ +-# define TR2_OPTIONAL_ASSERTED_EXPRESSION(CHECK, EXPR) ((CHECK) ? (EXPR) : (fail(#CHECK, __FILE__, __LINE__), (EXPR))) +- inline void fail(const char* expr, const char* file, int line) ++# define TR2_OPTIONAL_ASSERTED_EXPRESSION(CHECK, EXPR) ((CHECK) ? (EXPR) : (fail(#CHECK, __FILE__, __func__, __LINE__), (EXPR))) ++ inline void fail(const char* expr, const char* file, const char* func, int line) + { +- __assert(expr, file, line); ++ __assert(func, file, line, expr); + } + #elif defined __GNUC__ + # define TR2_OPTIONAL_ASSERTED_EXPRESSION(CHECK, EXPR) ((CHECK) ? 
(EXPR) : (fail(#CHECK, __FILE__, __LINE__), (EXPR))) diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapred_TaskLog.java b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapred_TaskLog.java --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapred_TaskLog.java +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapred_TaskLog.java @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java.orig 2019-04-21 12:21:44 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java +@@ -546,7 +546,7 @@ public class TaskLog { + mergedCmd.append("("); + } else if(ProcessTree.isSetsidAvailable && useSetsid && + !Shell.WINDOWS) { +- mergedCmd.append("exec setsid "); ++ mergedCmd.append("exec ssid "); + } else { + mergedCmd.append("exec "); + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapreduce_util_ProcessTree.java b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapreduce_util_ProcessTree.java --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapreduce_util_ProcessTree.java +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-core_src_main_java_org_apache_hadoop_mapreduce_util_ProcessTree.java @@ -0,0 +1,20 @@ +--- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java.orig 2019-04-21 12:22:36 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcessTree.java +@@ -53,14 +53,14 @@ public class ProcessTree { + ShellCommandExecutor shexec = null; + boolean setsidSupported = true; + try { +- String[] args = {"setsid", "bash", "-c", "echo $$"}; ++ String[] args = {"ssid", "bash", "-c", "echo $$"}; + shexec = new ShellCommandExecutor(args); + shexec.execute(); + } catch (IOException ioe) { +- LOG.warn("setsid is not available on this machine. So not using it."); ++ LOG.warn("ssid is not available on this machine. So not using it."); + setsidSupported = false; + } finally { // handle the exit code +- LOG.info("setsid exited with exit code " + shexec.getExitCode()); ++ LOG.info("ssid exited with exit code " + shexec.getExitCode()); + } + return setsidSupported; + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_CMakeLists.txt b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_CMakeLists.txt --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_CMakeLists.txt +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_CMakeLists.txt @@ -0,0 +1,18 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt.orig 2018-03-21 17:57:56 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt +@@ -27,6 +27,7 @@ set(GTEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../../../../hado + # Add extra compiler and linker flags. 
+ # -Wno-sign-compare + hadoop_add_compiler_flags("-DNDEBUG -DSIMPLE_MEMCPY -fno-strict-aliasing -fsigned-char") ++hadoop_add_linker_flags("-lexecinfo") + + # Source location. + set(SRC main/native) +@@ -45,7 +46,6 @@ include(CheckIncludeFiles) + + check_include_files(fcntl.h HAVE_FCNTL_H) + check_include_files(malloc.h HAVE_MALLOC_H) +-check_include_files(mach/mach.h HAVE_MACH_MACH_H) + check_include_files(memory.h HAVE_MEMORY_H) + check_include_files(stddef.h HAVE_STDDEF_H) + check_include_files(stdint.h HAVE_STDINT_H) diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_java_org_apache_hadoop_mapred_nativetask_INativeComparable.java b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_java_org_apache_hadoop_mapred_nativetask_INativeComparable.java --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_java_org_apache_hadoop_mapred_nativetask_INativeComparable.java +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_java_org_apache_hadoop_mapred_nativetask_INativeComparable.java @@ -0,0 +1,13 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/INativeComparable.java +@@ -42,8 +42,8 @@ import org.apache.hadoop.classification.InterfaceStabi + * + * int HivePlatform::HiveKeyComparator(const char * src, uint32_t srcLength, + * const char * dest, uint32_t destLength) { +- * uint32_t sl = bswap(*(uint32_t*)src); +- * uint32_t dl = bswap(*(uint32_t*)dest); ++ * uint32_t sl = bswap32(*(uint32_t*)src); ++ * uint32_t dl = 
bswap32(*(uint32_t*)dest); + * return NativeObjectFactory::BytesComparator(src + 4, sl, dest + 4, dl); + * } + * diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_BlockCodec.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_BlockCodec.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_BlockCodec.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_BlockCodec.cc @@ -0,0 +1,13 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc +@@ -104,8 +104,8 @@ int32_t BlockDecompressStream::read(void * buff, uint3 + THROW_EXCEPTION(IOException, "readFully get incomplete data"); + } + _compressedBytesRead += rd; +- sizes[0] = bswap(sizes[0]); +- sizes[1] = bswap(sizes[1]); ++ sizes[0] = bswap32(sizes[0]); ++ sizes[1] = bswap32(sizes[1]); + if (sizes[0] <= length) { + uint32_t len = decompressOneBlock(sizes[1], buff, sizes[0]); + if (len != sizes[0]) { diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_Lz4Codec.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_Lz4Codec.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_Lz4Codec.cc +++ 
b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_Lz4Codec.cc @@ -0,0 +1,13 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/Lz4Codec.cc +@@ -38,8 +38,8 @@ void Lz4CompressStream::compressOneBlock(const void * + int ret = LZ4_compress((char*)buff, _tempBuffer + 8, length); + if (ret > 0) { + compressedLength = ret; +- ((uint32_t*)_tempBuffer)[0] = bswap(length); +- ((uint32_t*)_tempBuffer)[1] = bswap((uint32_t)compressedLength); ++ ((uint32_t*)_tempBuffer)[0] = bswap32(length); ++ ((uint32_t*)_tempBuffer)[1] = bswap32((uint32_t)compressedLength); + _stream->write(_tempBuffer, compressedLength + 8); + _compressedBytesWritten += (compressedLength + 8); + } else { diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_SnappyCodec.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_SnappyCodec.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_SnappyCodec.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_codec_SnappyCodec.cc @@ -0,0 +1,13 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/SnappyCodec.cc +@@ -37,8 +37,8 @@ void SnappyCompressStream::compressOneBlock(const void + snappy_status ret = 
snappy_compress((const char*)buff, length, _tempBuffer + 8, + &compressedLength); + if (ret == SNAPPY_OK) { +- ((uint32_t*)_tempBuffer)[0] = bswap(length); +- ((uint32_t*)_tempBuffer)[1] = bswap((uint32_t)compressedLength); ++ ((uint32_t*)_tempBuffer)[0] = bswap32(length); ++ ((uint32_t*)_tempBuffer)[1] = bswap32((uint32_t)compressedLength); + _stream->write(_tempBuffer, compressedLength + 8); + _compressedBytesWritten += (compressedLength + 8); + } else if (ret == SNAPPY_INVALID_INPUT) { diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_BatchHandler.h b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_BatchHandler.h --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_BatchHandler.h +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_BatchHandler.h @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h +@@ -108,7 +108,7 @@ class BatchHandler : public Configurable { (protected) + flushOutput(); + } + uint32_t cp = length < remain ? 
length : remain; +- simple_memcpy(_out.current(), buff, cp); ++ memcpy(_out.current(), buff, cp); + buff += cp; + length -= cp; + _out.advance(cp); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_CombineHandler.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_CombineHandler.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_CombineHandler.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_CombineHandler.cc @@ -0,0 +1,44 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc +@@ -48,8 +48,8 @@ uint32_t CombineHandler::feedDataToJavaInWritableSeria + + if (_kvCached) { + uint32_t kvLength = _key.outerLength + _value.outerLength + KVBuffer::headerLength(); +- outputInt(bswap(_key.outerLength)); +- outputInt(bswap(_value.outerLength)); ++ outputInt(bswap32(_key.outerLength)); ++ outputInt(bswap32(_value.outerLength)); + outputKeyOrValue(_key, _kType); + outputKeyOrValue(_value, _vType); + +@@ -73,8 +73,8 @@ uint32_t CombineHandler::feedDataToJavaInWritableSeria + } else { + firstKV = false; + //write final key length and final value length +- outputInt(bswap(_key.outerLength)); +- outputInt(bswap(_value.outerLength)); ++ outputInt(bswap32(_key.outerLength)); ++ outputInt(bswap32(_value.outerLength)); + outputKeyOrValue(_key, _kType); + outputKeyOrValue(_value, _vType); + +@@ -101,7 +101,7 @@ void CombineHandler::outputKeyOrValue(SerializeInfo & + 
output(KV.buffer.data(), KV.buffer.length()); + break; + case BytesType: +- outputInt(bswap(KV.buffer.length())); ++ outputInt(bswap32(KV.buffer.length())); + output(KV.buffer.data(), KV.buffer.length()); + break; + default: +@@ -202,8 +202,8 @@ void CombineHandler::write(char * buf, uint32_t length + uint32_t outputRecordCount = 0; + while (remain > 0) { + kv = (KVBuffer *)pos; +- kv->keyLength = bswap(kv->keyLength); +- kv->valueLength = bswap(kv->valueLength); ++ kv->keyLength = bswap32(kv->keyLength); ++ kv->valueLength = bswap32(kv->valueLength); + _writer->write(kv->getKey(), kv->keyLength, kv->getValue(), kv->valueLength); + outputRecordCount++; + remain -= kv->length(); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_MCollectorOutputHandler.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_MCollectorOutputHandler.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_MCollectorOutputHandler.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_handler_MCollectorOutputHandler.cc @@ -0,0 +1,15 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/MCollectorOutputHandler.cc +@@ -74,9 +74,9 @@ void MCollectorOutputHandler::handleInput(ByteBuffer & + } + + if (_endium == LARGE_ENDIUM) { +- kvBuffer->partitionId = bswap(kvBuffer->partitionId); +- kvBuffer->buffer.keyLength = bswap(kvBuffer->buffer.keyLength); +- kvBuffer->buffer.valueLength = 
bswap(kvBuffer->buffer.valueLength); ++ kvBuffer->partitionId = bswap32(kvBuffer->partitionId); ++ kvBuffer->buffer.keyLength = bswap32(kvBuffer->buffer.keyLength); ++ kvBuffer->buffer.valueLength = bswap32(kvBuffer->buffer.valueLength); + } + + uint32_t kvLength = kvBuffer->buffer.length(); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.h b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.h --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.h +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.h @@ -0,0 +1,65 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.h +@@ -79,11 +79,11 @@ class ReadBuffer { (public) + } + + /** +- * read to outside buffer, use simple_memcpy ++ * read to outside buffer, use memcpy + */ + inline void readUnsafe(char * buff, uint32_t len) { + if (likely(len <= _remain)) { +- simple_memcpy(buff, current(), len); ++ memcpy(buff, current(), len); + _remain -= len; + return; + } +@@ -115,7 +115,7 @@ class ReadBuffer { (public) + * read uint32_t big endian + */ + inline uint32_t read_uint32_be() { +- return bswap(read_uint32_le()); ++ return bswap32(read_uint32_le()); + } + }; + +@@ -181,7 +181,7 @@ class AppendBuffer { (public) + + inline void write(const void * data, uint32_t len) { + if (likely(len <= _remain)) { // append directly +- simple_memcpy(current(), data, len); ++ memcpy(current(), data, len); + _remain -= len; + return; + } +@@ -198,7 
+198,7 @@ class AppendBuffer { (public) + } + + inline void write_uint32_be(uint32_t v) { +- write_uint32_le(bswap(v)); ++ write_uint32_le(bswap32(v)); + } + + inline void write_uint64_le(uint64_t v) { +@@ -291,10 +291,10 @@ struct KVBuffer { + valueLength = vallen; + + if (keylen > 0) { +- simple_memcpy(getKey(), key, keylen); ++ memcpy(getKey(), key, keylen); + } + if (vallen > 0) { +- simple_memcpy(getValue(), value, vallen); ++ memcpy(getValue(), value, vallen); + } + } + +@@ -479,7 +479,7 @@ class FixSizeContainer { (public) + } + uint32_t remain = _size - _pos; + uint32_t length = (maxSize < remain) ? maxSize : remain; +- simple_memcpy(_buff + _pos, source, length); ++ memcpy(_buff + _pos, source, length); + _pos += length; + return length; + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Buffers.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Buffers.cc +@@ -206,7 +206,7 @@ void AppendBuffer::write_inner(const void * data, uint + _dest->write(data, len); + _counter += len; + } else { +- simple_memcpy(_buff, data, len); ++ memcpy(_buff, data, len); + _remain -= len; + } + } diff --git 
a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.h b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.h --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.h +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.h @@ -0,0 +1,20 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.h +@@ -74,7 +74,7 @@ class IFileReader { (public) + keyLen = WritableUtils::ReadVInt(kvbuff, len); + break; + case BytesType: +- keyLen = bswap(*(uint32_t*)kvbuff); ++ keyLen = bswap32(*(uint32_t*)kvbuff); + len = 4; + break; + default: +@@ -89,7 +89,7 @@ class IFileReader { (public) + _valuePos = vbuff + len; + break; + case BytesType: +- _valueLen = bswap(*(uint32_t*)vbuff); ++ _valueLen = bswap32(*(uint32_t*)vbuff); + _valuePos = vbuff + 4; + break; + default: diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_IFile.cc @@ -0,0 +1,20 @@ +--- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/IFile.cc +@@ -60,7 +60,7 @@ bool IFileReader::nextPartition() { + if (4 != _stream->readFully(&chsum, 4)) { + THROW_EXCEPTION(IOException, "read ifile checksum failed"); + } +- uint32_t actual = bswap(chsum); ++ uint32_t actual = bswap32(chsum); + uint32_t expect = _source->getChecksum(); + if (actual != expect) { + THROW_EXCEPTION_EX(IOException, "read ifile checksum not match, actual %x expect %x", actual, +@@ -130,7 +130,7 @@ void IFileWriter::endPartition() { + } + + uint32_t chsum = _dest->getChecksum(); +- chsum = bswap(chsum); ++ chsum = bswap32(chsum); + _stream->write(&chsum, sizeof(chsum)); + _stream->flush(); + IFileSegment * info = &(_spillFileSegments[_spillFileSegments.size() - 1]); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Iterator.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Iterator.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Iterator.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_Iterator.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Iterator.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Iterator.cc +@@ -61,7 +61,7 @@ const char * KeyGroupIteratorImpl::nextValue(uint32_t + case SAME_KEY: { + if (next()) { + if (_key.length() == 
_currentGroupKey.length()) { +- if (fmemeq(_key.data(), _currentGroupKey.c_str(), _key.length())) { ++ if (memcmp(_key.data(), _currentGroupKey.c_str(), _key.length()) == 0) { + len = _value.length(); + return _value.data(); + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_MapOutputCollector.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_MapOutputCollector.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_MapOutputCollector.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_MapOutputCollector.cc @@ -0,0 +1,32 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/MapOutputCollector.cc.orig 2019-04-21 10:25:54 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/MapOutputCollector.cc +@@ -302,10 +302,10 @@ void MapOutputCollector::middleSpill(const std::string + uint64_t spillTime = timer.now() - timer.last() - metrics.sortTime; + + const uint64_t M = 1000000; // million +- LOG("%s-spill: { id: %d, collect: %"PRIu64" ms, " +- "in-memory sort: %"PRIu64" ms, in-memory records: %"PRIu64", " +- "merge&spill: %"PRIu64" ms, uncompressed size: %"PRIu64", " +- "real size: %"PRIu64" path: %s }", ++ LOG("%s-spill: { id: %d, collect: %" PRIu64" ms, " ++ "in-memory sort: %" PRIu64" ms, in-memory records: %" PRIu64", " ++ "merge&spill: %" PRIu64" ms, uncompressed size: %" PRIu64", " ++ "real size: %" PRIu64" path: %s }", + final ? 
"Final" : "Mid", + _spillInfos.getSpillCount(), + collecttime / M, +@@ -370,10 +370,10 @@ void MapOutputCollector::finalSpill(const std::string + writer->getStatistics(outputSize, realOutputSize, recordCount); + + const uint64_t M = 1000000; // million +- LOG("Final-merge-spill: { id: %d, in-memory sort: %"PRIu64" ms, " +- "in-memory records: %"PRIu64", merge&spill: %"PRIu64" ms, " +- "records: %"PRIu64", uncompressed size: %"PRIu64", " +- "real size: %"PRIu64" path: %s }", ++ LOG("Final-merge-spill: { id: %d, in-memory sort: %" PRIu64" ms, " ++ "in-memory records: %" PRIu64", merge&spill: %" PRIu64" ms, " ++ "records: %" PRIu64", uncompressed size: %" PRIu64", " ++ "real size: %" PRIu64" path: %s }", + _spillInfos.getSpillCount(), + metrics.sortTime / M, + metrics.recordCount, diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_NativeObjectFactory.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_NativeObjectFactory.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_NativeObjectFactory.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_NativeObjectFactory.cc @@ -0,0 +1,33 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/NativeObjectFactory.cc +@@ -299,7 +299,7 @@ int NativeObjectFactory::BytesComparator(const char * + uint32_t destLength) { + + uint32_t minlen = std::min(srcLength, destLength); +- int64_t ret = fmemcmp(src, dest, minlen); ++ int64_t ret = memcmp(src, dest, minlen); + if 
(ret > 0) { + return 1; + } else if (ret < 0) { +@@ -317,8 +317,8 @@ int NativeObjectFactory::IntComparator(const char * sr + uint32_t destLength) { + int result = (*src) - (*dest); + if (result == 0) { +- uint32_t from = bswap(*(uint32_t*)src); +- uint32_t to = bswap(*(uint32_t*)dest); ++ uint32_t from = bswap32(*(uint32_t*)src); ++ uint32_t to = bswap32(*(uint32_t*)dest); + if (from > to) { + return 1; + } else if (from == to) { +@@ -380,8 +380,8 @@ int NativeObjectFactory::FloatComparator(const char * + THROW_EXCEPTION_EX(IOException, "float comparator, while src/dest lengt is not 4"); + } + +- uint32_t from = bswap(*(uint32_t*)src); +- uint32_t to = bswap(*(uint32_t*)dest); ++ uint32_t from = bswap32(*(uint32_t*)src); ++ uint32_t to = bswap32(*(uint32_t*)dest); + + float * srcValue = (float *)(&from); + float * destValue = (float *)(&to); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_SpillInfo.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_SpillInfo.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_SpillInfo.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_SpillInfo.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/SpillInfo.cc +@@ -58,7 +58,7 @@ void SingleSpillInfo::writeSpillInfo(const std::string + appendBuffer.flush(); + uint32_t chsum = dest.getChecksum(); + #ifdef SPILLRECORD_CHECKSUM_UINT +- chsum = bswap(chsum); ++ chsum = bswap32(chsum); + fout->write(&chsum, 
sizeof(uint32_t)); + #else + uint64_t wtchsum = bswap64((uint64_t)chsum); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_commons.h b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_commons.h --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_commons.h +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_lib_commons.h @@ -0,0 +1,23 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/commons.h +@@ -41,7 +41,6 @@ + #include + #include + +-#include "lib/primitives.h" + #include "lib/Log.h" + #include "NativeTask.h" + +@@ -49,4 +48,12 @@ + + #include "lib/Iterator.h" + ++#ifdef __GNUC__ ++#define likely(x) __builtin_expect((x),1) ++#define unlikely(x) __builtin_expect((x),0) ++#else ++#define likely(x) (x) ++#define unlikely(x) (x) ++#endif ++ + #endif /* COMMONS_H_ */ diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_StringUtil.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_StringUtil.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_StringUtil.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_StringUtil.cc @@ -0,0 +1,25 @@ +--- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/StringUtil.cc.orig 2019-04-21 10:28:49 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/StringUtil.cc +@@ -36,19 +36,19 @@ string StringUtil::ToString(uint32_t v) { + + string StringUtil::ToString(int64_t v) { + char tmp[32]; +- snprintf(tmp, 32, "%"PRId64, v); ++ snprintf(tmp, 32, "%" PRId64, v); + return tmp; + } + + string StringUtil::ToString(int64_t v, char pad, int64_t len) { + char tmp[32]; +- snprintf(tmp, 32, "%%%c%"PRId64""PRId64, pad, len); ++ snprintf(tmp, 32, "%%%c%" PRId64"" PRId64, pad, len); + return Format(tmp, v); + } + + string StringUtil::ToString(uint64_t v) { + char tmp[32]; +- snprintf(tmp, 32, "%"PRIu64, v); ++ snprintf(tmp, 32, "%" PRIu64, v); + return tmp; + } + diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_WritableUtils.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_WritableUtils.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_WritableUtils.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_src_util_WritableUtils.cc @@ -0,0 +1,74 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/WritableUtils.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/WritableUtils.cc +@@ -120,22 +120,22 @@ void WritableUtils::WriteVLongInner(int64_t v, char * + len = 4; + } else if (value < (1ULL << 32)) { + *(pos++) = base - 3; +- *(uint32_t*)(pos) = bswap((uint32_t)value); 
++ *(uint32_t*)(pos) = bswap32((uint32_t)value); + len = 5; + } else if (value < (1ULL << 40)) { + *(pos++) = base - 4; +- *(uint32_t*)(pos) = bswap((uint32_t)(value >> 8)); ++ *(uint32_t*)(pos) = bswap32((uint32_t)(value >> 8)); + *(uint8_t*)(pos + 4) = value; + len = 6; + } else if (value < (1ULL << 48)) { + *(pos++) = base - 5; +- *(uint32_t*)(pos) = bswap((uint32_t)(value >> 16)); ++ *(uint32_t*)(pos) = bswap32((uint32_t)(value >> 16)); + *(uint8_t*)(pos + 4) = value >> 8; + *(uint8_t*)(pos + 5) = value; + len = 7; + } else if (value < (1ULL << 56)) { + *(pos++) = base - 6; +- *(uint32_t*)(pos) = bswap((uint32_t)(value >> 24)); ++ *(uint32_t*)(pos) = bswap32((uint32_t)(value >> 24)); + *(uint8_t*)(pos + 4) = value >> 16; + *(uint8_t*)(pos + 5) = value >> 8; + *(uint8_t*)(pos + 6) = value; +@@ -176,7 +176,7 @@ int32_t WritableUtils::ReadInt(InputStream * stream) { + if (stream->readFully(&ret, 4) != 4) { + THROW_EXCEPTION(IOException, "ReadInt reach EOF"); + } +- return (int32_t)bswap(ret); ++ return (int32_t)bswap32(ret); + } + + int16_t WritableUtils::ReadShort(InputStream * stream) { +@@ -192,7 +192,7 @@ float WritableUtils::ReadFloat(InputStream * stream) { + if (stream->readFully(&ret, 4) != 4) { + THROW_EXCEPTION(IOException, "ReadFloat reach EOF"); + } +- ret = bswap(ret); ++ ret = bswap32(ret); + return *(float*)&ret; + } + +@@ -237,7 +237,7 @@ void WritableUtils::WriteLong(OutputStream * stream, i + } + + void WritableUtils::WriteInt(OutputStream * stream, int32_t v) { +- uint32_t be = bswap((uint32_t)v); ++ uint32_t be = bswap32((uint32_t)v); + stream->write(&be, 4); + } + +@@ -249,7 +249,7 @@ void WritableUtils::WriteShort(OutputStream * stream, + + void WritableUtils::WriteFloat(OutputStream * stream, float v) { + uint32_t intv = *(uint32_t*)&v; +- intv = bswap(intv); ++ intv = bswap32(intv); + stream->write(&intv, 4); + } + +@@ -286,7 +286,7 @@ void WritableUtils::toString(string & dest, KeyValueTy + dest.append(*(uint8_t*)data ? 
"true" : "false"); + break; + case IntType: +- dest.append(StringUtil::ToString((int32_t)bswap(*(uint32_t*)data))); ++ dest.append(StringUtil::ToString((int32_t)bswap32(*(uint32_t*)data))); + break; + case LongType: + dest.append(StringUtil::ToString((int64_t)bswap64(*(uint64_t*)data))); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestCompressions.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestCompressions.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestCompressions.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestCompressions.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestCompressions.cc.orig 2019-04-21 10:30:06 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestCompressions.cc +@@ -269,7 +269,7 @@ TEST(Perf, RawCompressionSnappy) { + vector inputfiles; + FileSystem::getLocal().list(inputdir, inputfiles); + CompressResult total; +- printf("Block size: %"PRId64"K\n", blockSize / 1024); ++ printf("Block size: %" PRId64"K\n", blockSize / 1024); + for (size_t i = 0; i < inputfiles.size(); i++) { + if (!inputfiles[i].isDirectory) { + MeasureSingleFileSnappy((inputdir + "/" + inputfiles[i].name).c_str(), total, blockSize, diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestIFile.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestIFile.cc --- 
a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestIFile.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestIFile.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc +@@ -190,7 +190,7 @@ TEST(IFile, TestGlibCBug) { + reader->nextPartition(); + uint32_t index = 0; + while (NULL != (key = reader->nextKey(length))) { +- int32_t realKey = (int32_t)bswap(*(uint32_t *)(key)); ++ int32_t realKey = (int32_t)bswap32(*(uint32_t *)(key)); + ASSERT_LT(index, 5); + ASSERT_EQ(expect[index], realKey); + index++; diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestPrimitives.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestPrimitives.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestPrimitives.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestPrimitives.cc @@ -0,0 +1,130 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestPrimitives.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestPrimitives.cc +@@ -18,98 +18,7 @@ + + #include "test_commons.h" + +-TEST(Primitives, fmemcmp) { +- std::vector vs; +- char buff[14]; +- vs.push_back(""); +- for (uint32_t i = 0; i < 
5000; i += 7) { +- snprintf(buff, 14, "%d", i * 31); +- vs.push_back(buff); +- snprintf(buff, 10, "%010d", i); +- vs.push_back(buff); +- } +- for (size_t i = 0; i < vs.size(); i++) { +- for (size_t j = 0; j < vs.size(); j++) { +- std::string & ls = vs[i]; +- std::string & rs = vs[j]; +- size_t m = std::min(ls.length(), rs.length()); +- int c = memcmp(ls.c_str(), rs.c_str(), m); +- int t = fmemcmp(ls.c_str(), rs.c_str(), m); +- if (!((c == 0 && t == 0) || (c > 0 && t > 0) || (c < 0 && t < 0))) { +- ASSERT_TRUE(false); +- } +- } +- } +-} +- +-static int test_memcmp() { +- uint8_t buff[2048]; +- for (uint32_t i = 0; i < 2048; i++) { +- buff[i] = i & 0xff; +- } +- std::random_shuffle(buff, buff + 2048); +- int r = 0; +- for (uint32_t i = 0; i < 100000000; i++) { +- int offset = i % 1000; +- r += memcmp(buff, buff + 1024, 5); +- r += memcmp(buff + offset, buff + 1124, 9); +- r += memcmp(buff + offset, buff + 1224, 10); +- r += memcmp(buff + offset, buff + 1324, 15); +- r += memcmp(buff + offset, buff + 1424, 16); +- r += memcmp(buff + offset, buff + 1524, 17); +- r += memcmp(buff + offset, buff + 1624, 18); +- r += memcmp(buff + offset, buff + 1724, 19); +- } +- return r; +-} +- +-static int test_fmemcmp() { +- char buff[2048]; +- for (uint32_t i = 0; i < 2048; i++) { +- buff[i] = i & 0xff; +- } +- std::random_shuffle(buff, buff + 2048); +- int r = 0; +- for (uint32_t i = 0; i < 100000000; i++) { +- int offset = i % 1000; +- r += fmemcmp(buff, buff + 1024, 5); +- r += fmemcmp(buff + offset, buff + 1124, 9); +- r += fmemcmp(buff + offset, buff + 1224, 10); +- r += fmemcmp(buff + offset, buff + 1324, 15); +- r += fmemcmp(buff + offset, buff + 1424, 16); +- r += fmemcmp(buff + offset, buff + 1524, 17); +- r += fmemcmp(buff + offset, buff + 1624, 18); +- r += fmemcmp(buff + offset, buff + 1724, 19); +- } +- return r; +-} +- +-TEST(Perf, fmemcmp) { +- Timer t; +- int a = test_memcmp(); +- LOG("%s", t.getInterval(" memcmp ").c_str()); +- t.reset(); +- int b = test_fmemcmp(); 
+- LOG("%s", t.getInterval(" fmemcmp ").c_str()); +- // prevent compiler optimization +- TestConfig.setInt("tempvalue", a + b); +-} +- +-static void test_memcpy_perf_len(char * src, char * dest, size_t len, size_t time) { +- for (size_t i = 0; i < time; i++) { +- memcpy(src, dest, len); +- memcpy(dest, src, len); +- } +-} +- +-static void test_simple_memcpy_perf_len(char * src, char * dest, size_t len, size_t time) { +- for (size_t i = 0; i < time; i++) { +- simple_memcpy(src, dest, len); +- simple_memcpy(dest, src, len); +- } +-} +- +-TEST(Perf, simple_memcpy_small) { ++TEST(Perf, memcpy_small) { + char * src = new char[10240]; + char * dest = new char[10240]; + char buff[32]; +@@ -117,11 +26,10 @@ TEST(Perf, simple_memcpy_small) { + LOG("------------------------------"); + snprintf(buff, 32, " memcpy %luB\t", len); + Timer t; +- test_memcpy_perf_len(src, dest, len, 1000000); +- LOG("%s", t.getInterval(buff).c_str()); +- snprintf(buff, 32, "simple_memcpy %luB\t", len); +- t.reset(); +- test_simple_memcpy_perf_len(src, dest, len, 1000000); ++ for (size_t i = 0; i < 1000000; i++) { ++ memcpy(src, dest, len); ++ memcpy(dest, src, len); ++ } + LOG("%s", t.getInterval(buff).c_str()); + } + delete[] src; +@@ -293,11 +201,6 @@ TEST(Perf, memcpy_batch) { + memcpy(dest, src, size); + } + LOG("%s", t.getSpeedM("memcpy", mb).c_str()); +- t.reset(); +- for (size_t i = 0; i < mb; i += size) { +- simple_memcpy(dest, src, size); +- } +- LOG("%s", t.getSpeedM("simple_memcpy", mb).c_str()); + delete[] src; + delete[] dest; + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestSort.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestSort.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestSort.cc +++ 
b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_TestSort.cc @@ -0,0 +1,29 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestSort.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestSort.cc +@@ -121,7 +121,7 @@ static int compare_offset2(const void * plh, const voi + KVBuffer * rhb = (KVBuffer*)get_position(*(uint32_t*)prh); + + uint32_t minlen = std::min(lhb->keyLength, rhb->keyLength); +- int64_t ret = fmemcmp(lhb->content, rhb->content, minlen); ++ int64_t ret = memcmp(lhb->content, rhb->content, minlen); + if (ret) { + return ret; + } +@@ -139,7 +139,7 @@ class CompareOffset2 { + KVBuffer * rhb = (KVBuffer*)get_position(rhs); + + uint32_t minlen = std::min(lhb->keyLength, rhb->keyLength); +- int64_t ret = fmemcmp(lhb->content, rhb->content, minlen); ++ int64_t ret = memcmp(lhb->content, rhb->content, minlen); + if (ret) { + return ret; + } +@@ -158,7 +158,7 @@ class OffsetLessThan2 { + KVBuffer * rhb = (KVBuffer*)get_position(rhs); + + uint32_t minlen = std::min(lhb->keyLength, rhb->keyLength); +- int64_t ret = fmemcmp(lhb->content, rhb->content, minlen); ++ int64_t ret = memcmp(lhb->content, rhb->content, minlen); + return ret < 0 || (ret == 0 && (lhb->keyLength < rhb->keyLength)); + } + }; diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestKVBuffer.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestKVBuffer.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestKVBuffer.cc +++ 
b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestKVBuffer.cc @@ -0,0 +1,13 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestKVBuffer.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestKVBuffer.cc +@@ -43,8 +43,8 @@ TEST(KVBuffer, test) { + ASSERT_EQ(8, kv1->getKey() - buff); + ASSERT_EQ(strlen(KEY) + 8, kv1->getValue() - buff); + +- kv1->keyLength = bswap(kv1->keyLength); +- kv1->valueLength = bswap(kv1->valueLength); ++ kv1->keyLength = bswap32(kv1->keyLength); ++ kv1->valueLength = bswap32(kv1->valueLength); + + ASSERT_EQ(8, kv1->headerLength()); + ASSERT_EQ(strlen(KEY) + strlen(VALUE) + 8, kv1->lengthConvertEndium()); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemBlockIterator.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemBlockIterator.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemBlockIterator.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemBlockIterator.cc @@ -0,0 +1,11 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestMemBlockIterator.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestMemBlockIterator.cc +@@ -59,7 +59,7 @@ class MemoryBlockFactory { + kv->keyLength = 4; + kv->valueLength = 4; + uint32_t * key = (uint32_t *)kv->getKey(); +- *key 
= bswap(index); ++ *key = bswap32(index); + } + return block1; + } diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemoryBlock.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemoryBlock.cc --- a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemoryBlock.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestMemoryBlock.cc @@ -0,0 +1,23 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestMemoryBlock.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestMemoryBlock.cc +@@ -85,17 +85,17 @@ TEST(MemoryBlock, sort) { + medium->keyLength = 4; + medium->valueLength = 4; + uint32_t * mediumKey = (uint32_t *)medium->getKey(); +- *mediumKey = bswap(MEDIUM); ++ *mediumKey = bswap32(MEDIUM); + + small->keyLength = 4; + small->valueLength = 4; + uint32_t * smallKey = (uint32_t *)small->getKey(); +- *smallKey = bswap(SMALL); ++ *smallKey = bswap32(SMALL); + + big->keyLength = 4; + big->valueLength = 4; + uint32_t * bigKey = (uint32_t *)big->getKey(); +- *bigKey = bswap(BIG); ++ *bigKey = bswap32(BIG); + + ComparatorPtr bytesComparator = NativeTask::get_comparator(BytesType, NULL); + block.sort(CPPSORT, bytesComparator); diff --git a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestPartitionBucket.cc b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestPartitionBucket.cc --- 
a/hadoop3.orig/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestPartitionBucket.cc +++ b/hadoop3/files/patch-hadoop-mapreduce-project_hadoop-mapreduce-client_hadoop-mapreduce-client-nativetask_src_main_native_test_lib_TestPartitionBucket.cc @@ -0,0 +1,78 @@ +--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestPartitionBucket.cc.orig 2018-10-18 18:38:39 UTC ++++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/lib/TestPartitionBucket.cc +@@ -129,15 +129,15 @@ TEST(PartitionBucket, sort) { + const uint32_t BIG = 1000; + + kv1->keyLength = 4; +- *((uint32_t *)kv1->getKey()) = bswap(BIG); ++ *((uint32_t *)kv1->getKey()) = bswap32(BIG); + kv1->valueLength = KV_SIZE - kv1->headerLength() - kv1->keyLength; + + kv2->keyLength = 4; +- *((uint32_t *)kv2->getKey()) = bswap(SMALL); ++ *((uint32_t *)kv2->getKey()) = bswap32(SMALL); + kv2->valueLength = KV_SIZE - kv2->headerLength() - kv2->keyLength; + + kv3->keyLength = 4; +- *((uint32_t *)kv3->getKey()) = bswap(MEDIUM); ++ *((uint32_t *)kv3->getKey()) = bswap32(MEDIUM); + kv3->valueLength = KV_SIZE - kv3->headerLength() - kv3->keyLength; + + bucket->sort(DUALPIVOTSORT); +@@ -148,13 +148,13 @@ TEST(PartitionBucket, sort) { + Buffer value; + iter->next(key, value); + +- ASSERT_EQ(SMALL, bswap(*(uint32_t * )key.data())); ++ ASSERT_EQ(SMALL, bswap32(*(uint32_t * )key.data())); + + iter->next(key, value); +- ASSERT_EQ(MEDIUM, bswap(*(uint32_t * )key.data())); ++ ASSERT_EQ(MEDIUM, bswap32(*(uint32_t * )key.data())); + + iter->next(key, value); +- ASSERT_EQ(BIG, bswap(*(uint32_t * )key.data())); ++ ASSERT_EQ(BIG, bswap32(*(uint32_t * )key.data())); + + delete iter; + delete bucket; +@@ -181,15 +181,15 @@ TEST(PartitionBucket, spill) { + const uint32_t BIG = 1000; + + kv1->keyLength = 4; +- *((uint32_t *)kv1->getKey()) = bswap(BIG); ++ 
*((uint32_t *)kv1->getKey()) = bswap32(BIG); + kv1->valueLength = KV_SIZE - KVBuffer::headerLength() - kv1->keyLength; + + kv2->keyLength = 4; +- *((uint32_t *)kv2->getKey()) = bswap(SMALL); ++ *((uint32_t *)kv2->getKey()) = bswap32(SMALL); + kv2->valueLength = KV_SIZE - KVBuffer::headerLength() - kv2->keyLength; + + kv3->keyLength = 4; +- *((uint32_t *)kv3->getKey()) = bswap(MEDIUM); ++ *((uint32_t *)kv3->getKey()) = bswap32(MEDIUM); + kv3->valueLength = KV_SIZE - KVBuffer::headerLength() - kv3->keyLength; + + bucket->sort(DUALPIVOTSORT); +@@ -203,17 +203,17 @@ TEST(PartitionBucket, spill) { + KVBuffer * first = (KVBuffer *)writer.buff(); + ASSERT_EQ(4, first->keyLength); + ASSERT_EQ(KV_SIZE - KVBuffer::headerLength() - 4, first->valueLength); +- ASSERT_EQ(bswap(SMALL), (*(uint32_t * )(first->getKey()))); ++ ASSERT_EQ(bswap32(SMALL), (*(uint32_t * )(first->getKey()))); + + KVBuffer * second = first->next(); + ASSERT_EQ(4, second->keyLength); + ASSERT_EQ(KV_SIZE - KVBuffer::headerLength() - 4, second->valueLength); +- ASSERT_EQ(bswap(MEDIUM), (*(uint32_t * )(second->getKey()))); ++ ASSERT_EQ(bswap32(MEDIUM), (*(uint32_t * )(second->getKey()))); + + KVBuffer * third = second->next(); + ASSERT_EQ(4, third->keyLength); + ASSERT_EQ(KV_SIZE - KVBuffer::headerLength() - 4, third->valueLength); +- ASSERT_EQ(bswap(BIG), (*(uint32_t * )(third->getKey()))); ++ ASSERT_EQ(bswap32(BIG), (*(uint32_t * )(third->getKey()))); + + delete [] buff; + delete bucket; diff --git a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_pom.xml b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_pom.xml --- a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_pom.xml +++ b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_pom.xml @@ -0,0 +1,30 @@ +--- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml.orig 2019-07-27 18:33:18 UTC ++++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml +@@ -311,6 +311,27 @@ + + + ++ ++ native-bsd ++ ++ ++ FreeBSD ++ ++ ++ ++ ++ ++ org.apache.maven.plugins ++ maven-surefire-plugin ++ ++ ++ org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.** ++ ++ ++ ++ ++ ++ + + + diff --git a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_java_org_apache_hadoop_yarn_server_nodemanager_DefaultContainerExecutor.java b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_java_org_apache_hadoop_yarn_server_nodemanager_DefaultContainerExecutor.java --- a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_java_org_apache_hadoop_yarn_server_nodemanager_DefaultContainerExecutor.java +++ b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_java_org_apache_hadoop_yarn_server_nodemanager_DefaultContainerExecutor.java @@ -0,0 +1,29 @@ +--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java.orig 2018-10-19 02:30:34 UTC ++++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +@@ -480,8 +480,8 @@ public class DefaultContainerExecutor ex + String exitCodeFile = ContainerLaunch.getExitCodeFile( + pidFile.toString()); + String tmpFile = exitCodeFile + ".tmp"; +- pout.println("#!/bin/bash"); +- pout.println("/bin/bash \"" + sessionScriptPath.toString() + "\""); ++ pout.println("#!/usr/local/bin/bash"); ++ pout.println("/usr/local/bin/bash \"" + 
sessionScriptPath.toString() + "\""); + pout.println("rc=$?"); + pout.println("echo $rc > \"" + tmpFile + "\""); + pout.println("/bin/mv -f \"" + tmpFile + "\" \"" + exitCodeFile + "\""); +@@ -497,12 +497,12 @@ public class DefaultContainerExecutor ex + // We need to do a move as writing to a file is not atomic + // Process reading a file being written to may get garbled data + // hence write pid to tmp file first followed by a mv +- pout.println("#!/bin/bash"); + pout.println(); + pout.println("echo $$ > " + pidFile.toString() + ".tmp"); + pout.println("/bin/mv -f " + pidFile.toString() + ".tmp " + pidFile); +- String exec = Shell.isSetsidAvailable? "exec setsid" : "exec"; +- pout.printf("%s /bin/bash \"%s\"", exec, launchDst.toUri().getPath()); ++ String exec = Shell.isSetsidAvailable? "exec setsid" : "exec"; ++ pout.printf("%s /usr/local/bin/bash \"%s\"", exec, launchDst.toUri().getPath()); + } + lfs.setPermission(sessionScriptPath, + ContainerExecutor.TASK_LAUNCH_SCRIPT_PERMISSION); diff --git a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_impl_configuration.c b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_impl_configuration.c --- a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_impl_configuration.c +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c +@@ -27,9 +27,11 @@ + #include + #include + #include ++#include + #include + #include + #include ++#include + + #define MAX_SIZE 10 + diff --git a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_test_test__configuration.cc b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_test_test__configuration.cc --- a/hadoop3.orig/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_test_test__configuration.cc +++ b/hadoop3/files/patch-hadoop-yarn-project_hadoop-yarn_hadoop-yarn-server_hadoop-yarn-server-nodemanager_src_main_native_container-executor_test_test__configuration.cc @@ -0,0 +1,10 @@ +--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test_configuration.cc.orig 2018-10-18 18:38:40 UTC ++++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test_configuration.cc +@@ -18,6 +18,7 @@ + + #include + #include ++#include + + extern "C" { + #include "util.h" diff --git a/hadoop3.orig/files/resourcemanager.in b/hadoop3/files/resourcemanager.in --- a/hadoop3.orig/files/resourcemanager.in +++ b/hadoop3/files/resourcemanager.in @@ -0,0 +1,39 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: resourcemanager +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# resourcemanager_enable (bool): Set to NO by default. +# Set it to YES to enable resourcemanager. + +. 
/etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=resourcemanager +rcvar=resourcemanager_enable + +load_rc_config "${name}" + +: ${resourcemanager_enable:=NO} +: ${resourcemanager_user:=%%MAPRED_USER%%} + +command="%%PREFIX%%/sbin/yarn-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start resourcemanager' + +stop_cmd=resourcemanager_stop +start_postcmd="start_postcmd" +status_precmd=find_pid + +resourcemanager_stop () { + su -m ${resourcemanager_user} -c "${command} --config %%ETCDIR%% stop resourcemanager" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/secondarynamenode.in b/hadoop3/files/secondarynamenode.in --- a/hadoop3.orig/files/secondarynamenode.in +++ b/hadoop3/files/secondarynamenode.in @@ -0,0 +1,32 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: secondarynamenode +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# secondarynamenode_enable (bool): Set to NO by default. +# Set it to YES to enable secondarynamenode. + +. /etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=secondarynamenode +rcvar=secondarynamenode_enable + +load_rc_config "${name}" + +: ${secondarynamenode_enable:=NO} +: ${secondarynamenode_user:=%%HDFS_USER%%} + +command="%%PREFIX%%/sbin/hadoop-daemon.sh" +command_args='--config %%ETCDIR%% start secondarynamenode' + +stop_cmd=secondarynamenode_stop + +secondarynamenode_stop () { + su -m ${secondarynamenode_user} -c "${command} --config %%ETCDIR%% stop secondarynamenode" +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/webappproxyserver.in b/hadoop3/files/webappproxyserver.in --- a/hadoop3.orig/files/webappproxyserver.in +++ b/hadoop3/files/webappproxyserver.in @@ -0,0 +1,32 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: webappproxyserver +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# webappproxyserver_enable (bool): Set to NO by default. 
+# Set it to YES to enable webappproxyserver. + +. /etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=webappproxyserver +rcvar=webappproxyserver_enable + +load_rc_config "${name}" + +: ${webappproxyserver_enable:=NO} +: ${webappproxyserver_user:=%%MAPRED_USER%%} + +command="%%PREFIX%%/sbin/yarn-daemon.sh" +command_args='--config %%ETCDIR%% start proxyserver' + +stop_cmd=webappproxyserver_stop + +webappproxyserver_stop () { + su -m ${webappproxyserver_user} -c "${command} --config %%ETCDIR%% stop proxyserver" +} + +run_rc_command "$1" diff --git a/hadoop3.orig/files/zkfc.in b/hadoop3/files/zkfc.in --- a/hadoop3.orig/files/zkfc.in +++ b/hadoop3/files/zkfc.in @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# +# PROVIDE: zkfc +# REQUIRE: LOGIN +# KEYWORD: shutdown +# +# zkfc_enable (bool): Set to NO by default. +# Set it to YES to enable zkfc. + +. /etc/rc.subr + +export PATH=${PATH}:%%LOCALBASE%%/bin +name=zkfc +rcvar=zkfc_enable + +load_rc_config "${name}" + +: ${zkfc_enable:=NO} +: ${zkfc_user:=%%HDFS_USER%%} + +command="%%PREFIX%%/sbin/hadoop-daemon.sh" +command_interpreter_execution="%%JAVA_HOME%%/bin/java" +command_args='--config %%ETCDIR%% start zkfc' + +stop_cmd=zkfc_stop +status_precmd=find_pid + +zkfc_stop () { + su -m ${zkfc_user} -c "${command} --config %%ETCDIR%% stop zkfc" +} + +find_pid () { + rc_pid=$(check_pidfile $pidfile $command_interpreter_execution) +} + +run_rc_command "$1" diff --git a/hadoop3.orig/pkg-descr b/hadoop3/pkg-descr --- a/hadoop3.orig/pkg-descr +++ b/hadoop3/pkg-descr @@ -0,0 +1,5 @@ +The Apache Hadoop software library is a framework that allows for the +distributed processing of large data sets across clusters of computers +using a simple programming model. 
+ +WWW: http://hadoop.apache.org/ diff --git a/hadoop3.orig/pkg-plist b/hadoop3/pkg-plist --- a/hadoop3.orig/pkg-plist +++ b/hadoop3/pkg-plist @@ -0,0 +1,581 @@ +bin/%%HADOOP_GROUP%% +bin/%%HDFS_USER%% +bin/%%MAPRED_USER%% +bin/yarn +%%ETCDIR%%/core-site.xml +%%ETCDIR%%/httpfs-env.sh +%%ETCDIR%%/kms-env.sh +%%ETCDIR%%/log4j.properties +include/Pipes.hh +include/SerialUtils.hh +include/StringUtils.hh +include/TemplateFactory.hh +include/%%HDFS_USER%%.h +lib/lib%%HADOOP_GROUP%%.a +lib/lib%%HADOOP_GROUP%%.so +lib/lib%%HADOOP_GROUP%%.so.1.0.0 +lib/lib%%HADOOP_GROUP%%pipes.a +lib/lib%%HADOOP_GROUP%%utils.a +lib/lib%%HDFS_USER%%.so +lib/lib%%HDFS_USER%%.so.0.0.0 +lib/lib%%HDFS_USER%%pp.so +lib/lib%%HDFS_USER%%pp.so.0.1.0 +lib/libnativetask.a +lib/libnativetask.so +lib/libnativetask.so.1.0.0 +libexec/%%HADOOP_GROUP%%-config.sh +libexec/%%HADOOP_GROUP%%-functions.sh +libexec/%%HADOOP_GROUP%%-layout.sh +libexec/%%HADOOP_GROUP%%-layout.sh.example +libexec/%%HDFS_USER%%-config.sh +libexec/%%MAPRED_USER%%-config.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-aliyun.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-archive-logs.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-archives.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-aws.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-azure-datalake.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-azure.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-distcp.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-extras.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-gridmix.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-%%HDFS_USER%%.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-httpfs.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-kafka.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-kms.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-openstack.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-rumen.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-s3guard.sh +libexec/shellprofile.d/%%HADOOP_GROUP%%-streaming.sh 
+libexec/shellprofile.d/%%HADOOP_GROUP%%-yarn.sh +libexec/tools/%%HADOOP_GROUP%%-archive-logs.sh +libexec/tools/%%HADOOP_GROUP%%-archives.sh +libexec/tools/%%HADOOP_GROUP%%-aws.sh +libexec/tools/%%HADOOP_GROUP%%-distcp.sh +libexec/tools/%%HADOOP_GROUP%%-extras.sh +libexec/tools/%%HADOOP_GROUP%%-gridmix.sh +libexec/tools/%%HADOOP_GROUP%%-resourceestimator.sh +libexec/tools/%%HADOOP_GROUP%%-rumen.sh +libexec/tools/%%HADOOP_GROUP%%-sls.sh +libexec/tools/%%HADOOP_GROUP%%-streaming.sh +libexec/yarn-config.sh +sbin/FederationStateStore/MySQL/FederationStateStoreDatabase.sql +sbin/FederationStateStore/MySQL/FederationStateStoreStoredProcs.sql +sbin/FederationStateStore/MySQL/FederationStateStoreTables.sql +sbin/FederationStateStore/MySQL/FederationStateStoreUser.sql +sbin/FederationStateStore/MySQL/dropDatabase.sql +sbin/FederationStateStore/MySQL/dropStoreProcedures.sql +sbin/FederationStateStore/MySQL/dropTables.sql +sbin/FederationStateStore/MySQL/dropUser.sql +sbin/FederationStateStore/SQLServer/FederationStateStoreStoreProcs.sql +sbin/FederationStateStore/SQLServer/FederationStateStoreTables.sql +sbin/distribute-exclude.sh +sbin/%%HADOOP_GROUP%%-daemon.sh +sbin/%%HADOOP_GROUP%%-daemons.sh +sbin/httpfs.sh +sbin/kms.sh +sbin/mr-jobhistory-daemon.sh +sbin/refresh-namenodes.sh +sbin/start-all.sh +sbin/start-balancer.sh +sbin/start-dfs.sh +sbin/start-secure-dns.sh +sbin/start-yarn.sh +sbin/stop-all.sh +sbin/stop-balancer.sh +sbin/stop-dfs.sh +sbin/stop-secure-dns.sh +sbin/stop-yarn.sh +sbin/workers.sh +sbin/yarn-daemon.sh +sbin/yarn-daemons.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/capacity-scheduler.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/configuration.xsl +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/container-executor.cfg +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/core-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%HADOOP_GROUP%%-env.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%HADOOP_GROUP%%-metrics2.properties +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%HADOOP_GROUP%%-policy.xml 
+%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%HADOOP_GROUP%%-user-functions.sh.example +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%HDFS_USER%%-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/httpfs-env.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/httpfs-log4j.properties +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/httpfs-signature.secret +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/httpfs-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/kms-acls.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/kms-env.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/kms-log4j.properties +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/kms-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/log4j.properties +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%MAPRED_USER%%-env.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%MAPRED_USER%%-queues.xml.template +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/%%MAPRED_USER%%-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/shellprofile.d/example.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/ssl-client.xml.example +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/ssl-server.xml.example +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/user_ec_policies.xml.template +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/workers +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/yarn-env.sh +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/yarn-site.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/conf/yarnservice-log4j.properties +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/core-default.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/distcp-default.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/%%HDFS_USER%%-default.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/httpfs-default.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/%%MAPRED_USER%%-default.xml +%%PORTEXAMPLES%%%%EXAMPLESDIR%%/yarn-default.xml +%%DATADIR%%/client/%%HADOOP_GROUP%%-client-api-%%PORTVERSION%%.jar +%%DATADIR%%/client/%%HADOOP_GROUP%%-client-minicluster-%%PORTVERSION%%.jar +%%DATADIR%%/client/%%HADOOP_GROUP%%-client-runtime-%%PORTVERSION%%.jar +%%DATADIR%%/common/%%HADOOP_GROUP%%-common-%%PORTVERSION%%-tests.jar +%%DATADIR%%/common/%%HADOOP_GROUP%%-common-%%PORTVERSION%%.jar 
+%%DATADIR%%/common/%%HADOOP_GROUP%%-kms-%%PORTVERSION%%.jar +%%DATADIR%%/common/%%HADOOP_GROUP%%-nfs-%%PORTVERSION%%.jar +%%DATADIR%%/common/jdiff/Apache_Hadoop_Common_2.6.0.xml +%%DATADIR%%/common/jdiff/Apache_Hadoop_Common_2.7.2.xml +%%DATADIR%%/common/jdiff/Apache_Hadoop_Common_2.8.0.xml +%%DATADIR%%/common/jdiff/Apache_Hadoop_Common_2.8.2.xml +%%DATADIR%%/common/jdiff/Apache_Hadoop_Common_2.8.3.xml +%%DATADIR%%/common/jdiff/Null.java +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%-core_0.20.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%-core_0.21.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%-core_0.22.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.17.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.18.1.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.18.2.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.18.3.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.19.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.19.1.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.19.2.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.20.0.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.20.1.xml +%%DATADIR%%/common/jdiff/%%HADOOP_GROUP%%_0.20.2.xml +%%DATADIR%%/common/lib/accessors-smart-1.2.jar +%%DATADIR%%/common/lib/asm-5.0.4.jar +%%DATADIR%%/common/lib/audience-annotations-0.5.0.jar +%%DATADIR%%/common/lib/avro-1.7.7.jar +%%DATADIR%%/common/lib/commons-beanutils-1.9.3.jar +%%DATADIR%%/common/lib/commons-cli-1.2.jar +%%DATADIR%%/common/lib/commons-codec-1.11.jar +%%DATADIR%%/common/lib/commons-collections-3.2.2.jar +%%DATADIR%%/common/lib/commons-compress-1.4.1.jar +%%DATADIR%%/common/lib/commons-configuration2-2.1.1.jar +%%DATADIR%%/common/lib/commons-io-2.5.jar +%%DATADIR%%/common/lib/commons-lang3-3.7.jar +%%DATADIR%%/common/lib/commons-logging-1.1.3.jar +%%DATADIR%%/common/lib/commons-math3-3.1.1.jar +%%DATADIR%%/common/lib/commons-net-3.6.jar +%%DATADIR%%/common/lib/commons-text-1.4.jar +%%DATADIR%%/common/lib/curator-client-2.12.0.jar 
+%%DATADIR%%/common/lib/curator-framework-2.12.0.jar +%%DATADIR%%/common/lib/curator-recipes-2.12.0.jar +%%DATADIR%%/common/lib/dnsjava-2.1.7.jar +%%DATADIR%%/common/lib/gson-2.2.4.jar +%%DATADIR%%/common/lib/guava-11.0.2.jar +%%DATADIR%%/common/lib/%%HADOOP_GROUP%%-annotations-%%PORTVERSION%%.jar +%%DATADIR%%/common/lib/%%HADOOP_GROUP%%-auth-%%PORTVERSION%%.jar +%%DATADIR%%/common/lib/htrace-core4-4.1.0-incubating.jar +%%DATADIR%%/common/lib/httpclient-4.5.2.jar +%%DATADIR%%/common/lib/httpcore-4.4.4.jar +%%DATADIR%%/common/lib/jackson-annotations-2.9.5.jar +%%DATADIR%%/common/lib/jackson-core-2.9.5.jar +%%DATADIR%%/common/lib/jackson-core-asl-1.9.13.jar +%%DATADIR%%/common/lib/jackson-databind-2.9.5.jar +%%DATADIR%%/common/lib/jackson-jaxrs-1.9.13.jar +%%DATADIR%%/common/lib/jackson-mapper-asl-1.9.13.jar +%%DATADIR%%/common/lib/jackson-xc-1.9.13.jar +%%DATADIR%%/common/lib/javax.servlet-api-3.1.0.jar +%%DATADIR%%/common/lib/jaxb-api-2.2.11.jar +%%DATADIR%%/common/lib/jaxb-impl-2.2.3-1.jar +%%DATADIR%%/common/lib/jcip-annotations-1.0-1.jar +%%DATADIR%%/common/lib/jersey-core-1.19.jar +%%DATADIR%%/common/lib/jersey-json-1.19.jar +%%DATADIR%%/common/lib/jersey-server-1.19.jar +%%DATADIR%%/common/lib/jersey-servlet-1.19.jar +%%DATADIR%%/common/lib/jettison-1.1.jar +%%DATADIR%%/common/lib/jetty-http-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-io-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-security-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-server-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-servlet-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-util-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-webapp-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jetty-xml-9.3.24.v20180605.jar +%%DATADIR%%/common/lib/jsch-0.1.54.jar +%%DATADIR%%/common/lib/json-smart-2.3.jar +%%DATADIR%%/common/lib/jsp-api-2.1.jar +%%DATADIR%%/common/lib/jsr305-3.0.0.jar +%%DATADIR%%/common/lib/jsr311-api-1.1.1.jar +%%DATADIR%%/common/lib/jul-to-slf4j-1.7.25.jar 
+%%DATADIR%%/common/lib/kerb-admin-1.0.1.jar +%%DATADIR%%/common/lib/kerb-client-1.0.1.jar +%%DATADIR%%/common/lib/kerb-common-1.0.1.jar +%%DATADIR%%/common/lib/kerb-core-1.0.1.jar +%%DATADIR%%/common/lib/kerb-crypto-1.0.1.jar +%%DATADIR%%/common/lib/kerb-identity-1.0.1.jar +%%DATADIR%%/common/lib/kerb-server-1.0.1.jar +%%DATADIR%%/common/lib/kerb-simplekdc-1.0.1.jar +%%DATADIR%%/common/lib/kerb-util-1.0.1.jar +%%DATADIR%%/common/lib/kerby-asn1-1.0.1.jar +%%DATADIR%%/common/lib/kerby-config-1.0.1.jar +%%DATADIR%%/common/lib/kerby-pkix-1.0.1.jar +%%DATADIR%%/common/lib/kerby-util-1.0.1.jar +%%DATADIR%%/common/lib/kerby-xdr-1.0.1.jar +%%DATADIR%%/common/lib/log4j-1.2.17.jar +%%DATADIR%%/common/lib/metrics-core-3.2.4.jar +%%DATADIR%%/common/lib/netty-3.10.5.Final.jar +%%DATADIR%%/common/lib/nimbus-jose-jwt-4.41.1.jar +%%DATADIR%%/common/lib/paranamer-2.3.jar +%%DATADIR%%/common/lib/protobuf-java-2.5.0.jar +%%DATADIR%%/common/lib/re2j-1.1.jar +%%DATADIR%%/common/lib/slf4j-api-1.7.25.jar +%%DATADIR%%/common/lib/slf4j-log4j12-1.7.25.jar +%%DATADIR%%/common/lib/snappy-java-1.0.5.jar +%%DATADIR%%/common/lib/stax2-api-3.1.4.jar +%%DATADIR%%/common/lib/token-provider-1.0.1.jar +%%DATADIR%%/common/lib/woodstox-core-5.0.3.jar +%%DATADIR%%/common/lib/xz-1.0.jar +%%DATADIR%%/common/lib/zookeeper-3.4.13.jar +%%DATADIR%%/common/webapps/static/%%HADOOP_GROUP%%.css.gz +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-%%PORTVERSION%%-tests.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-%%PORTVERSION%%.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-client-%%PORTVERSION%%-tests.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-client-%%PORTVERSION%%.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-httpfs-%%PORTVERSION%%.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-native-client-%%PORTVERSION%%-tests.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-native-client-%%PORTVERSION%%.jar 
+%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-nfs-%%PORTVERSION%%.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-rbf-%%PORTVERSION%%-tests.jar +%%DATADIR%%/%%HDFS_USER%%/%%HADOOP_GROUP%%-%%HDFS_USER%%-rbf-%%PORTVERSION%%.jar +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.6.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.7.2.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.8.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.8.2.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.8.3.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_2.9.1.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.0-alpha2.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.0-alpha3.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.0-alpha4.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.1.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.2.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.0.3.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.1.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Apache_Hadoop_HDFS_3.1.1.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/Null.java +%%DATADIR%%/%%HDFS_USER%%/jdiff/%%HADOOP_GROUP%%-%%HDFS_USER%%_0.20.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/%%HADOOP_GROUP%%-%%HDFS_USER%%_0.21.0.xml +%%DATADIR%%/%%HDFS_USER%%/jdiff/%%HADOOP_GROUP%%-%%HDFS_USER%%_0.22.0.xml +%%DATADIR%%/%%HDFS_USER%%/lib/accessors-smart-1.2.jar +%%DATADIR%%/%%HDFS_USER%%/lib/asm-5.0.4.jar +%%DATADIR%%/%%HDFS_USER%%/lib/audience-annotations-0.5.0.jar +%%DATADIR%%/%%HDFS_USER%%/lib/avro-1.7.7.jar +%%DATADIR%%/%%HDFS_USER%%/lib/commons-beanutils-1.9.3.jar +%%DATADIR%%/%%HDFS_USER%%/lib/commons-cli-1.2.jar +%%DATADIR%%/%%HDFS_USER%%/lib/commons-codec-1.11.jar +%%DATADIR%%/%%HDFS_USER%%/lib/commons-collections-3.2.2.jar +%%DATADIR%%/%%HDFS_USER%%/lib/commons-compress-1.4.1.jar 
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-configuration2-2.1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-daemon-1.0.13.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-io-2.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-lang3-3.7.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-logging-1.1.3.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-math3-3.1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-net-3.6.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/commons-text-1.4.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/curator-client-2.12.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/curator-framework-2.12.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/curator-recipes-2.12.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/dnsjava-2.1.7.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/gson-2.2.4.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/guava-11.0.2.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/%%HADOOP_GROUP%%-annotations-%%PORTVERSION%%.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/%%HADOOP_GROUP%%-auth-%%PORTVERSION%%.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/htrace-core4-4.1.0-incubating.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/httpclient-4.5.2.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/httpcore-4.4.4.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-annotations-2.9.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-core-2.9.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-core-asl-1.9.13.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-databind-2.9.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-jaxrs-1.9.13.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-mapper-asl-1.9.13.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jackson-xc-1.9.13.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/javax.servlet-api-3.1.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jaxb-api-2.2.11.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jaxb-impl-2.2.3-1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jcip-annotations-1.0-1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jersey-core-1.19.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jersey-json-1.19.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jersey-server-1.19.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jersey-servlet-1.19.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jettison-1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-http-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-io-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-security-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-server-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-servlet-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-util-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-util-ajax-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-webapp-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jetty-xml-9.3.24.v20180605.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jsch-0.1.54.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/json-simple-1.1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/json-smart-2.3.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jsr305-3.0.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/jsr311-api-1.1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-admin-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-client-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-common-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-core-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-crypto-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-identity-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-server-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-simplekdc-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerb-util-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerby-asn1-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerby-config-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerby-pkix-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerby-util-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/kerby-xdr-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/leveldbjni-all-1.8.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/log4j-1.2.17.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/netty-3.10.5.Final.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/netty-all-4.0.52.Final.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/nimbus-jose-jwt-4.41.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/okhttp-2.7.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/okio-1.6.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/paranamer-2.3.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/protobuf-java-2.5.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/re2j-1.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/snappy-java-1.0.5.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/stax2-api-3.1.4.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/token-provider-1.0.1.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/woodstox-core-5.0.3.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/xz-1.0.jar
+%%DATADIR%%/%%HDFS_USER%%/lib/zookeeper-3.4.13.jar
+%%DATADIR%%/%%HDFS_USER%%/webapps/datanode/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/datanode/datanode.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/datanode/dn.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/datanode/index.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/datanode/robots.txt
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/dfshealth.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/dfshealth.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/explorer.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/explorer.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/index.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/%%HDFS_USER%%/robots.txt
+%%DATADIR%%/%%HDFS_USER%%/webapps/journal/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/journal/index.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/journal/robots.txt
+%%DATADIR%%/%%HDFS_USER%%/webapps/nfs3/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/router/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/router/federationhealth.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/router/federationhealth.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/router/index.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/router/robots.txt
+%%DATADIR%%/%%HDFS_USER%%/webapps/secondary/WEB-INF/web.xml
+%%DATADIR%%/%%HDFS_USER%%/webapps/secondary/index.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/secondary/robots.txt
+%%DATADIR%%/%%HDFS_USER%%/webapps/secondary/snn.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/secondary/status.html
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-editable.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-editable.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.map
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.map
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/npm.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/bootstrap-3.3.7/js/npm.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/d3-v4.1.1.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/d3-v4.1.1.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dataTables.bootstrap.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dataTables.bootstrap.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dataTables.bootstrap.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dataTables.bootstrap.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dfs-dust.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dfs-dust.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dust-full-2.0.0.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dust-full-2.0.0.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dust-helpers-1.1.1.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/dust-helpers-1.1.1.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/%%HADOOP_GROUP%%.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/%%HADOOP_GROUP%%.css.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/jquery-3.3.1.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/jquery-3.3.1.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/jquery.dataTables.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/jquery.dataTables.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/json-bignum.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/json-bignum.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/moment.min.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/moment.min.js.gz
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/rbf.css
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/rest-csrf.js
+%%DATADIR%%/%%HDFS_USER%%/webapps/static/rest-csrf.js.gz
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-app-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-common-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-core-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-hs-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-hs-plugins-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-jobclient-%%PORTVERSION%%-tests.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-jobclient-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-nativetask-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-shuffle-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-client-uploader-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/%%HADOOP_GROUP%%-%%MAPRED_USER%%uce-examples-%%PORTVERSION%%.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_2.6.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_2.7.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_2.8.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_2.8.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_2.8.3.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Common_3.1.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_2.6.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_2.7.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_2.8.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_2.8.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_2.8.3.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_Core_3.1.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_2.6.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_2.7.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_2.8.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_2.8.2.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_2.8.3.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Apache_Hadoop_MapReduce_JobClient_3.1.0.xml
+%%DATADIR%%/%%MAPRED_USER%%uce/jdiff/Null.java
+%%DATADIR%%/%%MAPRED_USER%%uce/lib-examples/hsqldb-2.3.4.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/lib/hamcrest-core-1.3.jar
+%%DATADIR%%/%%MAPRED_USER%%uce/lib/junit-4.11.jar
+%%DATADIR%%/tools/lib/aliyun-sdk-oss-2.8.3.jar
+%%DATADIR%%/tools/lib/aws-java-sdk-bundle-1.11.375.jar
+%%DATADIR%%/tools/lib/azure-data-lake-store-sdk-2.2.9.jar
+%%DATADIR%%/tools/lib/azure-keyvault-core-1.0.0.jar
+%%DATADIR%%/tools/lib/azure-storage-7.0.0.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-aliyun-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-archive-logs-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-archives-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-aws-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-azure-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-azure-datalake-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-datajoin-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-distcp-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-extras-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-fs2img-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-gridmix-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-kafka-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-openstack-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-resourceestimator-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-rumen-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-sls-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/%%HADOOP_GROUP%%-streaming-%%PORTVERSION%%.jar
+%%DATADIR%%/tools/lib/jdom-1.1.jar
+%%DATADIR%%/tools/lib/kafka-clients-0.8.2.1.jar
+%%DATADIR%%/tools/lib/lz4-1.2.0.jar
+%%DATADIR%%/tools/lib/ojalgo-43.0.jar
+%%DATADIR%%/tools/lib/wildfly-openssl-1.0.4.Final.jar
+%%DATADIR%%/tools/resourceestimator/bin/estimator.cmd
+%%DATADIR%%/tools/resourceestimator/bin/estimator.sh
+%%DATADIR%%/tools/resourceestimator/bin/start-estimator.cmd
+%%DATADIR%%/tools/resourceestimator/bin/start-estimator.sh
+%%DATADIR%%/tools/resourceestimator/bin/stop-estimator.cmd
+%%DATADIR%%/tools/resourceestimator/bin/stop-estimator.sh
+%%DATADIR%%/tools/resourceestimator/conf/resourceestimator-config.xml
+%%DATADIR%%/tools/resourceestimator/data/resourceEstimatorService.txt
+%%DATADIR%%/tools/sls/bin/rumen2sls.sh
+%%DATADIR%%/tools/sls/bin/slsrun.sh
+%%DATADIR%%/tools/sls/html/css/bootstrap-responsive.min.css
+%%DATADIR%%/tools/sls/html/css/bootstrap.min.css
+%%DATADIR%%/tools/sls/html/js/thirdparty/bootstrap.min.js
+%%DATADIR%%/tools/sls/html/js/thirdparty/d3-LICENSE
+%%DATADIR%%/tools/sls/html/js/thirdparty/d3.v3.js
+%%DATADIR%%/tools/sls/html/js/thirdparty/jquery.js
+%%DATADIR%%/tools/sls/html/showSimulationTrace.html
+%%DATADIR%%/tools/sls/html/simulate.html.template
+%%DATADIR%%/tools/sls/html/simulate.info.html.template
+%%DATADIR%%/tools/sls/html/track.html.template
+%%DATADIR%%/tools/sls/sample-conf/capacity-scheduler.xml
+%%DATADIR%%/tools/sls/sample-conf/fair-scheduler.xml
+%%DATADIR%%/tools/sls/sample-conf/log4j.properties
+%%DATADIR%%/tools/sls/sample-conf/sls-runner.xml
+%%DATADIR%%/tools/sls/sample-conf/yarn-site.xml
+%%DATADIR%%/tools/sls/sample-data/2jobs2min-rumen-jh.json
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-api-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-applications-distributedshell-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-applications-unmanaged-am-launcher-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-client-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-common-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-registry-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-applicationhistoryservice-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-common-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-nodemanager-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-resourcemanager-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-router-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-sharedcachemanager-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-tests-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-timeline-pluginstorage-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-server-web-proxy-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-services-api-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-services-core-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/%%HADOOP_GROUP%%-yarn-submarine-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/lib/HikariCP-java7-2.4.12.jar
+%%DATADIR%%/yarn/lib/aopalliance-1.0.jar
+%%DATADIR%%/yarn/lib/ehcache-3.3.1.jar
+%%DATADIR%%/yarn/lib/fst-2.50.jar
+%%DATADIR%%/yarn/lib/geronimo-jcache_1.0_spec-1.0-alpha-1.jar
+%%DATADIR%%/yarn/lib/guice-4.0.jar
+%%DATADIR%%/yarn/lib/guice-servlet-4.0.jar
+%%DATADIR%%/yarn/lib/jackson-jaxrs-base-2.9.5.jar
+%%DATADIR%%/yarn/lib/jackson-jaxrs-json-provider-2.9.5.jar
+%%DATADIR%%/yarn/lib/jackson-module-jaxb-annotations-2.9.5.jar
+%%DATADIR%%/yarn/lib/java-util-1.9.0.jar
+%%DATADIR%%/yarn/lib/javax.inject-1.jar
+%%DATADIR%%/yarn/lib/jersey-client-1.19.jar
+%%DATADIR%%/yarn/lib/jersey-guice-1.19.jar
+%%DATADIR%%/yarn/lib/json-io-2.5.1.jar
+%%DATADIR%%/yarn/lib/metrics-core-3.2.4.jar
+%%DATADIR%%/yarn/lib/mssql-jdbc-6.2.1.jre7.jar
+%%DATADIR%%/yarn/lib/objenesis-1.0.jar
+%%DATADIR%%/yarn/lib/snakeyaml-1.16.jar
+%%DATADIR%%/yarn/lib/swagger-annotations-1.5.4.jar
+%%DATADIR%%/yarn/test/%%HADOOP_GROUP%%-yarn-server-tests-%%PORTVERSION%%-tests.jar
+%%DATADIR%%/yarn/timelineservice/%%HADOOP_GROUP%%-yarn-server-timelineservice-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/timelineservice/%%HADOOP_GROUP%%-yarn-server-timelineservice-hbase-client-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/timelineservice/%%HADOOP_GROUP%%-yarn-server-timelineservice-hbase-common-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/timelineservice/%%HADOOP_GROUP%%-yarn-server-timelineservice-hbase-coprocessor-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/timelineservice/lib/commons-csv-1.0.jar
+%%DATADIR%%/yarn/timelineservice/lib/commons-lang-2.6.jar
+%%DATADIR%%/yarn/timelineservice/lib/hbase-annotations-1.2.6.jar
+%%DATADIR%%/yarn/timelineservice/lib/hbase-client-1.2.6.jar
+%%DATADIR%%/yarn/timelineservice/lib/hbase-common-1.2.6.jar
+%%DATADIR%%/yarn/timelineservice/lib/hbase-protocol-1.2.6.jar
+%%DATADIR%%/yarn/timelineservice/lib/htrace-core-3.1.0-incubating.jar
+%%DATADIR%%/yarn/timelineservice/lib/jcodings-1.0.13.jar
+%%DATADIR%%/yarn/timelineservice/lib/joni-2.1.2.jar
+%%DATADIR%%/yarn/timelineservice/lib/metrics-core-2.2.0.jar
+%%DATADIR%%/yarn/timelineservice/test/%%HADOOP_GROUP%%-yarn-server-timelineservice-hbase-tests-%%PORTVERSION%%.jar
+%%DATADIR%%/yarn/yarn-service-examples/httpd-no-dns/httpd-no-dns.json
+%%DATADIR%%/yarn/yarn-service-examples/httpd-no-dns/httpd-proxy-no-dns.conf
+%%DATADIR%%/yarn/yarn-service-examples/httpd/httpd-proxy.conf
+%%DATADIR%%/yarn/yarn-service-examples/httpd/httpd.json
+%%DATADIR%%/yarn/yarn-service-examples/sleeper/sleeper.json
+@dir %%HADOOP_LOGDIR%%
+@dir %%HADOOP_RUNDIR%%