This is an automated email from the ASF dual-hosted git repository.
adonisling pushed a commit to branch hadoop-3.3.4
in repository https://gitbox.apache.org/repos/asf/doris-thirdparty.git
The following commit(s) were added to refs/heads/hadoop-3.3.4 by this push:
new ae4a4428 [chore] Fix the workflow (#84)
ae4a4428 is described below
commit ae4a442849764b81965acc8f579d736f0192d246
Author: Adonis Ling <[email protected]>
AuthorDate: Tue May 30 22:06:50 2023 +0800
[chore] Fix the workflow (#84)
Fix several issues with the GitHub workflows: quote and validate the paths used in build.sh, exit with 255 instead of -1, pass the OpenSSL prefix to the native build, and add LIBRARY_ONLY and CHANGE_FIND_LIBRARY_SUFFIXES CMake options so the native modules can be built without their test and example targets.
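For context, a minimal usage sketch of the patched build.sh (the install prefix below is a placeholder; THIRDPARTY_INSTALLED is normally exported from env.sh, not set by hand):

    # Hypothetical invocation; the path is a placeholder, not part of this commit.
    export THIRDPARTY_INSTALLED=/path/to/doris-thirdparty/installed
    ./build.sh
    # On success the script reports two output trees:
    #   hadoop-dist/target/hadoop-3.3.4          (full dist package)
    #   hadoop-dist/target/hadoop-libhdfs-3.3.4  (libhdfs-only package)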
---
build.sh | 36 +++++-----
.../hadoop-hdfs-native-client/src/CMakeLists.txt | 41 +++++++----
.../src/CMakeLists.txt | 82 ++++++++++++----------
hadoop-tools/hadoop-pipes/src/CMakeLists.txt | 30 ++++----
4 files changed, 108 insertions(+), 81 deletions(-)
diff --git a/build.sh b/build.sh
index 8eb02ecb..8d84735c 100755
--- a/build.sh
+++ b/build.sh
@@ -29,34 +29,34 @@ export SRC_HOME="${ROOT}"
. "${SRC_HOME}/env.sh"
if [[ -z "${THIRDPARTY_INSTALLED}" ]]; then
- echo "Must set 'THIRDPARTY_INSTALLED' in env.sh"
- exit -1
+ echo "Must set 'THIRDPARTY_INSTALLED' in env.sh"
+ exit 255
fi
-DIST_DIR=${SRC_HOME}/hadoop-dist/target/hadoop-3.3.4/
-LIBHDFS_DIST_DIR=${SRC_HOME}/hadoop-dist/target/hadoop-libhdfs-3.3.4/
-rm -rf ${DIST_DIR}
-rm -rf ${LIBHDFS_DIST_DIR}
+DIST_DIR="${SRC_HOME}/hadoop-dist/target/hadoop-3.3.4"
+LIBHDFS_DIST_DIR="${SRC_HOME}/hadoop-dist/target/hadoop-libhdfs-3.3.4"
+rm -rf "${DIST_DIR}"
+rm -rf "${LIBHDFS_DIST_DIR}"
export PATH=${THIRDPARTY_INSTALLED}/bin:$PATH
-mvn clean package -Pnative,dist -DskipTests -Dmaven.javadoc.skip=true -Dthirdparty.installed=${THIRDPARTY_INSTALLED}/ -Dopenssl.lib=${THIRDPARTY_INSTALLED}/lib/ -e
+mvn clean package -Pnative,dist -DskipTests -Dmaven.javadoc.skip=true \
+    -Dthirdparty.installed="${THIRDPARTY_INSTALLED}" -Dopenssl.prefix="${THIRDPARTY_INSTALLED}" -e
if [[ ! -d "${DIST_DIR}" ]]; then
- echo "${DIST_DIR} is missing. Build failed."
- exit -1
+ echo "${DIST_DIR} is missing. Build failed."
+ exit 255
fi
echo "Finished. Begin to pacakge for libhdfs..."
-mkdir -p ${LIBHDFS_DIST_DIR}/common
-mkdir -p ${LIBHDFS_DIST_DIR}/hdfs
-mkdir -p ${LIBHDFS_DIST_DIR}/include
-mkdir -p ${LIBHDFS_DIST_DIR}/native
-cp -r ${DIST_DIR}/share/hadoop/common/* ${LIBHDFS_DIST_DIR}/common/
-cp -r ${DIST_DIR}/share/hadoop/hdfs/* ${LIBHDFS_DIST_DIR}/hdfs/
-cp -r ${DIST_DIR}/include/hdfs.h ${LIBHDFS_DIST_DIR}/include/
-cp -r ${DIST_DIR}/lib/native/libhdfs.a ${LIBHDFS_DIST_DIR}/native/
+mkdir -p "${LIBHDFS_DIST_DIR}/common"
+mkdir -p "${LIBHDFS_DIST_DIR}/hdfs"
+mkdir -p "${LIBHDFS_DIST_DIR}/include"
+mkdir -p "${LIBHDFS_DIST_DIR}/native"
+cp -r "${DIST_DIR}/share/hadoop/common"/* "${LIBHDFS_DIST_DIR}/common"/
+cp -r "${DIST_DIR}/share/hadoop/hdfs"/* "${LIBHDFS_DIST_DIR}/hdfs"/
+cp -r "${DIST_DIR}/include/hdfs.h" "${LIBHDFS_DIST_DIR}/include"/
+cp -r "${DIST_DIR}/lib/native/libhdfs.a" "${LIBHDFS_DIST_DIR}/native"/
echo "Done!"
echo "The full dist package is under: ${DIST_DIR}"
echo "The LIBHDFS dist package is under: ${LIBHDFS_DIST_DIR}"
-
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
index bcee6f9f..02b25d40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
@@ -18,6 +18,9 @@
cmake_minimum_required(VERSION 3.1 FATAL_ERROR)
+option(CHANGE_FIND_LIBRARY_SUFFIXES "Change suffixes to find library" OFF)
+option(LIBRARY_ONLY "Build libraries only" ON)
+
enable_testing()
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/../../../hadoop-common-project/hadoop-common)
@@ -79,28 +82,36 @@ endif()
include(HadoopJNI)
function(build_libhdfs_test NAME LIBRARY)
- set(FILES)
- foreach(FIL ${ARGN})
- if (IS_ABSOLUTE ${FIL})
- list(APPEND FILES ${FIL})
- else()
- list(APPEND FILES ${CMAKE_SOURCE_DIR}/main/native/libhdfs-tests/${FIL})
- endif()
- endforeach()
- add_executable("${NAME}_${LIBRARY}" ${FILES})
+ if (NOT LIBRARY_ONLY)
+ set(FILES)
+ foreach(FIL ${ARGN})
+ if (IS_ABSOLUTE ${FIL})
+ list(APPEND FILES ${FIL})
+ else()
+ list(APPEND FILES ${CMAKE_SOURCE_DIR}/main/native/libhdfs-tests/${FIL})
+ endif()
+ endforeach()
+ add_executable("${NAME}_${LIBRARY}" ${FILES})
+ endif()
endfunction()
function(add_libhdfs_test NAME LIBRARY)
- add_test("test_${NAME}_${LIBRARY}" "${NAME}_${LIBRARY}")
+ if (NOT LIBRARY_ONLY)
+ add_test("test_${NAME}_${LIBRARY}" "${NAME}_${LIBRARY}")
+ endif()
endfunction()
function(link_libhdfs_test NAME LIBRARY)
-target_link_libraries("${NAME}_${LIBRARY}" ${LIBRARY} ${ARGN})
+ if (NOT LIBRARY_ONLY)
+ target_link_libraries("${NAME}_${LIBRARY}" ${LIBRARY} ${ARGN})
+ endif()
endfunction()
set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-hadoop_set_find_shared_library_without_version()
+if (CHANGE_FIND_LIBRARY_SUFFIXES)
+ hadoop_set_find_shared_library_without_version()
+endif()
set(OPENSSL_NAME "crypto")
if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
SET(OPENSSL_NAME "eay32")
@@ -147,8 +158,10 @@ else()
endif()
add_subdirectory(main/native/libhdfs)
-add_subdirectory(main/native/libhdfs-tests)
-add_subdirectory(main/native/libhdfs-examples)
+if (NOT LIBRARY_ONLY)
+ add_subdirectory(main/native/libhdfs-tests)
+ add_subdirectory(main/native/libhdfs-examples)
+endif()
# Temporary fix to disable Libhdfs++ build on older systems that do not support thread_local
include(CheckCXXSourceCompiles)
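For reference, a rough sketch of toggling the new options in a standalone CMake configure; illustrative only (the normal entry point is the Maven -Pnative profile, and a full build needs additional cache variables):

    # Illustrative only: restore the upstream behaviour by building the test/example
    # targets and calling hadoop_set_find_shared_library_without_version().
    cmake hadoop-hdfs-project/hadoop-hdfs-native-client/src \
          -DLIBRARY_ONLY=OFF \
          -DCHANGE_FIND_LIBRARY_SUFFIXES=ON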
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index b9a011ef..1a0384d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -18,6 +18,8 @@
cmake_minimum_required(VERSION 3.1 FATAL_ERROR)
+option(LIBRARY_ONLY "Build libraries only" ON)
+
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/../../../../hadoop-common-project/hadoop-common/)
include(HadoopCommon)
@@ -27,6 +29,9 @@ set(GTEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../../../../hadoop-common-project/hadoop-c
# Add extra compiler and linker flags.
# -Wno-sign-compare
hadoop_add_compiler_flags("-DNDEBUG -DSIMPLE_MEMCPY -fno-strict-aliasing -fsigned-char")
+if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
+ hadoop_add_compiler_flags(-Wno-reserved-user-defined-literal)
+endif()
set(CMAKE_CXX_STANDARD 11)
# Source location.
@@ -161,41 +166,43 @@ hadoop_add_dual_library(nativetask
target_link_libraries(nativetask ${NT_DEPEND_LIBRARY})
-add_library(gtest ${GTEST_SRC_DIR}/gtest-all.cc)
-set_target_properties(gtest PROPERTIES COMPILE_FLAGS "-w")
-add_executable(nttest
- ${SRC}/test/lib/TestByteArray.cc
- ${SRC}/test/lib/TestByteBuffer.cc
- ${SRC}/test/lib/TestComparatorForDualPivotQuickSort.cc
- ${SRC}/test/lib/TestComparatorForStdSort.cc
- ${SRC}/test/lib/TestFixSizeContainer.cc
- ${SRC}/test/lib/TestMemoryPool.cc
- ${SRC}/test/lib/TestIterator.cc
- ${SRC}/test/lib/TestKVBuffer.cc
- ${SRC}/test/lib/TestMemBlockIterator.cc
- ${SRC}/test/lib/TestMemoryBlock.cc
- ${SRC}/test/lib/TestPartitionBucket.cc
- ${SRC}/test/lib/TestReadBuffer.cc
- ${SRC}/test/lib/TestReadWriteBuffer.cc
- ${SRC}/test/util/TestChecksum.cc
- ${SRC}/test/util/TestStringUtil.cc
- ${SRC}/test/util/TestWritableUtils.cc
- ${SRC}/test/TestCommand.cc
- ${SRC}/test/TestConfig.cc
- ${SRC}/test/TestCounter.cc
- ${SRC}/test/TestCompressions.cc
- ${SRC}/test/TestFileSystem.cc
- ${SRC}/test/TestIFile.cc
- ${SRC}/test/TestPrimitives.cc
- ${SRC}/test/TestSort.cc
- ${SRC}/test/TestMain.cc
- ${SRC}/test/test_commons.cc)
-
-target_link_libraries(nttest
- nativetask_static
- gtest
- ${NT_DEPEND_LIBRARY}
-)
+if (NOT LIBRARY_ONLY)
+ add_library(gtest ${GTEST_SRC_DIR}/gtest-all.cc)
+ set_target_properties(gtest PROPERTIES COMPILE_FLAGS "-w")
+ add_executable(nttest
+ ${SRC}/test/lib/TestByteArray.cc
+ ${SRC}/test/lib/TestByteBuffer.cc
+ ${SRC}/test/lib/TestComparatorForDualPivotQuickSort.cc
+ ${SRC}/test/lib/TestComparatorForStdSort.cc
+ ${SRC}/test/lib/TestFixSizeContainer.cc
+ ${SRC}/test/lib/TestMemoryPool.cc
+ ${SRC}/test/lib/TestIterator.cc
+ ${SRC}/test/lib/TestKVBuffer.cc
+ ${SRC}/test/lib/TestMemBlockIterator.cc
+ ${SRC}/test/lib/TestMemoryBlock.cc
+ ${SRC}/test/lib/TestPartitionBucket.cc
+ ${SRC}/test/lib/TestReadBuffer.cc
+ ${SRC}/test/lib/TestReadWriteBuffer.cc
+ ${SRC}/test/util/TestChecksum.cc
+ ${SRC}/test/util/TestStringUtil.cc
+ ${SRC}/test/util/TestWritableUtils.cc
+ ${SRC}/test/TestCommand.cc
+ ${SRC}/test/TestConfig.cc
+ ${SRC}/test/TestCounter.cc
+ ${SRC}/test/TestCompressions.cc
+ ${SRC}/test/TestFileSystem.cc
+ ${SRC}/test/TestIFile.cc
+ ${SRC}/test/TestPrimitives.cc
+ ${SRC}/test/TestSort.cc
+ ${SRC}/test/TestMain.cc
+ ${SRC}/test/test_commons.cc)
+
+ target_link_libraries(nttest
+ nativetask_static
+ gtest
+ ${NT_DEPEND_LIBRARY}
+ )
+endif()
# By embedding '$ORIGIN' into the RPATH of libnativetask.so, dlopen will look in
# the directory containing libnativetask.so. However, $ORIGIN is not supported by
@@ -207,4 +214,7 @@ endif()
set(LIBNATIVETASK_VERSION "1.0.0")
set_target_properties(nativetask PROPERTIES SOVERSION ${LIBNATIVETASK_VERSION})
hadoop_dual_output_directory(nativetask target/usr/local/lib)
-hadoop_output_directory(nttest test)
+
+if (NOT LIBRARY_ONLY)
+ hadoop_output_directory(nttest test)
+endif()
diff --git a/hadoop-tools/hadoop-pipes/src/CMakeLists.txt b/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
index ce6ee317..88180795 100644
--- a/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
+++ b/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
@@ -18,6 +18,8 @@
cmake_minimum_required(VERSION 3.1 FATAL_ERROR)
+option(LIBRARY_ONLY "Build libraries only" ON)
+
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/../../../hadoop-common-project/hadoop-common)
include(HadoopCommon)
@@ -49,22 +51,24 @@ include_directories(
${OPENSSL_INCLUDE_DIR}
)
-# Example programs
-add_executable(wordcount-simple main/native/examples/impl/wordcount-simple.cc)
-target_link_libraries(wordcount-simple hadooppipes hadooputils)
-hadoop_output_directory(wordcount-simple examples)
+if (NOT LIBRARY_ONLY)
+ # Example programs
+ add_executable(wordcount-simple main/native/examples/impl/wordcount-simple.cc)
+ target_link_libraries(wordcount-simple hadooppipes hadooputils)
+ hadoop_output_directory(wordcount-simple examples)
-add_executable(wordcount-part main/native/examples/impl/wordcount-part.cc)
-target_link_libraries(wordcount-part hadooppipes hadooputils)
-hadoop_output_directory(wordcount-part examples)
+ add_executable(wordcount-part main/native/examples/impl/wordcount-part.cc)
+ target_link_libraries(wordcount-part hadooppipes hadooputils)
+ hadoop_output_directory(wordcount-part examples)
-add_executable(wordcount-nopipe main/native/examples/impl/wordcount-nopipe.cc)
-target_link_libraries(wordcount-nopipe hadooppipes hadooputils)
-hadoop_output_directory(wordcount-nopipe examples)
+ add_executable(wordcount-nopipe main/native/examples/impl/wordcount-nopipe.cc)
+ target_link_libraries(wordcount-nopipe hadooppipes hadooputils)
+ hadoop_output_directory(wordcount-nopipe examples)
-add_executable(pipes-sort main/native/examples/impl/sort.cc)
-target_link_libraries(pipes-sort hadooppipes hadooputils)
-hadoop_output_directory(pipes-sort examples)
+ add_executable(pipes-sort main/native/examples/impl/sort.cc)
+ target_link_libraries(pipes-sort hadooppipes hadooputils)
+ hadoop_output_directory(pipes-sort examples)
+endif()
add_library(hadooputils STATIC
main/native/utils/impl/StringUtils.cc
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]