This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 303d5ac3f7 Revert "[enhancement](thirdparty) upgrade thirdparty libs (#23290)" (#23420)
303d5ac3f7 is described below
commit 303d5ac3f7f32ba99c9f81a243c6694c0156eaaf
Author: Dongyang Li <[email protected]>
AuthorDate: Thu Aug 24 14:40:51 2023 +0800
Revert "[enhancement](thirdparty) upgrade thirdparty libs (#23290)" (#23420)
This reverts commit 2185268bab2f5890a2c3c9a3c315375a971c4416.
Co-authored-by: stephen <[email protected]>
---
thirdparty/CHANGELOG.md | 17 -
thirdparty/build-thirdparty.sh | 169 +++-------
thirdparty/download-thirdparty.sh | 40 +--
thirdparty/patches/absl.patch | 7 +-
...arrow-12.0.1.patch => apache-arrow-7.0.0.patch} | 52 +--
thirdparty/patches/brpc-1.5.0-clang16.patch | 22 ++
thirdparty/patches/brpc-1.5.0-gcc13.patch | 12 +
thirdparty/patches/glog-0.6.0.patch | 354 ---------------------
...-1.10.0.patch => opentelemetry-cpp-1.8.3.patch} | 97 +++---
thirdparty/vars.sh | 126 ++++----
10 files changed, 232 insertions(+), 664 deletions(-)
diff --git a/thirdparty/CHANGELOG.md b/thirdparty/CHANGELOG.md
index be6bfb8fdd..c90363fcb7 100644
--- a/thirdparty/CHANGELOG.md
+++ b/thirdparty/CHANGELOG.md
@@ -2,23 +2,6 @@
This file contains version of the third-party dependency libraries in the
build-env image. The docker build-env image is apache/doris, and the tag is
`build-env-${version}`
-## v20230411
-- Modified: protobuf 3.15.0 -> 21.11
-- Modified: glog 0.4.0 -> 0.6.0
-- Modified: lz4 1.9.3 -> 1.9.4
-- Modified: curl 7.79.0 -> 8.2.1
-- Modified: brpc 1.4.0 -> 1.6.0
-- Modified: zstd 1.5.2 -> 1.5.5
-- Modified: arrow 7.0.0 -> 13.0.0
-- Modified: abseil 20220623.1 -> 20230125.3
-- Modified: orc 1.7.2 -> 1.9.0
-- Modified: jemalloc for arrow 5.2.1 -> 5.3.0
-- Modified: xsimd 7.0.0 -> 13.0.0
-- Modified: opentelemetry-proto 0.19.0 -> 1.0.0
-- Modified: opentelemetry 1.8.3 -> 1.10.0
-- Added: c-ares -> 1.19.1
-- Added: grpc -> 1.54.3
-
## v20230721
- Modified hadoop libhdfs 3.3.4.4 -> 3.3.4.5
diff --git a/thirdparty/build-thirdparty.sh b/thirdparty/build-thirdparty.sh
index fa329f48de..ec8aa049b9 100755
--- a/thirdparty/build-thirdparty.sh
+++ b/thirdparty/build-thirdparty.sh
@@ -416,6 +416,19 @@ build_thrift() {
build_protobuf() {
check_if_source_exist "${PROTOBUF_SOURCE}"
cd "${TP_SOURCE_DIR}/${PROTOBUF_SOURCE}"
+ rm -fr gmock
+
+ # NOTE(amos): -Wl,--undefined=pthread_create force searching for pthread symbols.
+ # See https://stackoverflow.com/a/65348893/1329147 for detailed explanation.
+ mkdir gmock
+ cd gmock
+ tar xf "${TP_SOURCE_DIR}/${GTEST_NAME}"
+
+ mv "${GTEST_SOURCE}" gtest
+
+ cd "${TP_SOURCE_DIR}/${PROTOBUF_SOURCE}"
+
+ ./autogen.sh
if [[ "${KERNEL}" == 'Darwin' ]]; then
ldflags="-L${TP_LIB_DIR}"
@@ -423,20 +436,21 @@ build_protobuf() {
ldflags="-L${TP_LIB_DIR} -static-libstdc++ -static-libgcc
-Wl,--undefined=pthread_create"
fi
- mkdir -p cmake/build
- cd cmake/build
-
- CXXFLAGS="-O2 -I${TP_INCLUDE_DIR}" \
+ CXXFLAGS="-fPIC -O2 -I${TP_INCLUDE_DIR}" \
LDFLAGS="${ldflags}" \
- "${CMAKE_CMD}" -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_PREFIX_PATH="${TP_INSTALL_DIR}" \
- -Dprotobuf_USE_EXTERNAL_GTEST=ON \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -Dprotobuf_BUILD_SHARED_LIBS=OFF \
- -Dprotobuf_BUILD_TESTS=OFF \
- -Dprotobuf_WITH_ZLIB_DEFAULT=ON \
- -Dprotobuf_ABSL_PROVIDER=package \
- -DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" ../..
+ ./configure --prefix="${TP_INSTALL_DIR}" --disable-shared --enable-static --with-zlib="${TP_INSTALL_DIR}/include"
+
+ # ATTN: If protoc is not built fully statically the linktime libc may newer than runtime.
+ # This will casue protoc cannot run
+ # If you really need to dynamically link protoc, please set the environment variable DYN_LINK_PROTOC=1
+
+ if [[ "${DYN_LINK_PROTOC:-0}" == "1" || "${KERNEL}" == 'Darwin' ]]; then
+ echo "link protoc dynamiclly"
+ else
+ cd src
+ sed -i 's/^AM_LDFLAGS\(.*\)$/AM_LDFLAGS\1 -all-static/' Makefile
+ cd -
+ fi
make -j "${PARALLEL}"
make install
@@ -467,28 +481,16 @@ build_glog() {
check_if_source_exist "${GLOG_SOURCE}"
cd "${TP_SOURCE_DIR}/${GLOG_SOURCE}"
- if [[ "${GLOG_SOURCE}" == "glog-0.4.0" ]]; then
- # to generate config.guess and config.sub to support aarch64
- rm -rf config.*
- autoreconf -i
-
- CPPFLAGS="-I${TP_INCLUDE_DIR} -fpermissive -fPIC" \
- LDFLAGS="-L${TP_LIB_DIR}" \
- ./configure --prefix="${TP_INSTALL_DIR}" --enable-frame-pointers --disable-shared --enable-static
+ # to generate config.guess and config.sub to support aarch64
+ rm -rf config.*
+ autoreconf -i
- make -j "${PARALLEL}"
- make install
- elif [[ "${GLOG_SOURCE}" == "glog-0.6.0" ]]; then
+ CPPFLAGS="-I${TP_INCLUDE_DIR} -fpermissive -fPIC" \
LDFLAGS="-L${TP_LIB_DIR}" \
- "${CMAKE_CMD}" -S . -B build -G "Unix Makefiles"
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
- -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DWITH_UNWIND=OFF \
- -DBUILD_SHARED_LIBS=OFF
-
- cmake --build build --target install
- fi
+ ./configure --prefix="${TP_INSTALL_DIR}" --enable-frame-pointers --disable-shared --enable-static
+ make -j "${PARALLEL}"
+ make install
strip_lib libglog.a
}
@@ -671,8 +673,7 @@ build_re2() {
check_if_source_exist "${RE2_SOURCE}"
cd "${TP_SOURCE_DIR}/${RE2_SOURCE}"
- "${CMAKE_CMD}" -DCMAKE_BUILD_TYPE=Release -G "${GENERATOR}"
-DBUILD_SHARED_LIBS=0 -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCMAKE_PREFIX_PATH="${TP_INSTALL_DIR}"
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}"
+ "${CMAKE_CMD}" -DCMAKE_BUILD_TYPE=Release -G "${GENERATOR}"
-DBUILD_SHARED_LIBS=0 -DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}"
"${BUILD_SYSTEM}" -j "${PARALLEL}" install
strip_lib libre2.a
}
@@ -812,12 +813,8 @@ build_brpc() {
# Currently, BRPC can't be built for static libraries only (without .so). Therefore, we should add `-fPIC`
# to the dependencies which are required by BRPC. Dependencies: zlib, glog, protobuf, leveldb
- # If BUILD_SHARED_LIBS=OFF, on centos 5.4 will error: `undefined reference to `google::FlagRegisterer`, no error on MacOS.
- # If glog is compiled before gflags, the above error will not exist, this works in glog 0.4,
- # but glog 0.6 enforces dependency on gflags.
- # glog must be enabled, otherwise error: `flag 'v' was defined more than once` (in files 'glog-0.6.0/src/vlog_is_on.cc' and 'brpc-1.6.0/src/butil/logging.cc')
LDFLAGS="${ldflags}" \
- "${CMAKE_CMD}" -G "${GENERATOR}" -DBUILD_SHARED_LIBS=ON -DWITH_GLOG=ON
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
+ "${CMAKE_CMD}" -G "${GENERATOR}" -DBUILD_SHARED_LIBS=1 -DWITH_GLOG=ON
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
-DCMAKE_LIBRARY_PATH="${TP_INSTALL_DIR}/lib64"
-DCMAKE_INCLUDE_PATH="${TP_INSTALL_DIR}/include" \
-DBUILD_BRPC_TOOLS=OFF \
-DPROTOBUF_PROTOC_EXECUTABLE="${TP_INSTALL_DIR}/bin/protoc" ..
@@ -942,56 +939,6 @@ build_flatbuffers() {
cp libflatbuffers.a ../../../installed/lib/libflatbuffers.a
}
-# c-ares
-build_cares() {
- check_if_source_exist "${CARES_SOURCE}"
- cd "${TP_SOURCE_DIR}/${CARES_SOURCE}"
-
- mkdir -p build
- cd build
- cmake -DCMAKE_BUILD_TYPE=Release \
- -DCARES_STATIC=ON \
- -DCARES_SHARED=OFF \
- -DCARES_STATIC_PIC=ON \
- -DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" ..
- make
- make install
-}
-
-# grpc
-build_grpc() {
- check_if_source_exist "${GRPC_SOURCE}"
- cd "${TP_SOURCE_DIR}/${GRPC_SOURCE}"
-
- mkdir -p cmake/build
- cd cmake/build
-
- cmake -DgRPC_INSTALL=ON \
- -DgRPC_BUILD_TESTS=OFF \
- -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
- -DgRPC_CARES_PROVIDER=package \
- -Dc-ares_DIR="${TP_INSTALL_DIR}" \
- -DgRPC_ABSL_PROVIDER=package \
- -Dabsl_DIR="${TP_INSTALL_DIR}" \
- -DgRPC_PROTOBUF_PROVIDER=package \
- -DProtobuf_DIR="${TP_INSTALL_DIR}" \
- -DgRPC_RE2_PROVIDER=package \
- -Dre2_DIR:STRING="${TP_INSTALL_DIR}" \
- -DgRPC_SSL_PROVIDER=package \
- -DOPENSSL_ROOT_DIR="${TP_INSTALL_DIR}" \
- -DgRPC_ZLIB_PROVIDER=package \
- -DZLIB_ROOT="${TP_INSTALL_DIR}" \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- ../..
-
- make -j "${PARALLEL}"
- make install
-
- # for grpc > v1.55, cmake 2.22 does not support find_dependency, delete this line after cmake version upgrade.
- # sed -i 's/find_dependency/find_package/g' "${TP_INSTALL_DIR}"/lib64/cmake/grpc/gRPCConfig.cmake
-}
-
# arrow
build_arrow() {
check_if_source_exist "${ARROW_SOURCE}"
@@ -1011,8 +958,6 @@ build_arrow() {
export ARROW_ZLIB_URL="${TP_SOURCE_DIR}/${ZLIB_NAME}"
export ARROW_XSIMD_URL="${TP_SOURCE_DIR}/${XSIMD_NAME}"
export ARROW_ORC_URL="${TP_SOURCE_DIR}/${ORC_NAME}"
- export ARROW_GRPC_URL="${TP_SOURCE_DIR}/${GRPC_NAME}"
- export ARROW_PROTOBUF_URL="${TP_SOURCE_DIR}/${PROTOBUF_NAME}"
if [[ "${KERNEL}" != 'Darwin' ]]; then
ldflags="-L${TP_LIB_DIR} -static-libstdc++ -static-libgcc"
@@ -1028,38 +973,22 @@ build_arrow() {
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
-DCMAKE_INSTALL_LIBDIR=lib64 \
-DARROW_BOOST_USE_SHARED=OFF \
- -DARROW_WITH_GRPC=ON \
- -DgRPC_SOURCE=SYSTEM \
- -DgRPC_ROOT="${TP_INSTALL_DIR}" \
- -DARROW_WITH_PROTOBUF=ON \
- -DProtobuf_SOURCE=SYSTEM \
- -DProtobuf_LIB="${TP_INSTALL_DIR}/lib/libprotoc.a" -DProtobuf_INCLUDE_DIR="${TP_INSTALL_DIR}/include" \
- -DARROW_FLIGHT=ON \
- -DARROW_FLIGHT_SQL=ON \
-DBoost_USE_STATIC_RUNTIME=ON \
-DARROW_GFLAGS_USE_SHARED=OFF \
-Dgflags_ROOT="${TP_INSTALL_DIR}" \
-DGLOG_ROOT="${TP_INSTALL_DIR}" \
-DRE2_ROOT="${TP_INSTALL_DIR}" \
- -DZLIB_SOURCE=SYSTEM \
-DZLIB_LIBRARY="${TP_INSTALL_DIR}/lib/libz.a" -DZLIB_INCLUDE_DIR="${TP_INSTALL_DIR}/include" \
- -DRapidJSON_SOURCE=SYSTEM \
-DRapidJSON_ROOT="${TP_INSTALL_DIR}" \
-DORC_ROOT="${TP_INSTALL_DIR}" \
- -Dxsimd_SOURCE=BUNDLED \
-DBrotli_SOURCE=BUNDLED \
- -DARROW_LZ4_USE_SHARED=OFF \
-DLZ4_LIB="${TP_INSTALL_DIR}/lib/liblz4.a" -DLZ4_INCLUDE_DIR="${TP_INSTALL_DIR}/include/lz4" \
-DLz4_SOURCE=SYSTEM \
- -DARROW_ZSTD_USE_SHARED=OFF \
-DZSTD_LIB="${TP_INSTALL_DIR}/lib/libzstd.a" -DZSTD_INCLUDE_DIR="${TP_INSTALL_DIR}/include" \
-Dzstd_SOURCE=SYSTEM \
-DSnappy_LIB="${TP_INSTALL_DIR}/lib/libsnappy.a" -DSnappy_INCLUDE_DIR="${TP_INSTALL_DIR}/include" \
-DSnappy_SOURCE=SYSTEM \
-DBOOST_ROOT="${TP_INSTALL_DIR}" --no-warn-unused-cli \
- -Djemalloc_SOURCE=BUNDLED \
- -DARROW_THRIFT_USE_SHARED=OFF \
- -DThrift_SOURCE=SYSTEM \
-DThrift_ROOT="${TP_INSTALL_DIR}" ..
"${BUILD_SYSTEM}" -j "${PARALLEL}"
@@ -1085,9 +1014,8 @@ build_abseil() {
-DABSL_ENABLE_INSTALL=ON \
-DBUILD_DEPS=ON \
-DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DABSL_PROPAGATE_CXX_STD=ON \
- -DBUILD_SHARED_LIBS=OFF
+ -DBUILD_SHARED_LIBS=OFF \
+ -DCMAKE_CXX_STANDARD=11
cmake --build "${BUILD_DIR}" -j "${PARALLEL}"
cmake --install "${BUILD_DIR}" --prefix "${TP_INSTALL_DIR}"
@@ -1599,19 +1527,8 @@ build_opentelemetry() {
mkdir -p "${BUILD_DIR}"
cd "${BUILD_DIR}"
- CXXFLAGS="-O2 -I${TP_INCLUDE_DIR}" \
- LDFLAGS="-L${TP_LIB_DIR}" \
- "${CMAKE_CMD}" -G "${GENERATOR}"
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}" \
- -DCMAKE_PREFIX_PATH="${TP_INSTALL_DIR}" \
- -DBUILD_TESTING=OFF \
- -DWITH_OTLP_GRPC=ON \
- -DBUILD_SHARED_LIBS=OFF \
- -DWITH_OTLP_HTTP=ON \
- -DWITH_ABSEIL=ON \
- -DWITH_FUNC_TESTS=OFF \
- -DWITH_ZIPKIN=ON \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DWITH_EXAMPLES=OFF ..
+ "${CMAKE_CMD}" -G "${GENERATOR}"
-DCMAKE_INSTALL_PREFIX="${TP_INSTALL_DIR}"
-DCMAKE_PREFIX_PATH="${TP_INSTALL_DIR}" -DBUILD_TESTING=OFF \
+ -DWITH_OTLP=ON -DWITH_OTLP_GRPC=OFF -DWITH_OTLP_HTTP=ON
-DWITH_ZIPKIN=ON -DWITH_EXAMPLES=OFF ..
"${BUILD_SYSTEM}" -j "${PARALLEL}"
"${BUILD_SYSTEM}" install
@@ -1712,11 +1629,10 @@ if [[ "${#packages[@]}" -eq 0 ]]; then
lzo2
zstd
boost # must before thrift
- abseil
+ protobuf
gflags
gtest
glog
- protobuf # after gtest
rapidjson
snappy
gperftools
@@ -1733,9 +1649,8 @@ if [[ "${#packages[@]}" -eq 0 ]]; then
librdkafka
flatbuffers
orc
- cares
- grpc # after cares, protobuf
arrow
+ abseil
s2
bitshuffle
croaringbitmap
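A note on the protobuf hunk above: the restored configure path forces -all-static into protoc's link line because, as the ATTN comment says, a protoc linked against the build host's shared libc may fail to start on an older runtime host. A minimal sketch, assuming a Linux build host and the ${TP_INSTALL_DIR} prefix these scripts use, of how one might confirm the result and opt back into dynamic linking:

    # expect "not a dynamic executable" for a fully static protoc
    ldd "${TP_INSTALL_DIR}/bin/protoc" || true
    # expect "statically linked" in the description
    file "${TP_INSTALL_DIR}/bin/protoc"

    # opting out, as the script's comment describes (assumes the script
    # accepts package names as arguments, as the packages array suggests)
    DYN_LINK_PROTOC=1 ./build-thirdparty.sh protobuf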
diff --git a/thirdparty/download-thirdparty.sh b/thirdparty/download-thirdparty.sh
index 39b97b2c0b..6a7e62d0b2 100755
--- a/thirdparty/download-thirdparty.sh
+++ b/thirdparty/download-thirdparty.sh
@@ -231,21 +231,12 @@ cd -
echo "Finished patching ${ABSEIL_SOURCE}"
# glog patch
-if [[ "${GLOG_SOURCE}" == "glog-0.4.0" ]]; then
- cd "${TP_SOURCE_DIR}/${GLOG_SOURCE}"
- if [[ ! -f "${PATCHED_MARK}" ]]; then
- patch -p1 <"${TP_PATCH_DIR}/glog-0.4.0.patch"
- touch "${PATCHED_MARK}"
- fi
- cd -
-elif [[ "${GLOG_SOURCE}" == "glog-0.6.0" ]]; then
- cd "${TP_SOURCE_DIR}/${GLOG_SOURCE}"
- if [[ ! -f "${PATCHED_MARK}" ]]; then
- patch -p1 <"${TP_PATCH_DIR}/glog-0.6.0.patch"
- touch "${PATCHED_MARK}"
- fi
- cd -
+cd "${TP_SOURCE_DIR}/${GLOG_SOURCE}"
+if [[ ! -f "${PATCHED_MARK}" ]]; then
+ patch -p1 <"${TP_PATCH_DIR}/glog-0.4.0.patch"
+ touch "${PATCHED_MARK}"
fi
+cd -
echo "Finished patching ${GLOG_SOURCE}"
# gtest patch
@@ -316,16 +307,15 @@ fi
echo "Finished patching ${ROCKSDB_SOURCE}"
# opentelemetry patch is used to solve the problem that threadlocal depends on GLIBC_2.18
-# fix error: unknown type name 'uint64_t'
# see: https://github.com/apache/doris/pull/7911
-if [[ "${OPENTELEMETRY_SOURCE}" == "opentelemetry-cpp-1.10.0" ]]; then
+if [[ "${OPENTELEMETRY_SOURCE}" == "opentelemetry-cpp-1.8.3" ]]; then
rm -rf "${TP_SOURCE_DIR}/${OPENTELEMETRY_SOURCE}/third_party/opentelemetry-proto"/*
cp -r "${TP_SOURCE_DIR}/${OPENTELEMETRY_PROTO_SOURCE}"/* "${TP_SOURCE_DIR}/${OPENTELEMETRY_SOURCE}/third_party/opentelemetry-proto"
mkdir -p "${TP_SOURCE_DIR}/${OPENTELEMETRY_SOURCE}/third_party/opentelemetry-proto/.git"
cd "${TP_SOURCE_DIR}/${OPENTELEMETRY_SOURCE}"
if [[ ! -f "${PATCHED_MARK}" ]]; then
- patch -p1 <"${TP_PATCH_DIR}/opentelemetry-cpp-1.10.0.patch"
+ patch -p1 <"${TP_PATCH_DIR}/opentelemetry-cpp-1.8.3.patch"
touch "${PATCHED_MARK}"
fi
cd -
@@ -333,10 +323,10 @@ fi
echo "Finished patching ${OPENTELEMETRY_SOURCE}"
# arrow patch is used to get the raw orc reader for filter prune.
-if [[ "${ARROW_SOURCE}" == "apache-arrow-12.0.1" ]]; then
+if [[ "${ARROW_SOURCE}" == "apache-arrow-7.0.0" ]]; then
cd "${TP_SOURCE_DIR}/${ARROW_SOURCE}"
if [[ ! -f "${PATCHED_MARK}" ]]; then
- patch -p1 <"${TP_PATCH_DIR}/apache-arrow-12.0.1.patch"
+ patch -p1 <"${TP_PATCH_DIR}/apache-arrow-7.0.0.patch"
touch "${PATCHED_MARK}"
fi
cd -
@@ -410,3 +400,15 @@ if [[ "${SIMDJSON_SOURCE}" = "simdjson-3.0.1" ]]; then
cd -
fi
echo "Finished patching ${SIMDJSON_SOURCE}"
+
+if [[ "${BRPC_SOURCE}" == 'brpc-1.4.0' ]]; then
+ cd "${TP_SOURCE_DIR}/${BRPC_SOURCE}"
+ if [[ ! -f "${PATCHED_MARK}" ]]; then
+ for patch_file in "${TP_PATCH_DIR}"/brpc-*; do
+ patch -p1 <"${patch_file}"
+ done
+ touch "${PATCHED_MARK}"
+ fi
+ cd -
+fi
+echo "Finished patching ${BRPC_SOURCE}"
diff --git a/thirdparty/patches/absl.patch b/thirdparty/patches/absl.patch
index 4a16368a9b..a5568ea6f7 100644
--- a/thirdparty/patches/absl.patch
+++ b/thirdparty/patches/absl.patch
@@ -1,11 +1,11 @@
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
-index d734676a..43216318 100644
+index 01f4e749..3ff8e798 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
-@@ -76,16 +76,6 @@ namespace absl {
+@@ -75,16 +75,6 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
-
+
-#ifdef ABSL_BTREE_ENABLE_GENERATIONS
-#error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
-#elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
@@ -18,3 +18,4 @@ index d734676a..43216318 100644
-
template <typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T &, const U &)>;
+
diff --git a/thirdparty/patches/apache-arrow-12.0.1.patch b/thirdparty/patches/apache-arrow-7.0.0.patch
similarity index 83%
rename from thirdparty/patches/apache-arrow-12.0.1.patch
rename to thirdparty/patches/apache-arrow-7.0.0.patch
index 9a4f12e14b..072ea9b567 100644
--- a/thirdparty/patches/apache-arrow-12.0.1.patch
+++ b/thirdparty/patches/apache-arrow-7.0.0.patch
@@ -1,30 +1,30 @@
diff --git a/cpp/src/arrow/adapters/orc/adapter.cc b/cpp/src/arrow/adapters/orc/adapter.cc
-index ac2a3db64..630d45798 100644
+index 03243e751..1eb9b2c81 100644
--- a/cpp/src/arrow/adapters/orc/adapter.cc
+++ b/cpp/src/arrow/adapters/orc/adapter.cc
@@ -47,9 +47,6 @@
#include "arrow/util/visibility.h"
#include "orc/Exceptions.hh"
-
+
-// alias to not interfere with nested orc namespace
-namespace liborc = orc;
-
#define ORC_THROW_NOT_OK(s) \
do { \
Status _s = (s); \
-@@ -202,6 +199,8 @@ class ORCFileReader::Impl {
+@@ -198,6 +195,8 @@ class ORCFileReader::Impl {
return Init();
}
-
+
+ virtual liborc::Reader* GetRawORCReader() { return reader_.get(); }
+
Status Init() {
int64_t nstripes = reader_->getNumberOfStripes();
stripes_.resize(nstripes);
-@@ -479,6 +478,32 @@ class ORCFileReader::Impl {
+@@ -504,6 +503,32 @@ class ORCFileReader::Impl {
return Status::OK();
}
-
+
+ Result<std::shared_ptr<RecordBatchReader>> NextStripeReader(
+ int64_t batch_size, const std::vector<std::string>& include_names) {
+ if (current_row_ >= NumberOfRows()) {
@@ -51,32 +51,32 @@ index ac2a3db64..630d45798 100644
+ pool_);
+ }
+
- Result<std::shared_ptr<RecordBatchReader>> NextStripeReader(
- int64_t batch_size, const std::vector<int>& include_indices) {
- if (current_row_ >= NumberOfRows()) {
-@@ -544,6 +569,8 @@ Result<std::unique_ptr<ORCFileReader>> ORCFileReader::Open(
+ Status NextStripeReader(int64_t batch_size, std::shared_ptr<RecordBatchReader>* out) {
+ return NextStripeReader(batch_size, {}, out);
+ }
+@@ -531,6 +556,8 @@ Result<std::unique_ptr<ORCFileReader>> ORCFileReader::Open(
return std::move(result);
}
-
+
+liborc::Reader* ORCFileReader::GetRawORCReader() { return impl_->GetRawORCReader(); }
+
Result<std::shared_ptr<const KeyValueMetadata>> ORCFileReader::ReadMetadata() {
return impl_->ReadMetadata();
}
-@@ -605,6 +632,11 @@ Result<std::shared_ptr<RecordBatchReader>> ORCFileReader::NextStripeReader(
- return impl_->NextStripeReader(batch_size, include_indices);
+@@ -653,6 +680,11 @@ Result<std::shared_ptr<RecordBatchReader>> ORCFileReader::NextStripeReader(
+ return reader;
}
-
+
+Result<std::shared_ptr<RecordBatchReader>> ORCFileReader::NextStripeReader(
+ int64_t batch_size, const std::vector<std::string>& include_names) {
+ return impl_->NextStripeReader(batch_size, include_names);
+}
+
int64_t ORCFileReader::NumberOfStripes() { return impl_->NumberOfStripes(); }
-
+
int64_t ORCFileReader::NumberOfRows() { return impl_->NumberOfRows(); }
diff --git a/cpp/src/arrow/adapters/orc/adapter.h b/cpp/src/arrow/adapters/orc/adapter.h
-index 013be7860..7fd06bcb8 100644
+index 223efa515..04e6b0612 100644
--- a/cpp/src/arrow/adapters/orc/adapter.h
+++ b/cpp/src/arrow/adapters/orc/adapter.h
@@ -30,6 +30,10 @@
@@ -87,23 +87,23 @@ index 013be7860..7fd06bcb8 100644
+
+// alias to not interfere with nested orc namespace
+namespace liborc = orc;
-
+
namespace arrow {
namespace adapters {
-@@ -53,6 +57,9 @@ class ARROW_EXPORT ORCFileReader {
- public:
- ~ORCFileReader();
-
+@@ -51,6 +55,9 @@ class ARROW_EXPORT ORCFileReader {
+ static Status Open(const std::shared_ptr<io::RandomAccessFile>& file, MemoryPool* pool,
+ std::unique_ptr<ORCFileReader>* reader);
+
+ /// \brief Get ORC reader from inside.
+ liborc::Reader* GetRawORCReader();
+
/// \brief Creates a new ORC reader
///
/// \param[in] file the data source
-@@ -174,6 +181,19 @@ class ARROW_EXPORT ORCFileReader {
- Result<std::shared_ptr<RecordBatchReader>> GetRecordBatchReader(
- int64_t batch_size, const std::vector<std::string>& include_names);
-
+@@ -240,6 +247,19 @@ class ARROW_EXPORT ORCFileReader {
+ Result<std::shared_ptr<RecordBatchReader>> NextStripeReader(
+ int64_t batch_size, const std::vector<int>& include_indices);
+
+ /// \brief Get a stripe level record batch iterator with specified row count
+ /// in each record batch. NextStripeReader serves as a fine grain
+ /// alternative to ReadStripe which may cause OOM issue by loading
@@ -119,4 +119,4 @@ index 013be7860..7fd06bcb8 100644
+
/// \brief The number of stripes in the file
int64_t NumberOfStripes();
-
+
diff --git a/thirdparty/patches/brpc-1.5.0-clang16.patch b/thirdparty/patches/brpc-1.5.0-clang16.patch
new file mode 100644
index 0000000000..42bb5bc5c9
--- /dev/null
+++ b/thirdparty/patches/brpc-1.5.0-clang16.patch
@@ -0,0 +1,22 @@
+diff --git a/src/bthread/task_group.cpp b/src/bthread/task_group.cpp
+index 6f5a4abd..e3aef91c 100644
+--- a/src/bthread/task_group.cpp
++++ b/src/bthread/task_group.cpp
+@@ -248,7 +248,7 @@ int TaskGroup::init(size_t runqueue_capacity) {
+ return 0;
+ }
+
+-#if defined(__linux__) && defined(__aarch64__) && defined(__clang__)
++#if defined(__clang__)
+ __attribute__((optnone))
+ #endif
+ void TaskGroup::task_runner(intptr_t skip_remained) {
+@@ -570,7 +570,7 @@ void TaskGroup::sched(TaskGroup** pg) {
+ sched_to(pg, next_tid);
+ }
+
+-#if defined(__linux__) && defined(__aarch64__) && defined(__clang__)
++#if defined(__clang__)
+ __attribute__((optnone))
+ #endif
+ void TaskGroup::sched_to(TaskGroup** pg, TaskMeta* next_meta) {
diff --git a/thirdparty/patches/brpc-1.5.0-gcc13.patch b/thirdparty/patches/brpc-1.5.0-gcc13.patch
new file mode 100644
index 0000000000..0cf7988d1d
--- /dev/null
+++ b/thirdparty/patches/brpc-1.5.0-gcc13.patch
@@ -0,0 +1,12 @@
+diff --git a/src/brpc/http2.h b/src/brpc/http2.h
+index 9a40d40..69d3087 100644
+--- a/src/brpc/http2.h
++++ b/src/brpc/http2.h
+@@ -18,6 +18,7 @@
+ #ifndef BAIDU_RPC_HTTP2_H
+ #define BAIDU_RPC_HTTP2_H
+
++#include <cstdint>
+ #include "brpc/http_status_code.h"
+
+ // To baidu-rpc developers: This is a header included by user, don't depend
diff --git a/thirdparty/patches/glog-0.6.0.patch b/thirdparty/patches/glog-0.6.0.patch
deleted file mode 100644
index 1997f860cb..0000000000
--- a/thirdparty/patches/glog-0.6.0.patch
+++ /dev/null
@@ -1,354 +0,0 @@
-diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in
-index 95a573b..57d2e9b 100644
---- a/src/glog/logging.h.in
-+++ b/src/glog/logging.h.in
-@@ -425,6 +425,12 @@ typedef void(*CustomPrefixCallback)(std::ostream& s, const LogMessageInfo& l, vo
- using fLS::FLAGS_##name
- #endif
-
-+// Set max log file num
-+DECLARE_int32(log_filenum_quota);
-+
-+// Set log file split method
-+DECLARE_string(log_split_method);
-+
- // Set whether appending a timestamp to the log file name
- DECLARE_bool(timestamp_in_logfile_name);
-
-diff --git a/src/logging.cc b/src/logging.cc
-index 4028ccc..fd60fc3 100644
---- a/src/logging.cc
-+++ b/src/logging.cc
-@@ -34,6 +34,7 @@
- #include <algorithm>
- #include <cassert>
- #include <iomanip>
-+#include <list>
- #include <string>
- #ifdef HAVE_UNISTD_H
- # include <unistd.h> // For _exit.
-@@ -50,6 +51,7 @@
- #include <iostream>
- #include <cstdarg>
- #include <cstdlib>
-+#include <dirent.h>
- #ifdef HAVE_PWD_H
- # include <pwd.h>
- #endif
-@@ -204,6 +206,12 @@ GLOG_DEFINE_string(log_backtrace_at, "",
- GLOG_DEFINE_bool(log_utc_time, false,
- "Use UTC time for logging.");
-
-+GLOG_DEFINE_string(log_split_method, "day",
-+ "split log by size, day, hour");
-+
-+GLOG_DEFINE_int32(log_filenum_quota, 10,
-+ "max log file num in log dir");
-+
- // TODO(hamaji): consider windows
- #define PATH_SEPARATOR '/'
-
-@@ -429,6 +437,15 @@ namespace {
-
- namespace {
-
-+typedef struct filetime {
-+ std::string name;
-+ time_t time;
-+
-+ bool operator < (const struct filetime& o) const {
-+ return o.time > time;
-+ }
-+} Filetime;
-+
- // Encapsulates all file-system related state
- class LogFileObject : public base::Logger {
- public:
-@@ -459,6 +476,7 @@ class LogFileObject : public base::Logger {
- // can avoid grabbing a lock. Usually Flush() calls it after
- // acquiring lock_.
- void FlushUnlocked();
-+ void CheckFileNumQuota();
-
- private:
- static const uint32 kRolloverAttemptFrequency = 0x20;
-@@ -476,6 +494,9 @@ class LogFileObject : public base::Logger {
- unsigned int rollover_attempt_;
- int64 next_flush_time_; // cycle count at which to flush log
- WallTime start_time_;
-+ std::list<Filetime> file_list_;
-+ bool inited_;
-+ struct ::tm tm_time_;
-
- // Actually create a logfile using the value of base_filename_ and the
- // optional argument time_pid_string
-@@ -665,7 +686,7 @@ inline void LogDestination::FlushLogFiles(int min_severity) {
- // all this stuff.
- MutexLock l(&log_mutex);
- for (int i = min_severity; i < NUM_SEVERITIES; i++) {
-- LogDestination* log = log_destination(i);
-+ LogDestination* log = log_destinations_[i];
- if (log != NULL) {
- log->logger_->Flush();
- }
-@@ -876,9 +897,12 @@ inline void LogDestination::LogToAllLogfiles(LogSeverity severity,
- } else if (FLAGS_logtostderr) { // global flag: never log to file
- ColoredWriteToStderr(severity, message, len);
- } else {
-- for (int i = severity; i >= 0; --i) {
-- LogDestination::MaybeLogToLogfile(i, timestamp, message, len);
-- }
-+ if (severity >= 1) {
-+ LogDestination::MaybeLogToLogfile(1, timestamp, message, len);
-+ LogDestination::MaybeLogToLogfile(0, timestamp, message, len);
-+ } else if (severity == 0) {
-+ LogDestination::MaybeLogToLogfile(0, timestamp, message, len);
-+ } else {}
- }
- }
-
-@@ -976,6 +1000,7 @@ LogFileObject::LogFileObject(LogSeverity severity,
- file_length_(0),
- rollover_attempt_(kRolloverAttemptFrequency-1),
- next_flush_time_(0),
-+ inited_(false),
- start_time_(WallTime_Now()) {
- assert(severity >= 0);
- assert(severity < NUM_SEVERITIES);
-@@ -1045,11 +1070,11 @@ bool LogFileObject::CreateLogfile(const string& time_pid_string) {
- string_filename += filename_extension_;
- const char* filename = string_filename.c_str();
- //only write to files, create if non-existant.
-- int flags = O_WRONLY | O_CREAT;
-- if (FLAGS_timestamp_in_logfile_name) {
-- //demand that the file is unique for our timestamp (fail if it exists).
-- flags = flags | O_EXCL;
-- }
-+ int flags = O_WRONLY | O_CREAT | O_APPEND;
-+ // if (FLAGS_timestamp_in_logfile_name) {
-+ // //demand that the file is unique for our timestamp (fail if it exists).
-+ // flags = flags | O_EXCL;
-+ // }
- int fd = open(filename, flags, FLAGS_logfile_mode);
- if (fd == -1) return false;
- #ifdef HAVE_FCNTL
-@@ -1097,6 +1122,10 @@ bool LogFileObject::CreateLogfile(const string& time_pid_string) {
- }
- }
- #endif
-+ Filetime ft;
-+ ft.name = string_filename;
-+ file_list_.push_back(ft);
-+
- // We try to create a symlink called <program_name>.<severity>,
- // which is easier to use. (Every time we create a new logfile,
- // we destroy the old symlink and create a new one, so it always
-@@ -1138,6 +1167,59 @@ bool LogFileObject::CreateLogfile(const string& time_pid_string) {
- return true; // Everything worked
- }
-
-+void LogFileObject::CheckFileNumQuota() {
-+ struct dirent *entry;
-+ DIR *dp;
-+
-+ const vector<string> & log_dirs = GetLoggingDirectories();
-+ if (log_dirs.size() < 1) return;
-+
-+ //fprintf(stderr, "log dir: %s\n", log_dirs[0].c_str());
-+
-+ // list file in log dir
-+ dp = opendir(log_dirs[0].c_str());
-+ if (dp == NULL) {
-+ fprintf(stderr, "open log dir %s fail\n", log_dirs[0].c_str());
-+ return;
-+ }
-+
-+ file_list_.clear();
-+ while ((entry = readdir(dp)) != NULL) {
-+ if (DT_DIR == entry->d_type ||
-+ DT_LNK == entry->d_type) {
-+ continue;
-+ }
-+ std::string filename = std::string(entry->d_name);
-+ //fprintf(stderr, "filename: %s\n", filename.c_str());
-+
-+ if (filename.find(symlink_basename_ + '.' + LogSeverityNames[severity_]) == 0) {
-+ std::string filepath = log_dirs[0] + "/" + filename;
-+
-+ struct stat fstat;
-+ if (::stat(filepath.c_str(), &fstat) < 0) {
-+ fprintf(stderr, "state %s fail\n", filepath.c_str());
-+ closedir(dp);
-+ return;
-+ }
-+ //fprintf(stderr, "filepath: %s\n", filepath.c_str());
-+
-+ Filetime file_time;
-+ file_time.time = fstat.st_mtime;
-+ file_time.name = filepath;
-+ file_list_.push_back(file_time);
-+ }
-+ }
-+ closedir(dp);
-+
-+ file_list_.sort();
-+
-+ while (FLAGS_log_filenum_quota > 0 && file_list_.size() >= FLAGS_log_filenum_quota) {
-+ // fprintf(stderr, "delete %s\n", file_list_.front().name.c_str());
-+ unlink(file_list_.front().name.c_str());
-+ file_list_.pop_front();
-+ }
-+}
-+
- void LogFileObject::Write(bool force_flush,
- time_t timestamp,
- const char* message,
-@@ -1149,13 +1231,55 @@ void LogFileObject::Write(bool force_flush,
- return;
- }
-
-- if (file_length_ >> 20U >= MaxLogSize() || PidHasChanged()) {
-+ struct ::tm tm_time;
-+
-+ bool is_split = false;
-+ if ("day" == FLAGS_log_split_method) {
-+ localtime_r(&timestamp, &tm_time);
-+ if (tm_time.tm_year != tm_time_.tm_year
-+ || tm_time.tm_mon != tm_time_.tm_mon
-+ || tm_time.tm_mday != tm_time_.tm_mday) {
-+ is_split = true;
-+ }
-+ } else if ("hour" == FLAGS_log_split_method) {
-+ localtime_r(&timestamp, &tm_time);
-+ if (tm_time.tm_year != tm_time_.tm_year
-+ || tm_time.tm_mon != tm_time_.tm_mon
-+ || tm_time.tm_mday != tm_time_.tm_mday
-+ || tm_time.tm_hour != tm_time_.tm_hour) {
-+ is_split = true;
-+ }
-+ } else if (file_length_ >> 20U >= MaxLogSize()) {
-+ // PidHasChanged()) {
-+ is_split = true;
-+ }
-+
-+ if (is_split) {
- if (file_ != NULL) fclose(file_);
- file_ = NULL;
- file_length_ = bytes_since_flush_ = dropped_mem_length_ = 0;
- rollover_attempt_ = kRolloverAttemptFrequency - 1;
- }
-
-+ if ((file_ == NULL) && (!inited_) && (FLAGS_log_split_method == "size")) {
-+ CheckFileNumQuota();
-+ const char* filename = file_list_.back().name.c_str();
-+ int fd = open(filename, O_WRONLY | O_CREAT | O_APPEND, FLAGS_logfile_mode);
-+ if (fd != -1) {
-+#ifdef HAVE_FCNTL
-+ // Mark the file close-on-exec. We don't really care if this fails
-+ fcntl(fd, F_SETFD, FD_CLOEXEC);
-+#endif
-+ file_ = fopen(filename, "a+"); // Read and append a FILE*.
-+ if (file_ == NULL) { // Man, we're screwed!, try to create new log file
-+ close(fd);
-+ }
-+ fseek(file_, 0, SEEK_END);
-+ file_length_ = bytes_since_flush_ = ftell(file_);
-+ inited_ = true;
-+ }
-+ }
-+
- // If there's no destination file, make one before outputting
- if (file_ == NULL) {
- // Try to rollover the log file every 32 log messages. The only time
-@@ -1164,7 +1288,15 @@ void LogFileObject::Write(bool force_flush,
- if (++rollover_attempt_ != kRolloverAttemptFrequency) return;
- rollover_attempt_ = 0;
-
-- struct ::tm tm_time;
-+ if (!inited_) {
-+ CheckFileNumQuota();
-+ inited_ = true;
-+ } else {
-+ while (FLAGS_log_filenum_quota > 0 && file_list_.size() >= FLAGS_log_filenum_quota) {
-+ unlink(file_list_.front().name.c_str());
-+ file_list_.pop_front();
-+ }
-+ }
- if (FLAGS_log_utc_time) {
- gmtime_r(&timestamp, &tm_time);
- } else {
-@@ -1176,13 +1308,19 @@ void LogFileObject::Write(bool force_flush,
- time_pid_stream.fill('0');
- time_pid_stream << 1900+tm_time.tm_year
- << setw(2) << 1+tm_time.tm_mon
-- << setw(2) << tm_time.tm_mday
-- << '-'
-- << setw(2) << tm_time.tm_hour
-- << setw(2) << tm_time.tm_min
-- << setw(2) << tm_time.tm_sec
-- << '.'
-- << GetMainThreadPid();
-+ << setw(2) << tm_time.tm_mday;
-+
-+ if ("hour" == FLAGS_log_split_method) {
-+ time_pid_stream << setw(2) << tm_time.tm_hour;
-+ } else if ("day" != FLAGS_log_split_method) {
-+ time_pid_stream << '-'
-+ << setw(2) << tm_time.tm_hour
-+ << setw(2) << tm_time.tm_min
-+ << setw(2) << tm_time.tm_sec;
-+ }
-+
-+ tm_time_ = tm_time;
-+
- const string& time_pid_string = time_pid_stream.str();
-
- if (base_filename_selected_) {
-@@ -1216,9 +1354,7 @@ void LogFileObject::Write(bool force_flush,
- // deadlock. Simply use a name like invalid-user.
- if (uidname.empty()) uidname = "invalid-user";
-
-- stripped_filename = stripped_filename+'.'+hostname+'.'
-- +uidname+".log."
-- +LogSeverityNames[severity_]+'.';
-+ stripped_filename = stripped_filename + "." + LogSeverityNames[severity_] + ".log.";
-- // We're going to (potentially) try to put logs in several different dirs
- const vector<string> & log_dirs = GetLoggingDirectories();
-
-@@ -1243,36 +1379,6 @@ void LogFileObject::Write(bool force_flush,
- }
- }
-
-- // Write a header message into the log file
-- ostringstream file_header_stream;
-- file_header_stream.fill('0');
-- file_header_stream << "Log file created at: "
-- << 1900+tm_time.tm_year << '/'
-- << setw(2) << 1+tm_time.tm_mon << '/'
-- << setw(2) << tm_time.tm_mday
-- << ' '
-- << setw(2) << tm_time.tm_hour << ':'
-- << setw(2) << tm_time.tm_min << ':'
-- << setw(2) << tm_time.tm_sec << (FLAGS_log_utc_time ? " UTC\n" : "\n")
-- << "Running on machine: "
-- << LogDestination::hostname() << '\n';
--
-- if(!g_application_fingerprint.empty()) {
-- file_header_stream << "Application fingerprint: " <<
g_application_fingerprint << '\n';
-- }
-- const char* const date_time_format = FLAGS_log_year_in_prefix
-- ? "yyyymmdd hh:mm:ss.uuuuuu"
-- : "mmdd hh:mm:ss.uuuuuu";
-- file_header_stream << "Running duration (h:mm:ss): "
-- << PrettyDuration(static_cast<int>(WallTime_Now() - start_time_)) << '\n'
-- << "Log line format: [IWEF]" << date_time_format << " "
-- << "threadid file:line] msg" << '\n';
-- const string& file_header_string = file_header_stream.str();
--
-- const size_t header_len = file_header_string.size();
-- fwrite(file_header_string.data(), 1, header_len, file_);
-- file_length_ += header_len;
-- bytes_since_flush_ += header_len;
- }
-
- // Write to LOG file
\ No newline at end of file
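The deleted glog-0.6.0.patch above carries the Doris-specific log rolling knobs FLAGS_log_split_method and FLAGS_log_filenum_quota. Because they are declared with GLOG_DEFINE_*, glog can seed them from GLOG_-prefixed environment variables; a hypothetical way to exercise them (the binary name is a placeholder, and whether a deployment actually sets the flags this way is an assumption):

    # split files per day and keep at most 10 of them
    GLOG_log_split_method=day GLOG_log_filenum_quota=10 ./some_glog_binary
    # or, when gflags command-line parsing is enabled in the build:
    ./some_glog_binary --log_split_method=hour --log_filenum_quota=24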
diff --git a/thirdparty/patches/opentelemetry-cpp-1.10.0.patch b/thirdparty/patches/opentelemetry-cpp-1.8.3.patch
similarity index 93%
rename from thirdparty/patches/opentelemetry-cpp-1.10.0.patch
rename to thirdparty/patches/opentelemetry-cpp-1.8.3.patch
index b0e7102a6d..534d6056f6 100644
--- a/thirdparty/patches/opentelemetry-cpp-1.10.0.patch
+++ b/thirdparty/patches/opentelemetry-cpp-1.8.3.patch
@@ -1,6 +1,21 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index f4fa0649..a64e74bc 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -118,9 +118,8 @@ option(WITH_GSL
+
+ option(WITH_ABSEIL "Whether to use Abseil for C++latest features" OFF)
+
++set(OPENTELEMETRY_INSTALL_default ON)
+ if(NOT CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
+- set(OPENTELEMETRY_INSTALL_default ON)
+-else()
+ set(OPENTELEMETRY_INSTALL_default OFF)
+ endif()
+ option(OPENTELEMETRY_INSTALL "Whether to install opentelemetry targets"
diff --git a/api/include/opentelemetry/common/threadlocal.h b/api/include/opentelemetry/common/threadlocal.h
new file mode 100644
-index 00000000..623a6454
+index 00000000..23a39e14
--- /dev/null
+++ b/api/include/opentelemetry/common/threadlocal.h
@@ -0,0 +1,123 @@
@@ -11,12 +26,12 @@ index 00000000..623a6454
+
+#include "opentelemetry/version.h"
+
-+
++//
+// GCC can be told that a certain branch is not likely to be taken (for
+// instance, a CHECK failure), and use that information in static analysis.
+// Giving it this information can help it optimize for the common case in
+// the absence of better information (ie. -fprofile-arcs).
-+
++//
+#define LIKELY(expr) __builtin_expect(!!(expr), 1)
+#define UNLIKELY(expr) __builtin_expect(!!(expr), 0)
+
@@ -129,27 +144,27 @@ index 00000000..623a6454
+OPENTELEMETRY_END_NAMESPACE
\ No newline at end of file
diff --git a/api/include/opentelemetry/context/runtime_context.h b/api/include/opentelemetry/context/runtime_context.h
-index 2cd5b0ff..9e3c5c47 100644
+index bec96a9a..2349a29c 100644
--- a/api/include/opentelemetry/context/runtime_context.h
+++ b/api/include/opentelemetry/context/runtime_context.h
@@ -5,6 +5,7 @@
-
+
#include "opentelemetry/common/macros.h"
#include "opentelemetry/context/context.h"
+#include "opentelemetry/common/threadlocal.h"
- #include "opentelemetry/nostd/shared_ptr.h"
- #include "opentelemetry/nostd/string_view.h"
- #include "opentelemetry/nostd/unique_ptr.h"
-@@ -193,7 +194,7 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
+
+ OPENTELEMETRY_BEGIN_NAMESPACE
+ namespace context
+@@ -189,7 +190,7 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
ThreadLocalContextStorage() noexcept = default;
-
+
// Return the current context.
- Context GetCurrent() noexcept override { return GetStack().Top(); }
+ Context GetCurrent() noexcept override { return GetStack()->Top(); }
-
+
// Resets the context to the value previous to the passed in token. This will
// also detach all child contexts of the passed in token.
-@@ -201,23 +202,23 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
+@@ -197,23 +198,23 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
bool Detach(Token &token) noexcept override
{
// In most cases, the context to be detached is on the top of the stack.
@@ -160,26 +175,26 @@ index 2cd5b0ff..9e3c5c47 100644
+ GetStack()->Pop();
return true;
}
-
+
- if (!GetStack().Contains(token))
+ if (!GetStack()->Contains(token))
{
return false;
}
-
+
- while (!(token == GetStack().Top()))
+ while (!(token == GetStack()->Top()))
{
- GetStack().Pop();
+ GetStack()->Pop();
}
-
+
- GetStack().Pop();
+ GetStack()->Pop();
-
+
return true;
}
-@@ -226,14 +227,14 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
+@@ -222,14 +223,14 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
// that can be used to reset to the previous Context.
nostd::unique_ptr<Token> Attach(const Context &context) noexcept override
{
@@ -187,19 +202,19 @@ index 2cd5b0ff..9e3c5c47 100644
+ GetStack()->Push(context);
return CreateToken(context);
}
-
+
-private:
// A nested class to store the attached contexts in a stack.
class Stack
{
+ public:
friend class ThreadLocalContextStorage;
-
+
Stack() noexcept : size_(0), capacity_(0), base_(nullptr) {}
-@@ -320,9 +321,10 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
+@@ -316,9 +317,10 @@ class ThreadLocalContextStorage : public RuntimeContextStorage
Context *base_;
};
-
+
- OPENTELEMETRY_API_SINGLETON Stack &GetStack()
+ OPENTELEMETRY_API_SINGLETON Stack *GetStack()
{
@@ -216,32 +231,20 @@ index 2e8f3bb6..224e6463 100644
@@ -1,7 +1,7 @@
# Copyright The OpenTelemetry Authors
# SPDX-License-Identifier: Apache-2.0
-
+
-add_library(opentelemetry_sdk INTERFACE)
+add_library(opentelemetry_sdk INTERFACE ../api/include/opentelemetry/common/threadlocal.h)
target_include_directories(
opentelemetry_sdk
INTERFACE "$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/include>"
-diff --git a/sdk/include/opentelemetry/sdk/metrics/data/circular_buffer.h b/sdk/include/opentelemetry/sdk/metrics/data/circular_buffer.h
-index 1608a3a0..6af2bd06 100644
---- a/sdk/include/opentelemetry/sdk/metrics/data/circular_buffer.h
-+++ b/sdk/include/opentelemetry/sdk/metrics/data/circular_buffer.h
-@@ -7,6 +7,7 @@
-
- #include <limits>
- #include <vector>
-+#include <stdint.h>
-
- OPENTELEMETRY_BEGIN_NAMESPACE
- namespace sdk
diff --git a/sdk/src/common/CMakeLists.txt b/sdk/src/common/CMakeLists.txt
-index 7bb645f5..041d8ede 100644
+index b23a93e8..1cc0e0f2 100644
--- a/sdk/src/common/CMakeLists.txt
+++ b/sdk/src/common/CMakeLists.txt
@@ -1,7 +1,7 @@
# Copyright The OpenTelemetry Authors
# SPDX-License-Identifier: Apache-2.0
-
+
-set(COMMON_SRCS random.cc core.cc global_log_handler.cc env_variables.cc)
+set(COMMON_SRCS random.cc core.cc global_log_handler.cc env_variables.cc threadlocal.cc)
if(WIN32)
@@ -253,26 +256,26 @@ index 77b88cfa..dc71f9c1 100644
+++ b/sdk/src/common/random.cc
@@ -3,6 +3,7 @@
// SPDX-License-Identifier: Apache-2.0
-
+
#include "src/common/random.h"
+#include "opentelemetry/common/threadlocal.h"
#include "src/common/platform/fork.h"
-
+
#include <cstring>
@@ -29,33 +30,37 @@ class TlsRandomNumberGenerator
platform::AtFork(nullptr, nullptr, OnFork);
}
-
+
- static FastRandomNumberGenerator &engine() noexcept { return engine_; }
+ static FastRandomNumberGenerator *engine() noexcept { return engine_; }
-
+
private:
- static thread_local FastRandomNumberGenerator engine_;
+ // static thread_local FastRandomNumberGenerator engine_;
+ DECLARE_STATIC_THREAD_LOCAL_TELEMETRY(FastRandomNumberGenerator, engine_);
-
+
static void OnFork() noexcept { Seed(); }
-
+
static void Seed() noexcept
{
+ INIT_STATIC_THREAD_LOCAL_TELEMETRY(FastRandomNumberGenerator, engine_);
@@ -282,12 +285,12 @@ index 77b88cfa..dc71f9c1 100644
+ engine_->seed(seed_seq);
}
};
-
+
-thread_local FastRandomNumberGenerator TlsRandomNumberGenerator::engine_{};
+// thread_local FastRandomNumberGenerator TlsRandomNumberGenerator::engine_{};
+DEFINE_STATIC_THREAD_LOCAL_TELEMETRY(FastRandomNumberGenerator, TlsRandomNumberGenerator, engine_);
} // namespace
-
+
-FastRandomNumberGenerator &Random::GetRandomNumberGenerator() noexcept
+FastRandomNumberGenerator *Random::GetRandomNumberGenerator() noexcept
{
@@ -296,13 +299,13 @@ index 77b88cfa..dc71f9c1 100644
+ BLOCK_STATIC_THREAD_LOCAL_TELEMTRY(TlsRandomNumberGenerator, random_number_generator);
return TlsRandomNumberGenerator::engine();
}
-
+
uint64_t Random::GenerateRandom64() noexcept
{
- return GetRandomNumberGenerator()();
+ return GetRandomNumberGenerator()->operator()();
}
-
+
void Random::GenerateRandomBuffer(opentelemetry::nostd::span<uint8_t> buffer) noexcept
diff --git a/sdk/src/common/random.h b/sdk/src/common/random.h
index ecd6dabc..1aaa2204 100644
@@ -406,4 +409,4 @@ index 00000000..ec2038b4
+}
+} // namespace internal_threadlocal
+OPENTELEMETRY_END_NAMESPACE
-\ No newline at end of file
\ No newline at end of file
+\ No newline at end of file
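The opentelemetry-cpp patch above (together with the comment restored in download-thirdparty.sh) works around C++ thread_local requiring __cxa_thread_atexit_impl, which glibc only exports starting with GLIBC_2.18, so binaries built that way will not load on glibc 2.17 hosts such as CentOS 7. A quick, illustrative way to audit an artifact for that requirement (the path is a placeholder):

    # list the glibc symbol versions the binary requires; seeing GLIBC_2.18
    # means it cannot run on a glibc 2.17 system
    objdump -T path/to/doris_be | grep -o 'GLIBC_[0-9.]*' | sort -uV | tail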
diff --git a/thirdparty/vars.sh b/thirdparty/vars.sh
index 8fdafe184b..435434927f 100644
--- a/thirdparty/vars.sh
+++ b/thirdparty/vars.sh
@@ -73,11 +73,10 @@ THRIFT_SOURCE=thrift-0.16.0
THRIFT_MD5SUM="44cf1b54b4ec1890576c85804acfa637"
# protobuf
-# brpc is not yet compatible with protobuf >= 22
-PROTOBUF_DOWNLOAD="https://github.com/protocolbuffers/protobuf/releases/download/v21.11/protobuf-all-21.11.tar.gz"
-PROTOBUF_NAME="protobuf-all-21.11.tar.gz"
-PROTOBUF_SOURCE=protobuf-21.11
-PROTOBUF_MD5SUM="b3b104f0374802e1add5d5d7a5a845ac"
+PROTOBUF_DOWNLOAD="https://github.com/google/protobuf/archive/v3.15.0.tar.gz"
+PROTOBUF_NAME=protobuf-3.15.0.tar.gz
+PROTOBUF_SOURCE=protobuf-3.15.0
+PROTOBUF_MD5SUM="3fb6c41f7b3c621424543f43d54baa42"
# gflags
GFLAGS_DOWNLOAD="https://github.com/gflags/gflags/archive/v2.2.2.tar.gz"
@@ -86,10 +85,10 @@ GFLAGS_SOURCE=gflags-2.2.2
GFLAGS_MD5SUM="1a865b93bacfa963201af3f75b7bd64c"
# glog
-GLOG_DOWNLOAD="https://github.com/google/glog/archive/refs/tags/v0.6.0.tar.gz"
-GLOG_NAME="glog-v0.6.0.tar.gz"
-GLOG_SOURCE=glog-0.6.0
-GLOG_MD5SUM="c98a6068bc9b8ad9cebaca625ca73aa2"
+GLOG_DOWNLOAD="https://github.com/google/glog/archive/v0.4.0.tar.gz"
+GLOG_NAME=glog-0.4.0.tar.gz
+GLOG_SOURCE=glog-0.4.0
+GLOG_MD5SUM="0daea8785e6df922d7887755c3d100d0"
# gtest
GTEST_DOWNLOAD="https://github.com/google/googletest/archive/release-1.11.0.tar.gz"
@@ -116,10 +115,10 @@ ZLIB_SOURCE=zlib-1.2.11
ZLIB_MD5SUM="1c9f62f0778697a09d36121ead88e08e"
# lz4
-LZ4_DOWNLOAD="https://github.com/lz4/lz4/archive/v1.9.4.tar.gz"
-LZ4_NAME=lz4-1.9.4.tar.gz
-LZ4_SOURCE=lz4-1.9.4
-LZ4_MD5SUM="e9286adb64040071c5e23498bf753261"
+LZ4_DOWNLOAD="https://github.com/lz4/lz4/archive/v1.9.3.tar.gz"
+LZ4_NAME=lz4-1.9.3.tar.gz
+LZ4_SOURCE=lz4-1.9.3
+LZ4_MD5SUM="3a1ab1684e14fc1afc66228ce61b2db3"
# bzip
BZIP_DOWNLOAD="https://fossies.org/linux/misc/bzip2-1.0.8.tar.gz"
@@ -128,7 +127,7 @@ BZIP_SOURCE=bzip2-1.0.8
BZIP_MD5SUM="67e051268d0c475ea773822f7500d0e5"
# lzo2
-LZO2_DOWNLOAD="https://fossies.org/linux/misc/lzo-2.10.tar.gz"
+LZO2_DOWNLOAD="http://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz"
LZO2_NAME=lzo-2.10.tar.gz
LZO2_SOURCE=lzo-2.10
LZO2_MD5SUM="39d3f3f9c55c87b1e5d6888e1420f4b5"
@@ -140,10 +139,10 @@ RAPIDJSON_SOURCE=rapidjson-1a803826f1197b5e30703afe4b9c0e7dd48074f5
RAPIDJSON_MD5SUM="f2212a77e055a15501477f1e390007ea"
# curl
-CURL_DOWNLOAD="https://curl.se/download/curl-8.2.1.tar.gz"
-CURL_NAME="curl-8.2.1.tar.gz"
-CURL_SOURCE=curl-8.2.1
-CURL_MD5SUM="b25588a43556068be05e1624e0e74d41"
+CURL_DOWNLOAD="https://curl.se/download/curl-7.79.0.tar.gz"
+CURL_NAME=curl-7.79.0.tar.gz
+CURL_SOURCE=curl-7.79.0
+CURL_MD5SUM="b40e4dc4bbc9e109c330556cd58c8ec8"
# RE2
RE2_DOWNLOAD="https://github.com/google/re2/archive/2021-02-02.tar.gz"
@@ -198,10 +197,10 @@ LEVELDB_SOURCE=leveldb-1.23
LEVELDB_MD5SUM="afbde776fb8760312009963f09a586c7"
# brpc
-BRPC_DOWNLOAD="https://github.com/apache/brpc/archive/refs/tags/1.6.0.tar.gz"
-BRPC_NAME="brpc-1.6.0.tar.gz"
-BRPC_SOURCE="brpc-1.6.0"
-BRPC_MD5SUM="0d37cea25bd006e89806f461ef7e39ba"
+BRPC_DOWNLOAD="https://github.com/apache/brpc/archive/refs/tags/1.4.0.tar.gz"
+BRPC_NAME="brpc-1.4.0.tar.gz"
+BRPC_SOURCE="brpc-1.4.0"
+BRPC_MD5SUM="6af9d50822c33a3abc56a1ec0af0e0bc"
# rocksdb
ROCKSDB_DOWNLOAD="https://github.com/facebook/rocksdb/archive/v5.14.2.tar.gz"
@@ -222,10 +221,10 @@ LIBRDKAFKA_SOURCE=librdkafka-1.8.2
LIBRDKAFKA_MD5SUM="0abec0888d10c9553cdcbcbf9172d558"
# zstd
-ZSTD_DOWNLOAD="https://github.com/facebook/zstd/releases/download/v1.5.5/zstd-1.5.5.tar.gz"
-ZSTD_NAME=zstd-1.5.5.tar.gz
-ZSTD_SOURCE=zstd-1.5.5
-ZSTD_MD5SUM="63251602329a106220e0a5ad26ba656f"
+ZSTD_DOWNLOAD="https://github.com/facebook/zstd/releases/download/v1.5.2/zstd-1.5.2.tar.gz"
+ZSTD_NAME=zstd-1.5.2.tar.gz
+ZSTD_SOURCE=zstd-1.5.2
+ZSTD_MD5SUM="072b10f71f5820c24761a65f31f43e73"
# brotli
BROTLI_DOWNLOAD="https://github.com/google/brotli/archive/v1.0.9.tar.gz"
@@ -239,30 +238,17 @@ FLATBUFFERS_NAME=flatbuffers-2.0.0.tar.gz
FLATBUFFERS_SOURCE=flatbuffers-2.0.0
FLATBUFFERS_MD5SUM="a27992324c3cbf86dd888268a23d17bd"
-# c-ares
-CARES_DOWNLOAD="https://github.com/c-ares/c-ares/releases/download/cares-1_19_1/c-ares-1.19.1.tar.gz"
-CARES_NAME="c-ares-1.19.1.tar.gz"
-CARES_SOURCE=c-ares-1.19.1
-CARES_MD5SUM="dafc5825a92dc907e144570e4e75a908"
-
-# grpc
-# grpc v1.55 and above require protobuf >= 22
-GRPC_DOWNLOAD="https://github.com/grpc/grpc/archive/refs/tags/v1.54.3.tar.gz"
-GRPC_NAME="grpc-v1.54.3.tar.gz"
-GRPC_SOURCE=grpc-1.54.3
-GRPC_MD5SUM="af00a2edeae0f02bb25917cc3473b7de"
-
# arrow
-ARROW_DOWNLOAD="https://github.com/apache/arrow/archive/refs/tags/apache-arrow-13.0.0.tar.gz"
-ARROW_NAME="apache-arrow-13.0.0.tar.gz"
-ARROW_SOURCE="arrow-apache-arrow-13.0.0"
-ARROW_MD5SUM="8ec1ec6a119514bcaea1cf7aabc9df1f"
+ARROW_DOWNLOAD="https://archive.apache.org/dist/arrow/arrow-7.0.0/apache-arrow-7.0.0.tar.gz"
+ARROW_NAME="apache-arrow-7.0.0.tar.gz"
+ARROW_SOURCE="apache-arrow-7.0.0"
+ARROW_MD5SUM="316ade159901646849b3b4760fa52816"
# Abseil
-ABSEIL_DOWNLOAD="https://github.com/abseil/abseil-cpp/archive/refs/tags/20230125.3.tar.gz"
-ABSEIL_NAME="abseil-cpp-20230125.3.tar.gz"
-ABSEIL_SOURCE=abseil-cpp-20230125.3
-ABSEIL_MD5SUM="9b6dae642c4bd92f007ab2c148bc0498"
+ABSEIL_DOWNLOAD="https://github.com/abseil/abseil-cpp/archive/refs/tags/20220623.1.tar.gz"
+ABSEIL_NAME=abseil-cpp-20220623.1.tar.gz
+ABSEIL_SOURCE=abseil-cpp-20220623.1
+ABSEIL_MD5SUM="2aea7c1171c4c280f755de170295afd6"
# S2
S2_DOWNLOAD="https://github.com/google/s2geometry/archive/refs/tags/v0.10.0.tar.gz"
@@ -295,16 +281,16 @@ PARALLEL_HASHMAP_SOURCE="parallel-hashmap-1.3.8"
PARALLEL_HASHMAP_MD5SUM="1b8130d0b4f656257ef654699bfbf941"
# orc
-ORC_DOWNLOAD="https://archive.apache.org/dist/orc/orc-1.9.0/orc-1.9.0.tar.gz"
-ORC_NAME="orc-1.9.0.tar.gz"
-ORC_SOURCE=orc-1.9.0
-ORC_MD5SUM="5dc1c91c4867e4519aab531ffc30fab7"
+ORC_DOWNLOAD="https://archive.apache.org/dist/orc/orc-1.7.2/orc-1.7.2.tar.gz"
+ORC_NAME=orc-1.7.2.tar.gz
+ORC_SOURCE=orc-1.7.2
+ORC_MD5SUM="6cab37935eacdec7d078d327746a8578"
# jemalloc for arrow
-JEMALLOC_ARROW_DOWNLOAD="https://github.com/jemalloc/jemalloc/releases/download/5.3.0/jemalloc-5.3.0.tar.bz2"
-JEMALLOC_ARROW_NAME="jemalloc-5.3.0.tar.bz2"
-JEMALLOC_ARROW_SOURCE="jemalloc-5.3.0"
-JEMALLOC_ARROW_MD5SUM="09a8328574dab22a7df848eae6dbbf53"
+JEMALLOC_ARROW_DOWNLOAD="https://github.com/jemalloc/jemalloc/releases/download/5.2.1/jemalloc-5.2.1.tar.bz2"
+JEMALLOC_ARROW_NAME="jemalloc-5.2.1.tar.bz2"
+JEMALLOC_ARROW_SOURCE="jemalloc-5.2.1"
+JEMALLOC_ARROW_MD5SUM="3d41fbf006e6ebffd489bdb304d009ae"
# jemalloc for doris
JEMALLOC_DORIS_DOWNLOAD="https://github.com/jemalloc/jemalloc/releases/download/5.3.0/jemalloc-5.3.0.tar.bz2"
@@ -407,16 +393,16 @@ PDQSORT_MD5SUM="af28f79d5d7d7a5486f54d9f1244c2b5"
# benchmark
BENCHMARK_DOWNLOAD="https://github.com/google/benchmark/archive/refs/tags/v1.8.0.tar.gz"
-BENCHMARK_NAME=benchmark-v1.8.0.tar.gz
+BENCHMARK_NAME=v1.8.0.tar.gz
BENCHMARK_SOURCE=benchmark-1.8.0
BENCHMARK_MD5SUM="8ddf8571d3f6198d37852bcbd964f817"
# xsimd
-# for arrow-13.0.0, if arrow upgrade, this version may also need to be changed
-XSIMD_DOWNLOAD="https://github.com/xtensor-stack/xsimd/archive/refs/tags/9.0.1.tar.gz"
-XSIMD_NAME="xsimd-9.0.1.tar.gz"
-XSIMD_SOURCE=xsimd-9.0.1
-XSIMD_MD5SUM="59f38fe3364acd7ed137771258812d6c"
+# for arrow-7.0.0, if arrow upgrade, this version may also need to be changed
+XSIMD_DOWNLOAD="https://github.com/xtensor-stack/xsimd/archive/aeec9c872c8b475dedd7781336710f2dd2666cb2.tar.gz"
+XSIMD_NAME=xsimd-aeec9c872c8b475dedd7781336710f2dd2666cb2.tar.gz
+XSIMD_SOURCE=xsimd-aeec9c872c8b475dedd7781336710f2dd2666cb2
+XSIMD_MD5SUM="d024855f71c0a2837a6918c0f8f66245"
# simdjson
SIMDJSON_DOWNLOAD="https://github.com/simdjson/simdjson/archive/refs/tags/v3.0.1.tar.gz"
@@ -431,16 +417,16 @@ NLOHMANN_JSON_SOURCE=json-3.10.1
NLOHMANN_JSON_MD5SUM="7b369d567afc0dffdcf5800fd9abb836"
# opentelemetry-proto
-OPENTELEMETRY_PROTO_DOWNLOAD="https://github.com/open-telemetry/opentelemetry-proto/archive/refs/tags/v1.0.0.tar.gz"
-OPENTELEMETRY_PROTO_NAME="opentelemetry-proto-v1.0.0.tar.gz"
-OPENTELEMETRY_PROTO_SOURCE=opentelemetry-proto-1.0.0
-OPENTELEMETRY_PROTO_MD5SUM="8c7495a0dceea7cfdbdbcd53b07436dc"
+OPENTELEMETRY_PROTO_DOWNLOAD="https://github.com/open-telemetry/opentelemetry-proto/archive/refs/tags/v0.19.0.tar.gz"
+OPENTELEMETRY_PROTO_NAME=opentelemetry-proto-0.19.0.tar.gz
+OPENTELEMETRY_PROTO_SOURCE=opentelemetry-proto-0.19.0
+OPENTELEMETRY_PROTO_MD5SUM="8a5fb1ba721341994a5e7cae8b38bcc6"
# opentelemetry
-OPENTELEMETRY_DOWNLOAD="https://github.com/open-telemetry/opentelemetry-cpp/archive/refs/tags/v1.10.0.tar.gz"
-OPENTELEMETRY_NAME="opentelemetry-cpp-v1.10.0.tar.gz"
-OPENTELEMETRY_SOURCE=opentelemetry-cpp-1.10.0
-OPENTELEMETRY_MD5SUM="89169762241b2f5142b728c775173283"
+OPENTELEMETRY_DOWNLOAD="https://github.com/open-telemetry/opentelemetry-cpp/archive/refs/tags/v1.8.3.tar.gz"
+OPENTELEMETRY_NAME=opentelemetry-cpp-1.8.3.tar.gz
+OPENTELEMETRY_SOURCE=opentelemetry-cpp-1.8.3
+OPENTELEMETRY_MD5SUM="a051dbbced2b7eb7a7227bdf5b3b9a10"
# libbacktrace
LIBBACKTRACE_DOWNLOAD="https://codeload.github.com/ianlancetaylor/libbacktrace/zip/2446c66076480ce07a6bd868badcbceb3eeecc2e"
@@ -517,8 +503,6 @@ export TP_ARCHIVES=(
'FMT'
'PARALLEL_HASHMAP'
'ORC'
- 'CARES'
- 'GRPC'
'JEMALLOC_ARROW'
'JEMALLOC_DORIS'
'LIBUNWIND'
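vars.sh only declares the coordinates of each archive; the download step consumes the matching *_DOWNLOAD, *_NAME and *_MD5SUM triple. A minimal sketch of that pattern, assuming curl and md5sum are on the PATH and ${TP_SOURCE_DIR} is set by vars.sh as the scripts above expect (the actual download-thirdparty.sh logic may differ in detail):

    source thirdparty/vars.sh

    fetch_one() {
        local url="$1" name="$2" md5="$3"
        curl -fSL "${url}" -o "${TP_SOURCE_DIR}/${name}"
        # md5sum -c expects "<hash>  <file>" (two spaces)
        echo "${md5}  ${TP_SOURCE_DIR}/${name}" | md5sum -c -
    }

    # e.g. the protobuf entry restored by this revert:
    fetch_one "${PROTOBUF_DOWNLOAD}" "${PROTOBUF_NAME}" "${PROTOBUF_MD5SUM}"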
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]